Posted to commits@kylin.apache.org by li...@apache.org on 2017/06/05 05:22:49 UTC

[01/67] [abbrv] kylin git commit: Revert "reformat code"

Repository: kylin
Updated Branches:
  refs/heads/master 53aafa97e -> c38def7b5


http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java b/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
index 7709760..8e17645 100644
--- a/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
+++ b/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
@@ -46,13 +46,10 @@ import com.google.common.collect.Sets;
 public class MetadataCleanupJob extends AbstractHadoopJob {
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false)
-            .withDescription("Delete the unused metadata").create("delete");
+    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused metadata").create("delete");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_THRESHOLD_FOR_JOB = OptionBuilder.withArgName("jobThreshold").hasArg()
-            .isRequired(false).withDescription("Specify how many days of job metadata keeping. Default 30 days")
-            .create("jobThreshold");
+    private static final Option OPTION_THRESHOLD_FOR_JOB = OptionBuilder.withArgName("jobThreshold").hasArg().isRequired(false).withDescription("Specify how many days of job metadata keeping. Default 30 days").create("jobThreshold");
 
     protected static final Logger logger = LoggerFactory.getLogger(MetadataCleanupJob.class);
 
@@ -61,7 +58,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
     private KylinConfig config = null;
 
     private static final long TIME_THREADSHOLD = 1 * 3600 * 1000L; // 1 hour
-    private static final int DEFAULT_DAY_THREADSHOLD_FOR_JOB = 30; // 30 days
+    private static final int DEFAULT_DAY_THREADSHOLD_FOR_JOB = 30 ; // 30 days
 
     /*
      * (non-Javadoc)
@@ -107,8 +104,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         List<String> toDeleteResource = Lists.newArrayList();
 
         // two level resources, snapshot tables and cube statistics
-        for (String resourceRoot : new String[] { ResourceStore.SNAPSHOT_RESOURCE_ROOT,
-                ResourceStore.CUBE_STATISTICS_ROOT }) {
+        for (String resourceRoot : new String[] { ResourceStore.SNAPSHOT_RESOURCE_ROOT, ResourceStore.CUBE_STATISTICS_ROOT }) {
             NavigableSet<String> snapshotTables = getStore().listResources(resourceRoot);
 
             if (snapshotTables != null) {
@@ -157,14 +153,10 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         for (ExecutablePO executable : allExecutable) {
             long lastModified = executable.getLastModified();
             ExecutableOutputPO output = executableDao.getJobOutput(executable.getUuid());
-            int jobThresholdDay = optionsHelper.hasOption(OPTION_THRESHOLD_FOR_JOB)
-                    ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_THRESHOLD_FOR_JOB))
-                    : DEFAULT_DAY_THREADSHOLD_FOR_JOB;
+            int jobThresholdDay = optionsHelper.hasOption(OPTION_THRESHOLD_FOR_JOB) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_THRESHOLD_FOR_JOB)) : DEFAULT_DAY_THREADSHOLD_FOR_JOB;
             long jobThresholdTime = jobThresholdDay * 24 * 3600 * 1000L;
 
-            if (System.currentTimeMillis() - lastModified > jobThresholdTime
-                    && (ExecutableState.SUCCEED.toString().equals(output.getStatus())
-                            || ExecutableState.DISCARDED.toString().equals(output.getStatus()))) {
+            if (System.currentTimeMillis() - lastModified > jobThresholdTime && (ExecutableState.SUCCEED.toString().equals(output.getStatus()) || ExecutableState.DISCARDED.toString().equals(output.getStatus()))) {
                 toDeleteResource.add(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + executable.getUuid());
                 toDeleteResource.add(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + executable.getUuid());
 
@@ -175,8 +167,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         }
 
         if (toDeleteResource.size() > 0) {
-            logger.info(
-                    "The following resources have no reference or is too old, will be cleaned from metadata store: \n");
+            logger.info("The following resources have no reference or is too old, will be cleaned from metadata store: \n");
 
             for (String s : toDeleteResource) {
                 logger.info(s);
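
For readers skimming the reverted lines above, the OptionBuilder chains and the jobThreshold fallback come from Apache Commons CLI. The following standalone sketch shows the same pattern outside of AbstractHadoopJob; the class name, the GnuParser choice, and the surrounding main method are illustrative assumptions, only the fluent builder calls and the 30-day default are taken from the diff:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;

    public class CleanupOptionsDemo {
        @SuppressWarnings("static-access")
        public static void main(String[] args) throws Exception {
            // Same fluent commons-cli builder the reverted lines restore to a single line.
            Option jobThreshold = OptionBuilder.withArgName("jobThreshold").hasArg().isRequired(false)
                    .withDescription("Specify how many days of job metadata keeping. Default 30 days")
                    .create("jobThreshold");

            Options options = new Options();
            options.addOption(jobThreshold);
            CommandLine cmd = new GnuParser().parse(options, args);

            // Mirrors the jobThresholdDay logic in MetadataCleanupJob: fall back to 30 days.
            int days = cmd.hasOption("jobThreshold")
                    ? Integer.parseInt(cmd.getOptionValue("jobThreshold"))
                    : 30;
            long thresholdMillis = days * 24 * 3600 * 1000L; // days -> milliseconds
            System.out.println("Keeping job metadata newer than " + thresholdMillis + " ms");
        }
    }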

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
index b18f812..ca4c7e1 100644
--- a/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
@@ -54,13 +54,10 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(MrJobInfoExtractor.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_DETAILS = OptionBuilder.withArgName("includeTasks").hasArg()
-            .isRequired(false).withDescription("Specify whether to include mr task details to extract. Default true.")
-            .create("includeTasks");
+    private static final Option OPTION_INCLUDE_DETAILS = OptionBuilder.withArgName("includeTasks").hasArg().isRequired(false).withDescription("Specify whether to include mr task details to extract. Default true.").create("includeTasks");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_MR_JOB_ID = OptionBuilder.withArgName("mrJobId").hasArg().isRequired(false)
-            .withDescription("Specify MR Job Id").create("mrJobId");
+    private static final Option OPTION_MR_JOB_ID = OptionBuilder.withArgName("mrJobId").hasArg().isRequired(false).withDescription("Specify MR Job Id").create("mrJobId");
 
     private static final int HTTP_RETRY = 3;
 
@@ -95,13 +92,11 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
         logger.info("kylin.engine.mr.yarn-check-status-url" + " is not set, read from hadoop configuration");
 
         Configuration conf = HadoopUtil.getCurrentConfiguration();
-        String rmWebHost = HAUtil.getConfValueForRMInstance(YarnConfiguration.RM_WEBAPP_ADDRESS,
-                YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS, conf);
+        String rmWebHost = HAUtil.getConfValueForRMInstance(YarnConfiguration.RM_WEBAPP_ADDRESS, YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS, conf);
         if (HAUtil.isHAEnabled(conf)) {
             YarnConfiguration yarnConf = new YarnConfiguration(conf);
             String active = RMHAUtils.findActiveRMHAId(yarnConf);
-            rmWebHost = HAUtil.getConfValueForRMInstance(HAUtil.addSuffix(YarnConfiguration.RM_WEBAPP_ADDRESS, active),
-                    YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS, yarnConf);
+            rmWebHost = HAUtil.getConfValueForRMInstance(HAUtil.addSuffix(YarnConfiguration.RM_WEBAPP_ADDRESS, active), YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS, yarnConf);
         }
         if (StringUtils.isEmpty(rmWebHost)) {
             return;
@@ -112,8 +107,7 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
         Matcher m = pattern.matcher(rmWebHost);
         Preconditions.checkArgument(m.matches(), "Yarn master URL not found.");
         yarnMasterUrlBase = rmWebHost;
-        jobHistoryUrlBase = m.group(1)
-                + HAUtil.getConfValueForRMInstance("mapreduce.jobhistory.webapp.address", m.group(2) + ":19888", conf);
+        jobHistoryUrlBase = m.group(1) + HAUtil.getConfValueForRMInstance("mapreduce.jobhistory.webapp.address", m.group(2) + ":19888", conf);
     }
 
     private String getHttpResponse(String url) {
@@ -137,8 +131,7 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
         return msg;
     }
 
-    private void extractTaskDetail(String taskId, String user, File exportDir, String taskUrl, String urlBase)
-            throws IOException {
+    private void extractTaskDetail(String taskId, String user, File exportDir, String taskUrl, String urlBase) throws IOException {
         try {
             if (StringUtils.isEmpty(taskId)) {
                 return;
@@ -152,8 +145,7 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
             JsonNode taskAttempt = new ObjectMapper().readTree(taskInfo).path("task").path("successfulAttempt");
             String succAttemptId = taskAttempt.textValue();
 
-            String attemptInfo = saveHttpResponseQuietly(new File(destDir, "task_attempts.json"),
-                    taskUrlBase + "/attempts/" + succAttemptId);
+            String attemptInfo = saveHttpResponseQuietly(new File(destDir, "task_attempts.json"), taskUrlBase + "/attempts/" + succAttemptId);
             JsonNode attemptAttempt = new ObjectMapper().readTree(attemptInfo).path("taskAttempt");
             String containerId = attemptAttempt.get("assignedContainerId").textValue();
             String nodeId = nodeInfoMap.get(attemptAttempt.get("nodeHttpAddress").textValue());
@@ -162,8 +154,7 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
             saveHttpResponseQuietly(new File(destDir, "task_counters.json"), taskUrlBase + "/counters");
 
             // save task logs
-            String logUrl = urlBase + "/jobhistory/logs/" + nodeId + "/" + containerId + "/" + succAttemptId + "/"
-                    + user + "/syslog/?start=0";
+            String logUrl = urlBase + "/jobhistory/logs/" + nodeId + "/" + containerId + "/" + succAttemptId + "/" + user + "/syslog/?start=0";
             logger.debug("Fetch task log from url: " + logUrl);
 
             saveHttpResponseQuietly(new File(destDir, "task_log.txt"), logUrl);
@@ -190,8 +181,7 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
     @Override
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         try {
-            boolean includeTaskDetails = optionsHelper.hasOption(OPTION_INCLUDE_DETAILS)
-                    ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_DETAILS)) : true;
+            boolean includeTaskDetails = optionsHelper.hasOption(OPTION_INCLUDE_DETAILS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_DETAILS)) : true;
             String mrJobId = optionsHelper.getOptionValue(OPTION_MR_JOB_ID);
             extractRestCheckUrl();
 
@@ -359,4 +349,4 @@ public class MrJobInfoExtractor extends AbstractInfoExtractor {
             logger.warn("Failed to get mr tasks rest response.", e);
         }
     }
-}
+}
\ No newline at end of file
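
The extractRestCheckUrl() hunks above rebuild the job-history URL from the ResourceManager web address. A minimal sketch of that derivation under two assumptions: the scheme/host regex below stands in for the real pattern (which is defined outside this hunk), and the HAUtil.getConfValueForRMInstance lookup of mapreduce.jobhistory.webapp.address is skipped in favor of the host:19888 default shown in the diff:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class JobHistoryUrlDemo {
        public static void main(String[] args) {
            // Hypothetical RM web address; the real value comes from yarn-site.xml,
            // or from the active RM when HA is enabled.
            String rmWebHost = "http://rm-host.example.com:8088";

            // Assumed pattern: group(1) = scheme, group(2) = host, matching how the
            // diff uses m.group(1) and m.group(2).
            Pattern pattern = Pattern.compile("(https?://)([^:/]+).*");
            Matcher m = pattern.matcher(rmWebHost);
            if (!m.matches()) {
                throw new IllegalArgumentException("Yarn master URL not found.");
            }

            // Mirrors the reverted line: scheme + history-server address, default port 19888.
            String jobHistoryUrlBase = m.group(1) + m.group(2) + ":19888";
            System.out.println(jobHistoryUrlBase); // http://rm-host.example.com:19888
        }
    }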

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java b/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
index 70c3be2..2e357da 100644
--- a/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
+++ b/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
@@ -19,18 +19,17 @@
 
 package org.apache.kylin.tool.util;
 
+import com.google.common.collect.Maps;
+import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ResourceStore;
+
 import java.io.File;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Map;
 
-import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceStore;
-
-import com.google.common.collect.Maps;
-
 public class ToolUtil {
     public static String getConfFolder() {
         final String CONF = "conf";

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/test/java/org/apache/kylin/tool/CubeMetaIngesterTest.java
----------------------------------------------------------------------
diff --git a/tool/src/test/java/org/apache/kylin/tool/CubeMetaIngesterTest.java b/tool/src/test/java/org/apache/kylin/tool/CubeMetaIngesterTest.java
index 063a145..ff12084 100644
--- a/tool/src/test/java/org/apache/kylin/tool/CubeMetaIngesterTest.java
+++ b/tool/src/test/java/org/apache/kylin/tool/CubeMetaIngesterTest.java
@@ -58,15 +58,13 @@ public class CubeMetaIngesterTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testHappyIngest() {
-        String srcPath = Thread.currentThread().getContextClassLoader().getResource("cloned_cube_and_model.zip")
-                .getPath();
+        String srcPath = Thread.currentThread().getContextClassLoader().getResource("cloned_cube_and_model.zip").getPath();
         CubeMetaIngester.main(new String[] { "-project", "default", "-srcPath", srcPath });
 
         ProjectInstance project = ProjectManager.getInstance(KylinConfig.getInstanceFromEnv()).getProject("default");
         Assert.assertEquals(1, Collections.frequency(project.getTables(), "DEFAULT.TEST_KYLIN_FACT"));
         Assert.assertTrue(project.getModels().contains("cloned_model"));
-        Assert.assertTrue(
-                project.getRealizationEntries().contains(RealizationEntry.create(RealizationType.CUBE, "cloned_cube")));
+        Assert.assertTrue(project.getRealizationEntries().contains(RealizationEntry.create(RealizationType.CUBE, "cloned_cube")));
 
         MetadataManager.clearCache();
         CubeDescManager.clearCache();
@@ -83,8 +81,7 @@ public class CubeMetaIngesterTest extends LocalFileMetadataTestCase {
         ProjectInstance project = ProjectManager.getInstance(KylinConfig.getInstanceFromEnv()).getProject("default");
         Assert.assertEquals(1, Collections.frequency(project.getTables(), "SSB.CUSTOMER"));
         Assert.assertTrue(project.getModels().contains("benchmark_model"));
-        Assert.assertTrue(project.getRealizationEntries()
-                .contains(RealizationEntry.create(RealizationType.CUBE, "benchmark_cube")));
+        Assert.assertTrue(project.getRealizationEntries().contains(RealizationEntry.create(RealizationType.CUBE, "benchmark_cube")));
 
         MetadataManager.clearCache();
         CubeDescManager.clearCache();
@@ -102,8 +99,7 @@ public class CubeMetaIngesterTest extends LocalFileMetadataTestCase {
             @Override
             public boolean matches(Object item) {
                 if (item instanceof IllegalStateException) {
-                    if (((IllegalStateException) item).getMessage()
-                            .equals("Already exist a model called test_kylin_inner_join_model_desc")) {
+                    if (((IllegalStateException) item).getMessage().equals("Already exist a model called test_kylin_inner_join_model_desc")) {
                         return true;
                     }
                 }
@@ -129,4 +125,4 @@ public class CubeMetaIngesterTest extends LocalFileMetadataTestCase {
         CubeMetaIngester.main(new String[] { "-project", "Xdefault", "-srcPath", srcPath });
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/test/java/org/apache/kylin/tool/HybridCubeCLITest.java
----------------------------------------------------------------------
diff --git a/tool/src/test/java/org/apache/kylin/tool/HybridCubeCLITest.java b/tool/src/test/java/org/apache/kylin/tool/HybridCubeCLITest.java
index 68f9ff8..f4ed595 100644
--- a/tool/src/test/java/org/apache/kylin/tool/HybridCubeCLITest.java
+++ b/tool/src/test/java/org/apache/kylin/tool/HybridCubeCLITest.java
@@ -50,8 +50,7 @@ public class HybridCubeCLITest extends LocalFileMetadataTestCase {
     public void test1Create() throws IOException {
         HybridManager hybridManager = HybridManager.getInstance(KylinConfig.getInstanceFromEnv());
         Assert.assertNull(hybridManager.getHybridInstance("ssb_hybrid"));
-        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes",
-                "ssb_cube1,ssb_cube2", "-action", "create" });
+        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes", "ssb_cube1,ssb_cube2", "-action", "create" });
 
         HybridInstance hybridInstance = hybridManager.getHybridInstance("ssb_hybrid");
         Assert.assertNotNull(hybridInstance);
@@ -63,15 +62,13 @@ public class HybridCubeCLITest extends LocalFileMetadataTestCase {
     public void test2Update() throws IOException {
         HybridManager hybridManager = HybridManager.getInstance(KylinConfig.getInstanceFromEnv());
         Assert.assertNull(hybridManager.getHybridInstance("ssb_hybrid"));
-        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes",
-                "ssb_cube1,ssb_cube2", "-action", "create" });
+        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes", "ssb_cube1,ssb_cube2", "-action", "create" });
 
         HybridInstance hybridInstance = hybridManager.getHybridInstance("ssb_hybrid");
         Assert.assertNotNull(hybridManager.getHybridInstance("ssb_hybrid"));
         Assert.assertEquals("ssb_hybrid", hybridInstance.getName());
         Assert.assertEquals(2, hybridInstance.getRealizationEntries().size());
-        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes",
-                "ssb_cube1,ssb_cube2,ssb_cube3", "-action", "update" });
+        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes", "ssb_cube1,ssb_cube2,ssb_cube3", "-action", "update" });
 
         hybridInstance = hybridManager.getHybridInstance("ssb_hybrid");
         Assert.assertNotNull(hybridInstance);
@@ -83,16 +80,13 @@ public class HybridCubeCLITest extends LocalFileMetadataTestCase {
     public void test3Delete() throws IOException {
         HybridManager hybridManager = HybridManager.getInstance(KylinConfig.getInstanceFromEnv());
         Assert.assertNull(hybridManager.getHybridInstance("ssb_hybrid"));
-        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes",
-                "ssb_cube1,ssb_cube2", "-action", "create" });
+        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-cubes", "ssb_cube1,ssb_cube2", "-action", "create" });
         Assert.assertNotNull(hybridManager.getHybridInstance("ssb_hybrid"));
-        HybridCubeCLI.main(
-                new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-action", "delete" });
+        HybridCubeCLI.main(new String[] { "-name", "ssb_hybrid", "-project", "default", "-model", "ssb", "-action", "delete" });
 
         HybridInstance hybridInstance = hybridManager.getHybridInstance("ssb_hybrid");
         Assert.assertNull(hybridInstance);
-        Assert.assertEquals(0, ProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .findProjects(RealizationType.HYBRID, "ssb_hybrid").size());
+        Assert.assertEquals(0, ProjectManager.getInstance(KylinConfig.getInstanceFromEnv()).findProjects(RealizationType.HYBRID, "ssb_hybrid").size());
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
----------------------------------------------------------------------
diff --git a/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java b/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
index 61a4b52..7d1e248 100644
--- a/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
+++ b/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
@@ -55,9 +55,7 @@ public class KylinConfigCLITest extends LocalFileMetadataTestCase {
         KylinConfigCLI.main(new String[] { "kylin.cube.engine." });
 
         String val = FileUtils.readFileToString(f, Charset.defaultCharset()).trim();
-        assertEquals(
-                "2=org.apache.kylin.engine.mr.MRBatchCubingEngine2\n0=org.apache.kylin.engine.mr.MRBatchCubingEngine",
-                val);
+        assertEquals("2=org.apache.kylin.engine.mr.MRBatchCubingEngine2\n0=org.apache.kylin.engine.mr.MRBatchCubingEngine", val);
 
         FileUtils.forceDelete(f);
         System.setOut(o);
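
The assertion in this test implies that KylinConfigCLI, given the prefix "kylin.cube.engine.", prints every matching key with the prefix stripped. A self-contained sketch of that prefix-listing behavior, using a plain java.util.Properties object as a stand-in for kylin.properties (the two engine entries are copied from the test's expected output; the metadata entry is a made-up non-matching key):

    import java.util.Map;
    import java.util.Properties;

    public class PrefixConfigDemo {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("kylin.cube.engine.0", "org.apache.kylin.engine.mr.MRBatchCubingEngine");
            props.setProperty("kylin.cube.engine.2", "org.apache.kylin.engine.mr.MRBatchCubingEngine2");
            props.setProperty("kylin.metadata.url", "kylin_metadata@hbase"); // not printed below

            String prefix = "kylin.cube.engine."; // same argument the test passes to KylinConfigCLI.main
            for (Map.Entry<Object, Object> e : props.entrySet()) {
                String key = (String) e.getKey();
                if (key.startsWith(prefix)) {
                    // Print the key remainder and value, matching the "2=..." / "0=..." lines
                    // asserted by the test (iteration order of Properties is not guaranteed).
                    System.out.println(key.substring(prefix.length()) + "=" + e.getValue());
                }
            }
        }
    }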


[07/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
index d98fc33..b8f3700 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
@@ -58,8 +58,7 @@ public class LegacyUserService implements UserDetailsManager {
 
     public static final String PWD_PREFIX = "PWD:";
 
-    private Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(
-            UserGrantedAuthority[].class);
+    private Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(UserGrantedAuthority[].class);
 
     private String userTableName = null;
 
@@ -99,8 +98,7 @@ public class LegacyUserService implements UserDetailsManager {
 
         String username = Bytes.toString(result.getRow());
 
-        byte[] valueBytes = result.getValue(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY),
-                Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
+        byte[] valueBytes = result.getValue(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
         UserGrantedAuthority[] deserialized = ugaSerializer.deserialize(valueBytes);
 
         String password = "";
@@ -153,8 +151,7 @@ public class LegacyUserService implements UserDetailsManager {
             Pair<byte[], byte[]> pair = userToHBaseRow(user);
             Put put = new Put(pair.getKey());
 
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY),
-                    Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
 
             htable.put(put);
         } catch (IOException e) {
@@ -215,8 +212,7 @@ public class LegacyUserService implements UserDetailsManager {
 
     public List<UserDetails> listUsers() {
         Scan s = new Scan();
-        s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY),
-                Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
+        s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
 
         List<UserDetails> all = new ArrayList<UserDetails>();
         Table htable = null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
index 45774e3..701717d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -84,8 +84,7 @@ public class ModelService extends BasicService {
 
         List<DataModelDesc> filterModels = new ArrayList<DataModelDesc>();
         for (DataModelDesc modelDesc : models) {
-            boolean isModelMatch = (null == modelName) || modelName.length() == 0
-                    || modelDesc.getName().toLowerCase().equals(modelName.toLowerCase());
+            boolean isModelMatch = (null == modelName) || modelName.length() == 0 || modelDesc.getName().toLowerCase().equals(modelName.toLowerCase());
 
             if (isModelMatch) {
                 filterModels.add(modelDesc);
@@ -95,8 +94,7 @@ public class ModelService extends BasicService {
         return filterModels;
     }
 
-    public List<DataModelDesc> getModels(final String modelName, final String projectName, final Integer limit,
-            final Integer offset) throws IOException {
+    public List<DataModelDesc> getModels(final String modelName, final String projectName, final Integer limit, final Integer offset) throws IOException {
 
         List<DataModelDesc> modelDescs = listAllModels(modelName, projectName);
 
@@ -129,16 +127,14 @@ public class ModelService extends BasicService {
         return createdDesc;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public DataModelDesc updateModelAndDesc(DataModelDesc desc) throws IOException {
 
         getMetadataManager().updateDataModelDesc(desc);
         return desc;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public void dropModel(DataModelDesc desc) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -155,24 +151,21 @@ public class ModelService extends BasicService {
         accessService.clean(desc, true);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public boolean isTableInAnyModel(String tableName) {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
         tableName = dbTableName[0] + "." + dbTableName[1];
         return getMetadataManager().isTableInAnyModel(tableName);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public boolean isTableInModel(String tableName, String projectName) throws IOException {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
         tableName = dbTableName[0] + "." + dbTableName[1];
         return getMetadataManager().isTableInModel(tableName, projectName);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public List<String> getModelsUsingTable(String tableName, String projectName) throws IOException {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
         tableName = dbTableName[0] + "." + dbTableName[1];
@@ -349,8 +342,7 @@ public class ModelService extends BasicService {
         return youngerSelf;
     }
 
-    public DataModelDesc updateModelToResourceStore(DataModelDesc modelDesc, String projectName, boolean createNew,
-            boolean isDraft) throws IOException {
+    public DataModelDesc updateModelToResourceStore(DataModelDesc modelDesc, String projectName, boolean createNew, boolean isDraft) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         if (createNew) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index e2bf821..d4dd220 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -77,24 +77,20 @@ public class ProjectService extends BasicService {
             throw new BadRequestException(String.format(msg.getPROJECT_ALREADY_EXIST(), projectName));
         }
         String owner = SecurityContextHolder.getContext().getAuthentication().getName();
-        ProjectInstance createdProject = getProjectManager().createProject(projectName, owner, description,
-                overrideProps);
+        ProjectInstance createdProject = getProjectManager().createProject(projectName, owner, description, overrideProps);
         accessService.init(createdProject, AclPermission.ADMINISTRATION);
         logger.debug("New project created.");
 
         return createdProject;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#currentProject, 'ADMINISTRATION') or hasPermission(#currentProject, 'MANAGEMENT')")
-    public ProjectInstance updateProject(ProjectInstance newProject, ProjectInstance currentProject)
-            throws IOException {
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#currentProject, 'ADMINISTRATION') or hasPermission(#currentProject, 'MANAGEMENT')")
+    public ProjectInstance updateProject(ProjectInstance newProject, ProjectInstance currentProject) throws IOException {
         String newProjectName = newProject.getName();
         String newDescription = newProject.getDescription();
         LinkedHashMap<String, String> overrideProps = newProject.getOverrideKylinProps();
 
-        ProjectInstance updatedProject = getProjectManager().updateProject(currentProject, newProjectName,
-                newDescription, overrideProps);
+        ProjectInstance updatedProject = getProjectManager().updateProject(currentProject, newProjectName, newDescription, overrideProps);
 
         logger.debug("Project updated.");
 
@@ -125,8 +121,7 @@ public class ProjectService extends BasicService {
         return projects.subList(coffset, coffset + climit);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#project, 'ADMINISTRATION') or hasPermission(#project, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#project, 'ADMINISTRATION') or hasPermission(#project, 'MANAGEMENT')")
     public void deleteProject(String projectName, ProjectInstance project) throws IOException {
         getProjectManager().dropProject(projectName);
 
@@ -203,5 +198,5 @@ public class ProjectService extends BasicService {
 
         }
         return readableProjects;
-    }
+}
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index c4757fb..33fb614 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -243,8 +243,7 @@ public class QueryService extends BasicService {
             Get get = new Get(Bytes.toBytes(creator));
             get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
             Result result = htable.get(get);
-            Query[] query = querySerializer
-                    .deserialize(result.getValue(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN)));
+            Query[] query = querySerializer.deserialize(result.getValue(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN)));
 
             if (null != query) {
                 queries.addAll(Arrays.asList(query));
@@ -319,16 +318,14 @@ public class QueryService extends BasicService {
                 return;
             }
         } catch (AccessDeniedException e) {
-            logger.warn(
-                    "Current user {} has no READ permission on current project {}, please ask Administrator for permission granting.");
+            logger.warn("Current user {} has no READ permission on current project {}, please ask Administrator for permission granting.");
             //just continue
         }
 
         String realizationsStr = sqlResponse.getCube();//CUBE[name=abc],HYBRID[name=xyz]
 
         if (StringUtils.isEmpty(realizationsStr)) {
-            throw new AccessDeniedException(
-                    "Ad-hoc query requires having READ permission on project, please ask Administrator to grant you permissions");
+            throw new AccessDeniedException("Ad-hoc query requires having READ permission on project, please ask Administrator to grant you permissions");
         }
 
         String[] splits = StringUtils.split(realizationsStr, ",");
@@ -373,8 +370,7 @@ public class QueryService extends BasicService {
 
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (!(Constant.SERVER_MODE_QUERY.equals(serverMode.toLowerCase())
-                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase()))) {
+        if (!(Constant.SERVER_MODE_QUERY.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase()))) {
             throw new BadRequestException(String.format(msg.getQUERY_NOT_ALLOWED(), serverMode));
         }
         if (StringUtils.isBlank(sqlRequest.getProject())) {
@@ -400,8 +396,7 @@ public class QueryService extends BasicService {
             long startTime = System.currentTimeMillis();
 
             SQLResponse sqlResponse = null;
-            boolean queryCacheEnabled = checkCondition(kylinConfig.isQueryCacheEnabled(),
-                    "query cache disabled in KylinConfig") && //
+            boolean queryCacheEnabled = checkCondition(kylinConfig.isQueryCacheEnabled(), "query cache disabled in KylinConfig") && //
                     checkCondition(!BackdoorToggles.getDisableCache(), "query cache disabled in BackdoorToggles");
 
             if (queryCacheEnabled) {
@@ -417,20 +412,12 @@ public class QueryService extends BasicService {
                     long scanBytesThreshold = kylinConfig.getQueryScanBytesCacheThreshold();
                     sqlResponse.setDuration(System.currentTimeMillis() - startTime);
                     logger.info("Stats of SQL response: isException: {}, duration: {}, total scan count {}", //
-                            String.valueOf(sqlResponse.getIsException()), String.valueOf(sqlResponse.getDuration()),
-                            String.valueOf(sqlResponse.getTotalScanCount()));
+                            String.valueOf(sqlResponse.getIsException()), String.valueOf(sqlResponse.getDuration()), String.valueOf(sqlResponse.getTotalScanCount()));
                     if (checkCondition(queryCacheEnabled, "query cache is disabled") //
                             && checkCondition(!sqlResponse.getIsException(), "query has exception") //
-                            && checkCondition(
-                                    sqlResponse.getDuration() > durationThreshold
-                                            || sqlResponse.getTotalScanCount() > scanCountThreshold
-                                            || sqlResponse.getTotalScanBytes() > scanBytesThreshold, //
-                                    "query is too lightweight with duration: {} (threshold {}), scan count: {} (threshold {}), scan bytes: {} (threshold {})",
-                                    sqlResponse.getDuration(), durationThreshold, sqlResponse.getTotalScanCount(),
-                                    scanCountThreshold, sqlResponse.getTotalScanBytes(), scanBytesThreshold)
-                            && checkCondition(sqlResponse.getResults().size() < kylinConfig.getLargeQueryThreshold(),
-                                    "query response is too large: {} ({})", sqlResponse.getResults().size(),
-                                    kylinConfig.getLargeQueryThreshold())) {
+                            && checkCondition(sqlResponse.getDuration() > durationThreshold || sqlResponse.getTotalScanCount() > scanCountThreshold || sqlResponse.getTotalScanBytes() > scanBytesThreshold, //
+                                    "query is too lightweight with duration: {} (threshold {}), scan count: {} (threshold {}), scan bytes: {} (threshold {})", sqlResponse.getDuration(), durationThreshold, sqlResponse.getTotalScanCount(), scanCountThreshold, sqlResponse.getTotalScanBytes(), scanBytesThreshold)
+                            && checkCondition(sqlResponse.getResults().size() < kylinConfig.getLargeQueryThreshold(), "query response is too large: {} ({})", sqlResponse.getResults().size(), kylinConfig.getLargeQueryThreshold())) {
                         cacheManager.getCache(SUCCESS_QUERY_CACHE).put(new Element(sqlRequest, sqlResponse));
                     }
 
@@ -450,8 +437,7 @@ public class QueryService extends BasicService {
                 sqlResponse.setTotalScanCount(queryContext.getScannedRows());
                 sqlResponse.setTotalScanBytes(queryContext.getScannedBytes());
 
-                if (queryCacheEnabled && e.getCause() != null
-                        && e.getCause() instanceof ResourceLimitExceededException) {
+                if (queryCacheEnabled && e.getCause() != null && e.getCause() instanceof ResourceLimitExceededException) {
                     Cache exceptionCache = cacheManager.getCache(EXCEPTION_QUERY_CACHE);
                     exceptionCache.put(new Element(sqlRequest, sqlResponse));
                 }
@@ -500,8 +486,7 @@ public class QueryService extends BasicService {
 
     private SQLResponse queryWithSqlMassage(SQLRequest sqlRequest) throws Exception {
         String userInfo = SecurityContextHolder.getContext().getAuthentication().getName();
-        final Collection<? extends GrantedAuthority> grantedAuthorities = SecurityContextHolder.getContext()
-                .getAuthentication().getAuthorities();
+        final Collection<? extends GrantedAuthority> grantedAuthorities = SecurityContextHolder.getContext().getAuthentication().getAuthorities();
         for (GrantedAuthority grantedAuthority : grantedAuthorities) {
             userInfo += ",";
             userInfo += grantedAuthority.getAuthority();
@@ -556,12 +541,9 @@ public class QueryService extends BasicService {
                 String schemaName = JDBCTableMeta.getString(2);
 
                 // Not every JDBC data provider offers full 10 columns, e.g., PostgreSQL has only 5
-                TableMeta tblMeta = new TableMeta(catalogName == null ? Constant.FakeCatalogName : catalogName,
-                        schemaName == null ? Constant.FakeSchemaName : schemaName, JDBCTableMeta.getString(3),
-                        JDBCTableMeta.getString(4), JDBCTableMeta.getString(5), null, null, null, null, null);
+                TableMeta tblMeta = new TableMeta(catalogName == null ? Constant.FakeCatalogName : catalogName, schemaName == null ? Constant.FakeSchemaName : schemaName, JDBCTableMeta.getString(3), JDBCTableMeta.getString(4), JDBCTableMeta.getString(5), null, null, null, null, null);
 
-                if (!cubedOnly
-                        || getProjectManager().isExposedTable(project, schemaName + "." + tblMeta.getTABLE_NAME())) {
+                if (!cubedOnly || getProjectManager().isExposedTable(project, schemaName + "." + tblMeta.getTABLE_NAME())) {
                     tableMetas.add(tblMeta);
                     tableMap.put(tblMeta.getTABLE_SCHEM() + "#" + tblMeta.getTABLE_NAME(), tblMeta);
                 }
@@ -574,18 +556,9 @@ public class QueryService extends BasicService {
                 String schemaName = columnMeta.getString(2);
 
                 // kylin(optiq) is not strictly following JDBC specification
-                ColumnMeta colmnMeta = new ColumnMeta(catalogName == null ? Constant.FakeCatalogName : catalogName,
-                        schemaName == null ? Constant.FakeSchemaName : schemaName, columnMeta.getString(3),
-                        columnMeta.getString(4), columnMeta.getInt(5), columnMeta.getString(6), columnMeta.getInt(7),
-                        getInt(columnMeta.getString(8)), columnMeta.getInt(9), columnMeta.getInt(10),
-                        columnMeta.getInt(11), columnMeta.getString(12), columnMeta.getString(13),
-                        getInt(columnMeta.getString(14)), getInt(columnMeta.getString(15)), columnMeta.getInt(16),
-                        columnMeta.getInt(17), columnMeta.getString(18), columnMeta.getString(19),
-                        columnMeta.getString(20), columnMeta.getString(21), getShort(columnMeta.getString(22)),
-                        columnMeta.getString(23));
-
-                if (!cubedOnly || getProjectManager().isExposedColumn(project,
-                        schemaName + "." + colmnMeta.getTABLE_NAME(), colmnMeta.getCOLUMN_NAME())) {
+                ColumnMeta colmnMeta = new ColumnMeta(catalogName == null ? Constant.FakeCatalogName : catalogName, schemaName == null ? Constant.FakeSchemaName : schemaName, columnMeta.getString(3), columnMeta.getString(4), columnMeta.getInt(5), columnMeta.getString(6), columnMeta.getInt(7), getInt(columnMeta.getString(8)), columnMeta.getInt(9), columnMeta.getInt(10), columnMeta.getInt(11), columnMeta.getString(12), columnMeta.getString(13), getInt(columnMeta.getString(14)), getInt(columnMeta.getString(15)), columnMeta.getInt(16), columnMeta.getInt(17), columnMeta.getString(18), columnMeta.getString(19), columnMeta.getString(20), columnMeta.getString(21), getShort(columnMeta.getString(22)), columnMeta.getString(23));
+
+                if (!cubedOnly || getProjectManager().isExposedColumn(project, schemaName + "." + colmnMeta.getTABLE_NAME(), colmnMeta.getCOLUMN_NAME())) {
                     tableMap.get(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME()).addColumn(colmnMeta);
                 }
             }
@@ -604,8 +577,7 @@ public class QueryService extends BasicService {
         return getMetadataV2(getCubeManager(), project, true);
     }
 
-    protected List<TableMetaWithType> getMetadataV2(CubeManager cubeMgr, String project, boolean cubedOnly)
-            throws SQLException, IOException {
+    protected List<TableMetaWithType> getMetadataV2(CubeManager cubeMgr, String project, boolean cubedOnly) throws SQLException, IOException {
         //Message msg = MsgPicker.getMsg();
 
         Connection conn = null;
@@ -632,13 +604,9 @@ public class QueryService extends BasicService {
                 String schemaName = JDBCTableMeta.getString(2);
 
                 // Not every JDBC data provider offers full 10 columns, e.g., PostgreSQL has only 5
-                TableMetaWithType tblMeta = new TableMetaWithType(
-                        catalogName == null ? Constant.FakeCatalogName : catalogName,
-                        schemaName == null ? Constant.FakeSchemaName : schemaName, JDBCTableMeta.getString(3),
-                        JDBCTableMeta.getString(4), JDBCTableMeta.getString(5), null, null, null, null, null);
+                TableMetaWithType tblMeta = new TableMetaWithType(catalogName == null ? Constant.FakeCatalogName : catalogName, schemaName == null ? Constant.FakeSchemaName : schemaName, JDBCTableMeta.getString(3), JDBCTableMeta.getString(4), JDBCTableMeta.getString(5), null, null, null, null, null);
 
-                if (!cubedOnly
-                        || getProjectManager().isExposedTable(project, schemaName + "." + tblMeta.getTABLE_NAME())) {
+                if (!cubedOnly || getProjectManager().isExposedTable(project, schemaName + "." + tblMeta.getTABLE_NAME())) {
                     tableMetas.add(tblMeta);
                     tableMap.put(tblMeta.getTABLE_SCHEM() + "#" + tblMeta.getTABLE_NAME(), tblMeta);
                 }
@@ -651,22 +619,11 @@ public class QueryService extends BasicService {
                 String schemaName = columnMeta.getString(2);
 
                 // kylin(optiq) is not strictly following JDBC specification
-                ColumnMetaWithType colmnMeta = new ColumnMetaWithType(
-                        catalogName == null ? Constant.FakeCatalogName : catalogName,
-                        schemaName == null ? Constant.FakeSchemaName : schemaName, columnMeta.getString(3),
-                        columnMeta.getString(4), columnMeta.getInt(5), columnMeta.getString(6), columnMeta.getInt(7),
-                        getInt(columnMeta.getString(8)), columnMeta.getInt(9), columnMeta.getInt(10),
-                        columnMeta.getInt(11), columnMeta.getString(12), columnMeta.getString(13),
-                        getInt(columnMeta.getString(14)), getInt(columnMeta.getString(15)), columnMeta.getInt(16),
-                        columnMeta.getInt(17), columnMeta.getString(18), columnMeta.getString(19),
-                        columnMeta.getString(20), columnMeta.getString(21), getShort(columnMeta.getString(22)),
-                        columnMeta.getString(23));
-
-                if (!cubedOnly || getProjectManager().isExposedColumn(project,
-                        schemaName + "." + colmnMeta.getTABLE_NAME(), colmnMeta.getCOLUMN_NAME())) {
+                ColumnMetaWithType colmnMeta = new ColumnMetaWithType(catalogName == null ? Constant.FakeCatalogName : catalogName, schemaName == null ? Constant.FakeSchemaName : schemaName, columnMeta.getString(3), columnMeta.getString(4), columnMeta.getInt(5), columnMeta.getString(6), columnMeta.getInt(7), getInt(columnMeta.getString(8)), columnMeta.getInt(9), columnMeta.getInt(10), columnMeta.getInt(11), columnMeta.getString(12), columnMeta.getString(13), getInt(columnMeta.getString(14)), getInt(columnMeta.getString(15)), columnMeta.getInt(16), columnMeta.getInt(17), columnMeta.getString(18), columnMeta.getString(19), columnMeta.getString(20), columnMeta.getString(21), getShort(columnMeta.getString(22)), columnMeta.getString(23));
+
+                if (!cubedOnly || getProjectManager().isExposedColumn(project, schemaName + "." + colmnMeta.getTABLE_NAME(), colmnMeta.getCOLUMN_NAME())) {
                     tableMap.get(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME()).addColumn(colmnMeta);
-                    columnMap.put(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME() + "#"
-                            + colmnMeta.getCOLUMN_NAME(), colmnMeta);
+                    columnMap.put(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME() + "#" + colmnMeta.getCOLUMN_NAME(), colmnMeta);
                 }
             }
 
@@ -729,8 +686,7 @@ public class QueryService extends BasicService {
                 List<ModelDimensionDesc> dimensions = dataModelDesc.getDimensions();
                 for (ModelDimensionDesc dimension : dimensions) {
                     for (String column : dimension.getColumns()) {
-                        String columnIdentity = (dataModelDesc.findTable(dimension.getTable()).getTableIdentity() + "."
-                                + column).replace('.', '#');
+                        String columnIdentity = (dataModelDesc.findTable(dimension.getTable()).getTableIdentity() + "." + column).replace('.', '#');
                         if (columnMap.containsKey(columnIdentity)) {
                             columnMap.get(columnIdentity).getTYPE().add(ColumnMetaWithType.columnTypeEnum.DIMENSION);
                         } else {
@@ -742,8 +698,7 @@ public class QueryService extends BasicService {
 
                 String[] measures = dataModelDesc.getMetrics();
                 for (String measure : measures) {
-                    String columnIdentity = (dataModelDesc.findTable(measure.substring(0, measure.indexOf(".")))
-                            .getTableIdentity() + measure.substring(measure.indexOf("."))).replace('.', '#');
+                    String columnIdentity = (dataModelDesc.findTable(measure.substring(0, measure.indexOf("."))).getTableIdentity() + measure.substring(measure.indexOf("."))).replace('.', '#');
                     if (columnMap.containsKey(columnIdentity)) {
                         columnMap.get(columnIdentity).getTYPE().add(ColumnMetaWithType.columnTypeEnum.MEASURE);
                     } else {
@@ -802,13 +757,7 @@ public class QueryService extends BasicService {
 
             // Fill in selected column meta
             for (int i = 1; i <= columnCount; ++i) {
-                columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
-                        metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i),
-                        metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i),
-                        metaData.getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i),
-                        metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
-                        metaData.getColumnTypeName(i), metaData.isReadOnly(i), metaData.isWritable(i),
-                        metaData.isDefinitelyWritable(i)));
+                columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i), metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
             }
 
             // fill in results
@@ -843,8 +792,7 @@ public class QueryService extends BasicService {
         }
         logger.info(logSb.toString());
 
-        SQLResponse response = new SQLResponse(columnMetas, results, cubeSb.toString(), 0, false, null, isPartialResult,
-                isAdHoc);
+        SQLResponse response = new SQLResponse(columnMetas, results, cubeSb.toString(), 0, false, null, isPartialResult, isAdHoc);
         response.setTotalScanCount(QueryContext.current().getScannedRows());
         response.setTotalScanBytes(QueryContext.current().getScannedBytes());
 
@@ -856,8 +804,7 @@ public class QueryService extends BasicService {
      * @param param
      * @throws SQLException
      */
-    private void setParam(PreparedStatement preparedState, int index, PrepareSqlRequest.StateParam param)
-            throws SQLException {
+    private void setParam(PreparedStatement preparedState, int index, PrepareSqlRequest.StateParam param) throws SQLException {
         boolean isNull = (null == param.getValue());
 
         Class<?> clazz;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index ebc1f30..adae67c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -49,8 +49,7 @@ public class StreamingService extends BasicService {
         return streamingConfigs;
     }
 
-    public List<StreamingConfig> getStreamingConfigs(final String table, final Integer limit, final Integer offset)
-            throws IOException {
+    public List<StreamingConfig> getStreamingConfigs(final String table, final Integer limit, final Integer offset) throws IOException {
 
         List<StreamingConfig> streamingConfigs;
         streamingConfigs = listAllStreamingConfigs(table);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java b/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
index 7050223..a87ddd8 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
@@ -77,9 +77,7 @@ public class TableSchemaUpdateChecker {
                 buf.append("- ").append(reason).append("\n");
             }
 
-            return new CheckResult(false,
-                    format("Found %d issue(s) with '%s':%n%s Please disable and purge related cube(s) first",
-                            reasons.size(), tableName, buf.toString()));
+            return new CheckResult(false, format("Found %d issue(s) with '%s':%n%s Please disable and purge related cube(s) first", reasons.size(), tableName, buf.toString()));
         }
     }
 
@@ -89,19 +87,18 @@ public class TableSchemaUpdateChecker {
     }
 
     private List<CubeInstance> findCubeByTable(final String fullTableName) {
-        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(),
-                new Predicate<CubeInstance>() {
-                    @Override
-                    public boolean apply(@Nullable CubeInstance cube) {
-                        if (cube == null || cube.allowBrokenDescriptor()) {
-                            return false;
-                        }
-                        DataModelDesc model = cube.getModel();
-                        if (model == null)
-                            return false;
-                        return model.containsTable(fullTableName);
-                    }
-                });
+        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(), new Predicate<CubeInstance>() {
+            @Override
+            public boolean apply(@Nullable CubeInstance cube) {
+                if (cube == null || cube.allowBrokenDescriptor()) {
+                    return false;
+                }
+                DataModelDesc model = cube.getModel();
+                if (model == null)
+                    return false;
+                return model.containsTable(fullTableName);
+            }
+        });
 
         return ImmutableList.copyOf(relatedCubes);
     }
@@ -188,8 +185,7 @@ public class TableSchemaUpdateChecker {
                 TableDesc factTable = cube.getModel().findFirstTable(fullTableName).getTableDesc();
                 List<String> violateColumns = checkAllColumnsInCube(cube, factTable, newTableDesc);
                 if (!violateColumns.isEmpty()) {
-                    issues.add(format("Column %s used in cube[%s] and model[%s], but changed in hive", violateColumns,
-                            cube.getName(), modelName));
+                    issues.add(format("Column %s used in cube[%s] and model[%s], but changed in hive", violateColumns, cube.getName(), modelName));
                 }
             }
 
@@ -198,9 +194,7 @@ public class TableSchemaUpdateChecker {
             if (cube.getModel().isLookupTable(fullTableName)) {
                 TableDesc lookupTable = cube.getModel().findFirstTable(fullTableName).getTableDesc();
                 if (!checkAllColumnsInTableDesc(lookupTable, newTableDesc)) {
-                    issues.add(
-                            format("Table '%s' is used as Lookup Table in cube[%s] and model[%s], but changed in hive",
-                                    lookupTable.getIdentity(), cube.getName(), modelName));
+                    issues.add(format("Table '%s' is used as Lookup Table in cube[%s] and model[%s], but changed in hive", lookupTable.getIdentity(), cube.getName(), modelName));
                 }
             }
         }
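
As a note on the pattern above: findCubeByTable filters every cube through a Guava Predicate and snapshots the survivors into an immutable list. A minimal, self-contained sketch of the same idiom follows; the class, method and sample data are illustrative, not Kylin code.

    import java.util.Arrays;
    import java.util.List;

    import com.google.common.base.Predicate;
    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.Iterables;

    public class GuavaFilterSketch {

        // Lazily filters the input, then copies the survivors so the result
        // can be iterated repeatedly without re-evaluating the predicate.
        static List<String> referencing(List<String> summaries, final String table) {
            Iterable<String> hits = Iterables.filter(summaries, new Predicate<String>() {
                @Override
                public boolean apply(String summary) {
                    return summary != null && summary.contains(table);
                }
            });
            return ImmutableList.copyOf(hits);
        }

        public static void main(String[] args) {
            List<String> summaries = Arrays.asList("cube_a -> DB.SALES", "cube_b -> DB.USERS");
            System.out.println(referencing(summaries, "DB.SALES")); // [cube_a -> DB.SALES]
        }
    }

The ImmutableList.copyOf call forces the lazy view to be evaluated once, which is why the caller can safely walk the result more than once.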

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java b/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
index 858535b..5cbdb76 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
@@ -144,7 +144,7 @@ public class TableService extends BasicService {
         for (Pair<TableDesc, TableExtDesc> pair : allMeta) {
             TableDesc tableDesc = pair.getFirst();
             TableExtDesc extDesc = pair.getSecond();
-
+            
             TableDesc origTable = metaMgr.getTableDesc(tableDesc.getIdentity());
             if (origTable == null) {
                 tableDesc.setUuid(UUID.randomUUID().toString());
@@ -153,7 +153,7 @@ public class TableService extends BasicService {
                 tableDesc.setUuid(origTable.getUuid());
                 tableDesc.setLastModified(origTable.getLastModified());
             }
-
+            
             TableExtDesc origExt = metaMgr.getTableExt(tableDesc.getIdentity());
             if (origExt == null) {
                 extDesc.setUuid(UUID.randomUUID().toString());
@@ -172,9 +172,8 @@ public class TableService extends BasicService {
         syncTableToProject(result, project);
         return result;
     }
-
-    public Map<String, String[]> loadHiveTables(String[] tableNames, String project, boolean isNeedProfile)
-            throws Exception {
+    
+    public Map<String, String[]> loadHiveTables(String[] tableNames, String project, boolean isNeedProfile) throws Exception {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         Map<String, String[]> result = new HashMap<String, String[]>();
 
@@ -264,8 +263,7 @@ public class TableService extends BasicService {
             rtn = true;
         }
 
-        if (tableType == 1 && !projectService.isTableInAnyProject(tableName)
-                && !modelService.isTableInAnyModel(tableName)) {
+        if (tableType == 1 && !projectService.isTableInAnyProject(tableName) && !modelService.isTableInAnyModel(tableName)) {
             StreamingConfig config = null;
             KafkaConfig kafkaConfig = null;
             try {
@@ -332,8 +330,7 @@ public class TableService extends BasicService {
                 if (cards.length > i) {
                     cardinality.put(columnDesc.getName(), Long.parseLong(cards[i]));
                 } else {
-                    logger.error("The result cardinality is not identical with hive table metadata, cardinality : "
-                            + scard + " column array length: " + cdescs.length);
+                    logger.error("The result cardinality is not identical with hive table metadata, cardinality : " + scard + " column array length: " + cdescs.length);
                     break;
                 }
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/UserGrantedAuthority.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserGrantedAuthority.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserGrantedAuthority.java
index 86d28ec..4c2a392 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserGrantedAuthority.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserGrantedAuthority.java
@@ -20,6 +20,7 @@ package org.apache.kylin.rest.service;
 
 import org.springframework.security.core.GrantedAuthority;
 
+
 public class UserGrantedAuthority implements GrantedAuthority {
     private static final long serialVersionUID = -5128905636841891058L;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/UserInfo.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserInfo.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserInfo.java
index fa74e50..644883d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserInfo.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserInfo.java
@@ -27,6 +27,7 @@ import org.springframework.security.core.userdetails.UserDetails;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 
+
 @SuppressWarnings("serial")
 public class UserInfo extends RootPersistentEntity {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index c30515e..5452543 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -134,8 +134,7 @@ public class UserService implements UserDetailsManager {
 
     public List<UserDetails> listUsers() throws IOException {
         List<UserDetails> all = new ArrayList<UserDetails>();
-        List<UserInfo> userInfos = aclStore.getAllResources(DIR_PREFIX, UserInfo.class,
-                UserInfoSerializer.getInstance());
+        List<UserInfo> userInfos = aclStore.getAllResources(DIR_PREFIX, UserInfo.class, UserInfoSerializer.getInstance());
         for (UserInfo info : userInfos) {
             all.add(wrap(info));
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/util/AclUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/AclUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/AclUtil.java
index 2488dc2..3b47288 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/AclUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/AclUtil.java
@@ -29,17 +29,13 @@ import org.springframework.stereotype.Component;
 public class AclUtil {
 
     //such method MUST NOT be called from within same class
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')"
-            + " or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'READ')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')" + " or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'READ')")
     public boolean hasCubeReadPermission(CubeInstance cube) {
         return true;
     }
 
     //such method MUST NOT be called from within same class
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#project, 'ADMINISTRATION') or hasPermission(#project, 'MANAGEMENT')"
-            + " or hasPermission(#project, 'OPERATION') or hasPermission(#project, 'READ')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#project, 'ADMINISTRATION') or hasPermission(#project, 'MANAGEMENT')" + " or hasPermission(#project, 'OPERATION') or hasPermission(#project, 'READ')")
     public boolean hasProjectReadPermission(ProjectInstance project) {
         return true;
     }
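
For readers unfamiliar with the annotation above: @PreAuthorize evaluates a Spring Security SpEL expression against the method arguments before the body runs, which is why these helpers can simply return true once reached. A minimal sketch under the same caveat noted in the comments (the call must go through the Spring proxy, so never invoke it from within the same class); the class and method names are illustrative.

    import org.springframework.security.access.prepost.PreAuthorize;
    import org.springframework.stereotype.Component;

    @Component
    public class ReadGuardSketch {

        // Access is decided entirely by the SpEL expression; reaching the
        // method body already means the caller passed the check.
        @PreAuthorize("hasRole('ROLE_ADMIN') or hasPermission(#project, 'READ')")
        public boolean hasReadPermission(Object project) {
            return true;
        }
    }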

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
index b6e4d23..f6b3496 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
@@ -18,25 +18,23 @@
 
 package org.apache.kylin.rest.util;
 
-import java.sql.SQLException;
-import java.util.List;
-
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.query.routing.NoRealizationFoundException;
-import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
+import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.sql.SQLException;
+import java.util.List;
+
 public class AdHocUtil {
     private static final Logger logger = LoggerFactory.getLogger(AdHocUtil.class);
 
-    public static boolean doAdHocQuery(String sql, List<List<String>> results, List<SelectedColumnMeta> columnMetas,
-            SQLException sqlException) throws Exception {
-        boolean isExpectedCause = (ExceptionUtils.getRootCause(sqlException).getClass()
-                .equals(NoRealizationFoundException.class));
+    public static boolean doAdHocQuery(String sql, List<List<String>> results, List<SelectedColumnMeta> columnMetas, SQLException sqlException) throws Exception {
+        boolean isExpectedCause = (ExceptionUtils.getRootCause(sqlException).getClass().equals(NoRealizationFoundException.class));
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         Boolean isAdHoc = false;
 
@@ -69,3 +67,4 @@ public class AdHocUtil {
         return isAdHoc;
     }
 }
+
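
The key decision in doAdHocQuery above is whether the SQLException ultimately stems from NoRealizationFoundException, i.e. no cube could answer the query, so the ad-hoc (push-down) path may be tried. A standalone sketch of that root-cause test with Commons Lang3; it uses instanceof instead of the exact-class comparison in the patch, and the nested exception class is a stand-in for the real Kylin type.

    import java.sql.SQLException;

    import org.apache.commons.lang3.exception.ExceptionUtils;

    public class RootCauseCheckSketch {

        // Stand-in for org.apache.kylin.query.routing.NoRealizationFoundException.
        static class NoRealizationFound extends RuntimeException {
            private static final long serialVersionUID = 1L;
        }

        // True only when the deepest cause in the chain is the "no realization" case;
        // getRootCause returns null when the exception carries no cause at all.
        static boolean shouldTryAdHoc(SQLException e) {
            Throwable root = ExceptionUtils.getRootCause(e);
            return root instanceof NoRealizationFound;
        }

        public static void main(String[] args) {
            SQLException wrapped = new SQLException(new RuntimeException(new NoRealizationFound()));
            System.out.println(shouldTryAdHoc(wrapped));                  // true
            System.out.println(shouldTryAdHoc(new SQLException("boom"))); // false
        }
    }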

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/util/ControllerSplitter.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/ControllerSplitter.java b/server-base/src/main/java/org/apache/kylin/rest/util/ControllerSplitter.java
index 154eb9f..e043327 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/ControllerSplitter.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/ControllerSplitter.java
@@ -32,9 +32,9 @@ public class ControllerSplitter {
     static File v1dir = new File("src/main/java/org/apache/kylin/rest/controller");
     static File v2dir = new File("src/main/java/org/apache/kylin/rest/controller2");
     static boolean dryRun = false;
-
+    
     public static void main(String[] args) throws IOException {
-
+        
         for (File f : v1dir.listFiles()) {
             chopOff(f, "application/vnd.apache.kylin-v2+json");
         }
@@ -45,28 +45,28 @@ public class ControllerSplitter {
     }
 
     private static void chopOff(File f, String annoPtn) throws IOException {
-
+        
         System.out.println("Processing " + f);
-
+        
         FileInputStream is = new FileInputStream(f);
         List<String> lines = IOUtils.readLines(is, "UTF-8");
         is.close();
         List<String> outLines = new ArrayList<>(lines.size());
-
+        
         boolean del = false;
         for (String l : lines) {
             if (l.startsWith("    @") && l.contains(annoPtn))
                 del = true;
-
+            
             if (del)
                 System.out.println("x " + l);
             else
                 outLines.add(l);
-
+            
             if (del && l.startsWith("    }"))
                 del = false;
         }
-
+        
         if (!dryRun && outLines.size() < lines.size()) {
             FileOutputStream os = new FileOutputStream(f);
             IOUtils.writeLines(outLines, "\n", os, "UTF-8");
@@ -75,7 +75,7 @@ public class ControllerSplitter {
         } else {
             System.out.println("skipped");
         }
-
+        
         System.out.println("============================================================================");
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/util/TableauInterceptor.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/TableauInterceptor.java b/server-base/src/main/java/org/apache/kylin/rest/util/TableauInterceptor.java
index 6100b04..49b7502 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/TableauInterceptor.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/TableauInterceptor.java
@@ -48,8 +48,7 @@ public class TableauInterceptor {
         private static final long serialVersionUID = -8086728462624901359L;
 
         {
-            add(new SelectedColumnMeta(false, false, true, false, 2, true, 11, "COL", "COL", "", "", "", 10, 0, 4,
-                    "int4", false, true, false));
+            add(new SelectedColumnMeta(false, false, true, false, 2, true, 11, "COL", "COL", "", "", "", 10, 0, 4, "int4", false, true, false));
         }
     }, new LinkedList<List<String>>() {
         private static final long serialVersionUID = -470083340592928073L;
@@ -112,4 +111,5 @@ public class TableauInterceptor {
         return true;
     }
 
+
 }
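
The literals above rely on the "double brace" idiom: an anonymous subclass of the collection whose instance-initializer block pre-populates it, which is also why every literal carries its own serialVersionUID. A tiny illustration of the idiom; the class name and contents are illustrative.

    import java.util.LinkedList;
    import java.util.List;

    public class DoubleBraceSketch {

        // Anonymous LinkedList subclass; the inner braces form an instance
        // initializer that runs at construction time and fills the list.
        static final List<String> COLUMNS = new LinkedList<String>() {
            private static final long serialVersionUID = 1L;
            {
                add("COL");
            }
        };

        public static void main(String[] args) {
            System.out.println(COLUMNS); // [COL]
        }
    }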

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/test/java/org/apache/kylin/rest/bean/BeanTest.java
----------------------------------------------------------------------
diff --git a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanTest.java b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanTest.java
index 8dd5ad6..27c77c3 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanTest.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanTest.java
@@ -20,10 +20,10 @@ package org.apache.kylin.rest.bean;
 
 import java.beans.IntrospectionException;
 
+import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.metadata.querymeta.ColumnMeta;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.metadata.querymeta.TableMeta;
-import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.request.AccessRequest;
 import org.apache.kylin.rest.request.CubeRequest;
 import org.apache.kylin.rest.request.JobListRequest;
@@ -56,8 +56,7 @@ public class BeanTest {
 
         new SQLResponse(null, null, null, 0, true, null);
 
-        SelectedColumnMeta coulmnMeta = new SelectedColumnMeta(false, false, false, false, 0, false, 0, null, null,
-                null, null, null, 0, 0, 0, null, false, false, false);
+        SelectedColumnMeta coulmnMeta = new SelectedColumnMeta(false, false, false, false, 0, false, 0, null, null, null, null, null, 0, 0, 0, null, false, false, false);
         Assert.assertTrue(!coulmnMeta.isAutoIncrement());
         Assert.assertTrue(!coulmnMeta.isCaseSensitive());
         Assert.assertTrue(!coulmnMeta.isSearchable());
@@ -66,9 +65,7 @@ public class BeanTest {
         Assert.assertTrue(!coulmnMeta.isSigned());
 
         Assert.assertEquals(Constant.ACCESS_HAS_ROLE_ADMIN, "hasRole('ROLE_ADMIN')");
-        Assert.assertEquals(Constant.ACCESS_POST_FILTER_READ,
-                "hasRole('ROLE_ADMIN') or hasPermission(filterObject, 'READ') or hasPermission(filterObject, 'MANAGEMENT') "
-                        + "or hasPermission(filterObject, 'OPERATION') or hasPermission(filterObject, 'ADMINISTRATION')");
+        Assert.assertEquals(Constant.ACCESS_POST_FILTER_READ, "hasRole('ROLE_ADMIN') or hasPermission(filterObject, 'READ') or hasPermission(filterObject, 'MANAGEMENT') " + "or hasPermission(filterObject, 'OPERATION') or hasPermission(filterObject, 'ADMINISTRATION')");
         Assert.assertEquals(Constant.FakeCatalogName, "defaultCatalog");
         Assert.assertEquals(Constant.FakeSchemaName, "defaultSchema");
         Assert.assertEquals(Constant.IDENTITY_ROLE, "role");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
----------------------------------------------------------------------
diff --git a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
index 943424c..08010e4 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
@@ -42,8 +42,7 @@ public class BeanValidator {
     /**
      * Tests the get/set methods of the specified class.
      */
-    public static <T> void validateAccssor(final Class<T> clazz, final String... skipThese)
-            throws IntrospectionException {
+    public static <T> void validateAccssor(final Class<T> clazz, final String... skipThese) throws IntrospectionException {
         final PropertyDescriptor[] props = Introspector.getBeanInfo(clazz).getPropertyDescriptors();
         for (PropertyDescriptor prop : props) {
 
@@ -70,21 +69,18 @@ public class BeanValidator {
 
                         setter.invoke(bean, value);
 
-                        Assert.assertEquals(String.format("Failed while testing property %s", prop.getName()), value,
-                                getter.invoke(bean));
+                        Assert.assertEquals(String.format("Failed while testing property %s", prop.getName()), value, getter.invoke(bean));
 
                     } catch (Exception ex) {
                         ex.printStackTrace();
-                        System.err.println(String.format("An exception was thrown while testing the property %s: %s",
-                                prop.getName(), ex.toString()));
+                        System.err.println(String.format("An exception was thrown while testing the property %s: %s", prop.getName(), ex.toString()));
                     }
                 }
             }
         }
     }
 
-    private static Object buildValue(Class<?> clazz) throws InstantiationException, IllegalAccessException,
-            IllegalArgumentException, SecurityException, InvocationTargetException {
+    private static Object buildValue(Class<?> clazz) throws InstantiationException, IllegalAccessException, IllegalArgumentException, SecurityException, InvocationTargetException {
 
         final Constructor<?>[] ctrs = clazz.getConstructors();
         for (Constructor<?> ctr : ctrs) {
@@ -121,33 +117,30 @@ public class BeanValidator {
         } else if (clazz.isEnum()) {
             return clazz.getEnumConstants()[0];
         } else if (clazz.isInterface()) {
-            return Proxy.newProxyInstance(clazz.getClassLoader(), new java.lang.Class[] { clazz },
-                    new java.lang.reflect.InvocationHandler() {
-                        @Override
-                        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
-                            if (Object.class.getMethod("equals", Object.class).equals(method)) {
-                                return proxy == args[0];
-                            }
-                            if (Object.class.getMethod("hashCode", Object.class).equals(method)) {
-                                return Integer.valueOf(System.identityHashCode(proxy));
-                            }
-                            if (Object.class.getMethod("toString", Object.class).equals(method)) {
-                                return "Bean " + getMockedType(proxy);
-                            }
-
-                            return null;
-                        }
-
-                    });
+            return Proxy.newProxyInstance(clazz.getClassLoader(), new java.lang.Class[] { clazz }, new java.lang.reflect.InvocationHandler() {
+                @Override
+                public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+                    if (Object.class.getMethod("equals", Object.class).equals(method)) {
+                        return proxy == args[0];
+                    }
+                    if (Object.class.getMethod("hashCode", Object.class).equals(method)) {
+                        return Integer.valueOf(System.identityHashCode(proxy));
+                    }
+                    if (Object.class.getMethod("toString", Object.class).equals(method)) {
+                        return "Bean " + getMockedType(proxy);
+                    }
+
+                    return null;
+                }
+
+            });
         } else {
-            System.err.println("Unable to build an instance of class " + clazz.getName()
-                    + ", please add some code to the " + BeanValidator.class.getName() + " class to do this.");
+            System.err.println("Unable to build an instance of class " + clazz.getName() + ", please add some code to the " + BeanValidator.class.getName() + " class to do this.");
             return null;
         }
     }
 
-    public static <T> void findBooleanIsMethods(Class<T> clazz, PropertyDescriptor descriptor)
-            throws IntrospectionException {
+    public static <T> void findBooleanIsMethods(Class<T> clazz, PropertyDescriptor descriptor) throws IntrospectionException {
         if (descriptor.getReadMethod() == null && descriptor.getPropertyType() == Boolean.class) {
             try {
                 PropertyDescriptor pd = new PropertyDescriptor(descriptor.getName(), clazz);
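
buildValue above falls back to a JDK dynamic proxy whenever a property is typed as an interface, so the accessor round-trip can still be exercised. A self-contained sketch of that trick; it matches methods by name rather than by Method object, and the class name is illustrative.

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;

    public class ProxyStubSketch {

        // Builds a do-nothing stub for any interface: equals/hashCode/toString
        // get sensible answers, every other call returns null.
        @SuppressWarnings("unchecked")
        static <T> T stub(final Class<T> iface) {
            InvocationHandler handler = new InvocationHandler() {
                @Override
                public Object invoke(Object proxy, Method method, Object[] args) {
                    if ("equals".equals(method.getName()) && args != null && args.length == 1) {
                        return proxy == args[0];
                    }
                    if ("hashCode".equals(method.getName())) {
                        return System.identityHashCode(proxy);
                    }
                    if ("toString".equals(method.getName())) {
                        return "stub of " + iface.getName();
                    }
                    return null;
                }
            };
            return (T) Proxy.newProxyInstance(iface.getClassLoader(), new Class<?>[] { iface }, handler);
        }

        public static void main(String[] args) {
            Runnable r = stub(Runnable.class);
            System.out.println(r); // stub of java.lang.Runnable
            r.run();               // no-op
        }
    }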

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
----------------------------------------------------------------------
diff --git a/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java b/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
index 067de08..ac42ad7 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
@@ -24,7 +24,7 @@ import org.junit.Test;
 public class PasswordPlaceHolderConfigurerTest {
 
     @Test
-    public void testAESEncrypt() {
+    public void testAESEncrypt(){
         String input = "hello world";
         String result = PasswordPlaceholderConfigurer.encrypt(input);
         Assert.assertEquals("4stv/RRleOtvie/8SLHmXA==", result);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
index 0c2fc92..cd6768e 100644
--- a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
+++ b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
@@ -18,10 +18,6 @@
 
 package org.apache.kylin.rest;
 
-import java.io.File;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-
 import org.apache.catalina.Context;
 import org.apache.catalina.core.AprLifecycleListener;
 import org.apache.catalina.core.StandardServer;
@@ -31,6 +27,10 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.kylin.common.KylinConfig;
 
+import java.io.File;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+
 public class DebugTomcat {
 
     public static void setupDebugEnv() {
@@ -48,8 +48,7 @@ public class DebugTomcat {
                 System.setProperty("catalina.home", ".");
 
             if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-                System.err.println(
-                        "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+                System.err.println("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
                 System.exit(1);
             }
 
@@ -110,8 +109,7 @@ public class DebugTomcat {
 
         String webBase = new File("../webapp/app").getAbsolutePath();
         if (new File(webBase, "WEB-INF").exists() == false) {
-            throw new RuntimeException(
-                    "In order to launch Kylin web app from IDE, please copy server/src/main/webapp/WEB-INF to  webapp/app/");
+            throw new RuntimeException("In order to launch Kylin web app from IDE, please copy server/src/main/webapp/WEB-INF to  webapp/app/");
         }
 
         Tomcat tomcat = new Tomcat();
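
Stripped of the Kylin-specific checks above, the embedded bootstrap that follows this line amounts to configuring and starting a Tomcat instance, then blocking until shutdown. A rough sketch of that shape; the port, base dir and context path are assumptions for illustration, while the webapp path matches the webBase computed above.

    import java.io.File;

    import org.apache.catalina.startup.Tomcat;

    public class MiniTomcatSketch {

        public static void main(String[] args) throws Exception {
            Tomcat tomcat = new Tomcat();
            tomcat.setPort(7070);                 // assumed port for illustration
            tomcat.setBaseDir(".");
            tomcat.addWebapp("/kylin", new File("../webapp/app").getAbsolutePath());
            tomcat.start();
            tomcat.getServer().await();           // block until the server is stopped
        }
    }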

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/controller/AccessControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/AccessControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/AccessControllerTest.java
index b6e0717..18fbd06 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/AccessControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/AccessControllerTest.java
@@ -18,13 +18,6 @@
 
 package org.apache.kylin.rest.controller;
 
-import static junit.framework.TestCase.fail;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.List;
-
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.rest.request.AccessRequest;
@@ -45,6 +38,13 @@ import org.springframework.security.authentication.TestingAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 
+import java.io.IOException;
+import java.util.List;
+
+import static junit.framework.TestCase.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
  * @author xduo
  */
@@ -88,8 +88,7 @@ public class AccessControllerTest extends ServiceTestBase implements AclEntityTy
     @Test
     public void testBasics() throws IOException {
         swichToAdmin();
-        List<AccessEntryResponse> aes = accessController.getAccessEntities(CUBE_INSTANCE,
-                "a24ca905-1fc6-4f67-985c-38fa5aeafd92");
+        List<AccessEntryResponse> aes = accessController.getAccessEntities(CUBE_INSTANCE, "a24ca905-1fc6-4f67-985c-38fa5aeafd92");
         Assert.assertTrue(aes.size() == 0);
 
         AccessRequest accessRequest = getAccessRequest(MODELER, ADMINISTRATION);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/controller/CubeControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/CubeControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/CubeControllerTest.java
index a7ca5a2..e67c238 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/CubeControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/CubeControllerTest.java
@@ -35,10 +35,10 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Lists;
+import org.springframework.beans.factory.annotation.Qualifier;
 
 /**
  * @author xduo
@@ -169,6 +169,7 @@ public class CubeControllerTest extends ServiceTestBase {
         Assert.assertTrue(segNumber == newSegNumber + 1);
     }
 
+
     @Test
     public void testGetHoles() throws IOException {
         String cubeName = "test_kylin_cube_with_slr_ready_3_segments";
@@ -178,7 +179,7 @@ public class CubeControllerTest extends ServiceTestBase {
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
         List<CubeSegment> segments = cube.getSegments();
 
-        final long dateEnd = segments.get(segments.size() - 1).getDateRangeEnd();
+        final long dateEnd = segments.get(segments.size() -1).getDateRangeEnd();
 
         final long ONEDAY = 24 * 60 * 60000;
         cubeService.getCubeManager().appendSegment(cube, dateEnd + ONEDAY, dateEnd + ONEDAY * 2);
@@ -193,6 +194,7 @@ public class CubeControllerTest extends ServiceTestBase {
 
     }
 
+
     @Test
     public void testGetCubes() {
         List<CubeInstance> cubes = cubeController.getCubes(null, null, null, 1, 0);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/controller/ProjectControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/ProjectControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/ProjectControllerTest.java
index 30cc0ff..f805095 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/ProjectControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/ProjectControllerTest.java
@@ -31,9 +31,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.springframework.beans.factory.annotation.Qualifier;
 
 /**
  */
@@ -74,16 +74,14 @@ public class ProjectControllerTest extends ServiceTestBase {
         ProjectInstance ret = projectController.saveProject(getProjectRequest(project, null));
 
         Assert.assertEquals(ret.getOwner(), "ADMIN");
-        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).listAllProjects().size(),
-                originalProjectCount + 1);
+        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).listAllProjects().size(), originalProjectCount + 1);
 
         //test update project
         ProjectInstance newProject = new ProjectInstance();
         newProject.setName("new_project_2");
         projectController.updateProject(getProjectRequest(newProject, "new_project"));
 
-        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).listAllProjects().size(),
-                originalProjectCount + 1);
+        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).listAllProjects().size(), originalProjectCount + 1);
         Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).getProject("new_project"), null);
         Assert.assertNotEquals(ProjectManager.getInstance(getTestConfig()).getProject("new_project_2"), null);
 
@@ -93,12 +91,10 @@ public class ProjectControllerTest extends ServiceTestBase {
         newProject2.setDescription("hello world");
         projectController.updateProject(getProjectRequest(newProject2, "new_project_2"));
 
-        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).listAllProjects().size(),
-                originalProjectCount + 1);
+        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).listAllProjects().size(), originalProjectCount + 1);
         Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).getProject("new_project"), null);
         Assert.assertNotEquals(ProjectManager.getInstance(getTestConfig()).getProject("new_project_2"), null);
-        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).getProject("new_project_2").getDescription(),
-                "hello world");
+        Assert.assertEquals(ProjectManager.getInstance(getTestConfig()).getProject("new_project_2").getDescription(), "hello world");
     }
 
     @Test(expected = InternalErrorException.class)

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/controller/QueryControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/QueryControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/QueryControllerTest.java
index 6400583..d9eb3fa 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/QueryControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/QueryControllerTest.java
@@ -29,9 +29,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 
 import net.sf.ehcache.CacheManager;
+import org.springframework.beans.factory.annotation.Qualifier;
 
 /**
  * @author xduo
@@ -70,9 +70,7 @@ public class QueryControllerTest extends ServiceTestBase {
     @Test
     public void testErrorMsg() {
         String errorMsg = "error while executing SQL \"select lkp.clsfd_ga_prfl_id, ga.sum_dt, sum(ga.bounces) as bounces, sum(ga.exits) as exits, sum(ga.entrances) as entrances, sum(ga.pageviews) as pageviews, count(distinct ga.GA_VSTR_ID, ga.GA_VST_ID) as visits, count(distinct ga.GA_VSTR_ID) as uniqVistors from CLSFD_GA_PGTYPE_CATEG_LOC ga left join clsfd_ga_prfl_lkp lkp on ga.SRC_GA_PRFL_ID = lkp.SRC_GA_PRFL_ID group by lkp.clsfd_ga_prfl_id,ga.sum_dt order by lkp.clsfd_ga_prfl_id,ga.sum_dt LIMIT 50000\": From line 14, column 14 to line 14, column 29: Column 'CLSFD_GA_PRFL_ID' not found in table 'LKP'";
-        assert QueryUtil.makeErrorMsgUserFriendly(errorMsg)
-                .equals("From line 14, column 14 to line 14, column 29: Column 'CLSFD_GA_PRFL_ID' not found in table 'LKP'\n"
-                        + "while executing SQL: \"select lkp.clsfd_ga_prfl_id, ga.sum_dt, sum(ga.bounces) as bounces, sum(ga.exits) as exits, sum(ga.entrances) as entrances, sum(ga.pageviews) as pageviews, count(distinct ga.GA_VSTR_ID, ga.GA_VST_ID) as visits, count(distinct ga.GA_VSTR_ID) as uniqVistors from CLSFD_GA_PGTYPE_CATEG_LOC ga left join clsfd_ga_prfl_lkp lkp on ga.SRC_GA_PRFL_ID = lkp.SRC_GA_PRFL_ID group by lkp.clsfd_ga_prfl_id,ga.sum_dt order by lkp.clsfd_ga_prfl_id,ga.sum_dt LIMIT 50000\"");
+        assert QueryUtil.makeErrorMsgUserFriendly(errorMsg).equals("From line 14, column 14 to line 14, column 29: Column 'CLSFD_GA_PRFL_ID' not found in table 'LKP'\n" + "while executing SQL: \"select lkp.clsfd_ga_prfl_id, ga.sum_dt, sum(ga.bounces) as bounces, sum(ga.exits) as exits, sum(ga.entrances) as entrances, sum(ga.pageviews) as pageviews, count(distinct ga.GA_VSTR_ID, ga.GA_VST_ID) as visits, count(distinct ga.GA_VSTR_ID) as uniqVistors from CLSFD_GA_PGTYPE_CATEG_LOC ga left join clsfd_ga_prfl_lkp lkp on ga.SRC_GA_PRFL_ID = lkp.SRC_GA_PRFL_ID group by lkp.clsfd_ga_prfl_id,ga.sum_dt order by lkp.clsfd_ga_prfl_id,ga.sum_dt LIMIT 50000\"");
     }
 
     @Test

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/service/AccessServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/AccessServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/AccessServiceTest.java
index e525f72..481b0bf 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/AccessServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/AccessServiceTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.rest.service;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
 import org.apache.kylin.common.persistence.AclEntity;
 import org.apache.kylin.rest.response.AccessEntryResponse;
 import org.apache.kylin.rest.security.AclPermission;
@@ -31,8 +32,6 @@ import org.springframework.security.acls.model.AccessControlEntry;
 import org.springframework.security.acls.model.Acl;
 import org.springframework.security.acls.model.Sid;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-
 /**
  * @author xduo
  */
@@ -105,8 +104,7 @@ public class AccessServiceTest extends ServiceTestBase {
 
         attachedEntityAcl = accessService.getAcl(attachedEntity);
         Assert.assertTrue(attachedEntityAcl.getParentAcl() != null);
-        Assert.assertTrue(
-                attachedEntityAcl.getParentAcl().getObjectIdentity().getIdentifier().equals("test-domain-object"));
+        Assert.assertTrue(attachedEntityAcl.getParentAcl().getObjectIdentity().getIdentifier().equals("test-domain-object"));
         Assert.assertTrue(attachedEntityAcl.getEntries().size() == 1);
 
         // test revoke

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/service/BadQueryDetectorTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/BadQueryDetectorTest.java b/server/src/test/java/org/apache/kylin/rest/service/BadQueryDetectorTest.java
index 6fb4f55..7aabb0e 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/BadQueryDetectorTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/BadQueryDetectorTest.java
@@ -52,8 +52,7 @@ public class BadQueryDetectorTest extends LocalFileMetadataTestCase {
         BadQueryDetector badQueryDetector = new BadQueryDetector(alertRunningSec * 1000, alertMB, alertRunningSec);
         badQueryDetector.registerNotifier(new BadQueryDetector.Notifier() {
             @Override
-            public void badQueryFound(String adj, float runningSec, long startTime, String project, String sql,
-                    String user, Thread t) {
+            public void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user, Thread t) {
                 alerts.add(new String[] { adj, sql });
             }
         });

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
index a7ef001..004f660 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
@@ -116,23 +116,22 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
         serviceA.setCubeService(cubeServiceA);
         serviceB.setCubeService(cubeServiceB);
 
-        context.addServlet(
-                new ServletHolder(new BroadcasterReceiveServlet(new BroadcasterReceiveServlet.BroadcasterHandler() {
-                    @Override
-                    public void handle(String entity, String cacheKey, String event) {
-                        Broadcaster.Event wipeEvent = Broadcaster.Event.getEvent(event);
-                        final String log = "wipe cache type: " + entity + " event:" + wipeEvent + " name:" + cacheKey;
-                        logger.info(log);
-                        try {
-                            serviceA.notifyMetadataChange(entity, wipeEvent, cacheKey);
-                            serviceB.notifyMetadataChange(entity, wipeEvent, cacheKey);
-                        } catch (IOException e) {
-                            throw new RuntimeException(e);
-                        } finally {
-                            counter.incrementAndGet();
-                        }
-                    }
-                })), "/");
+        context.addServlet(new ServletHolder(new BroadcasterReceiveServlet(new BroadcasterReceiveServlet.BroadcasterHandler() {
+            @Override
+            public void handle(String entity, String cacheKey, String event) {
+                Broadcaster.Event wipeEvent = Broadcaster.Event.getEvent(event);
+                final String log = "wipe cache type: " + entity + " event:" + wipeEvent + " name:" + cacheKey;
+                logger.info(log);
+                try {
+                    serviceA.notifyMetadataChange(entity, wipeEvent, cacheKey);
+                    serviceB.notifyMetadataChange(entity, wipeEvent, cacheKey);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                } finally {
+                    counter.incrementAndGet();
+                }
+            }
+        })), "/");
 
         server.start();
     }
@@ -200,8 +199,7 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
         assertTrue(!getProjectManager(configA).equals(getProjectManager(configB)));
         assertTrue(!getMetadataManager(configA).equals(getMetadataManager(configB)));
 
-        assertEquals(getProjectManager(configA).listAllProjects().size(),
-                getProjectManager(configB).listAllProjects().size());
+        assertEquals(getProjectManager(configA).listAllProjects().size(), getProjectManager(configB).listAllProjects().size());
     }
 
     @Test
@@ -224,10 +222,8 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
 
         assertTrue(cubeManager.getCube(cubeName) == null);
         assertTrue(cubeManagerB.getCube(cubeName) == null);
-        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME),
-                RealizationType.CUBE, cubeName));
-        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME),
-                RealizationType.CUBE, cubeName));
+        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
         cubeManager.createCube(cubeName, ProjectInstance.DEFAULT_PROJECT_NAME, cubeDesc, null);
         //one for cube update, one for project update
         assertEquals(2, broadcaster.getCounterAndClear());
@@ -235,10 +231,8 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
 
         assertNotNull(cubeManager.getCube(cubeName));
         assertNotNull(cubeManagerB.getCube(cubeName));
-        assertTrue(containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME),
-                RealizationType.CUBE, cubeName));
-        assertTrue(containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME),
-                RealizationType.CUBE, cubeName));
+        assertTrue(containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+        assertTrue(containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
 
         //update cube
         CubeInstance cube = cubeManager.getCube(cubeName);
@@ -258,11 +252,9 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
         waitForCounterAndClear(2);
 
         assertTrue(cubeManager.getCube(cubeName) == null);
-        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME),
-                RealizationType.CUBE, cubeName));
+        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
         assertTrue(cubeManagerB.getCube(cubeName) == null);
-        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME),
-                RealizationType.CUBE, cubeName));
+        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
 
         final String cubeDescName = "test_cube_desc";
         cubeDesc.setName(cubeDescName);
@@ -338,8 +330,7 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
         //only one for data model update
         assertEquals(1, broadcaster.getCounterAndClear());
         waitForCounterAndClear(1);
-        assertEquals(dataModelDesc.getJoinTables().length,
-                metadataManagerB.getDataModelDesc(dataModelName).getJoinTables().length);
+        assertEquals(dataModelDesc.getJoinTables().length, metadataManagerB.getDataModelDesc(dataModelName).getJoinTables().length);
 
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
index bbedb25..a190d6d 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
@@ -27,9 +27,9 @@ import org.apache.kylin.metadata.project.ProjectInstance;
 import org.junit.Assert;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
+import org.springframework.beans.factory.annotation.Qualifier;
 
 /**
  * @author xduo

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
index 26f42dd..2012a05 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
@@ -62,8 +62,7 @@ public class ModelServiceTest extends ServiceTestBase {
     }
 
     @Test
-    public void testSuccessModelUpdateOnComputedColumn()
-            throws IOException, JobException, NoSuchFieldException, IllegalAccessException {
+    public void testSuccessModelUpdateOnComputedColumn() throws IOException, JobException, NoSuchFieldException, IllegalAccessException {
 
         List<DataModelDesc> dataModelDescs = modelService.listAllModels("ci_left_join_model", "default");
         Assert.assertTrue(dataModelDescs.size() == 1);
@@ -80,11 +79,9 @@ public class ModelServiceTest extends ServiceTestBase {
     }
 
     @Test
-    public void testFailureModelUpdateDueToComputedColumnConflict()
-            throws IOException, JobException, NoSuchFieldException, IllegalAccessException {
+    public void testFailureModelUpdateDueToComputedColumnConflict() throws IOException, JobException, NoSuchFieldException, IllegalAccessException {
         expectedEx.expect(IllegalStateException.class);
-        expectedEx.expectMessage(
-                "Computed column named DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT is already defined in other models: [DataModelDesc [name=ci_left_join_model], DataModelDesc [name=ci_inner_join_model]]. Please change another name, or try to keep consistent definition");
+        expectedEx.expectMessage("Computed column named DEFAULT.TEST_KYLIN_FACT.DEAL_AMOUNT is already defined in other models: [DataModelDesc [name=ci_left_join_model], DataModelDesc [name=ci_inner_join_model]]. Please change another name, or try to keep consistent definition");
 
         List<DataModelDesc> dataModelDescs = modelService.listAllModels("ci_left_join_model", "default");
         Assert.assertTrue(dataModelDescs.size() == 1);
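
The expectedEx calls above use JUnit 4's ExpectedException rule: expectations are declared before the statement that should fail, and the test passes only if a matching exception is actually thrown. A minimal sketch of the rule in isolation; the message is shortened for illustration.

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;

    public class ExpectedExceptionSketch {

        @Rule
        public ExpectedException expectedEx = ExpectedException.none();

        @Test
        public void failsWithConflictMessage() {
            expectedEx.expect(IllegalStateException.class);
            expectedEx.expectMessage("already defined in other models"); // substring match

            throw new IllegalStateException(
                    "Computed column DEAL_AMOUNT is already defined in other models");
        }
    }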

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java b/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
index 93596d9..b45b27b 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
@@ -72,18 +72,17 @@ public class ServiceTestBase extends LocalFileMetadataTestCase {
 
         if (!userService.userExists("ADMIN")) {
             userService.createUser(new User("ADMIN", "KYLIN", Arrays.asList(//
-                    new UserGrantedAuthority(Constant.ROLE_ADMIN), new UserGrantedAuthority(Constant.ROLE_ANALYST),
-                    new UserGrantedAuthority(Constant.ROLE_MODELER))));
+                new UserGrantedAuthority(Constant.ROLE_ADMIN), new UserGrantedAuthority(Constant.ROLE_ANALYST), new UserGrantedAuthority(Constant.ROLE_MODELER))));
         }
 
         if (!userService.userExists("MODELER")) {
             userService.createUser(new User("MODELER", "MODELER", Arrays.asList(//
-                    new UserGrantedAuthority(Constant.ROLE_ANALYST), new UserGrantedAuthority(Constant.ROLE_MODELER))));
+                new UserGrantedAuthority(Constant.ROLE_ANALYST), new UserGrantedAuthority(Constant.ROLE_MODELER))));
         }
 
         if (!userService.userExists("ANALYST")) {
             userService.createUser(new User("ANALYST", "ANALYST", Arrays.asList(//
-                    new UserGrantedAuthority(Constant.ROLE_ANALYST))));
+                new UserGrantedAuthority(Constant.ROLE_ANALYST))));
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
index 9800919..468ccb1 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
@@ -133,8 +133,7 @@ public class BeelineHiveClient implements IHiveClient {
 
         List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
         while (columns.next()) {
-            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(columns.getString(4), columns.getString(6),
-                    columns.getString(12)));
+            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(columns.getString(4), columns.getString(6), columns.getString(12)));
         }
         builder.setAllColumns(allColumns);
         DBUtils.closeQuietly(columns);
@@ -158,8 +157,7 @@ public class BeelineHiveClient implements IHiveClient {
                     if ("".equals(resultSet.getString(1).trim())) {
                         break;
                     }
-                    partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(resultSet.getString(1).trim(),
-                            resultSet.getString(2).trim(), resultSet.getString(3).trim()));
+                    partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(resultSet.getString(1).trim(), resultSet.getString(2).trim(), resultSet.getString(3).trim()));
                 }
                 builder.setPartitionColumns(partitionColumns);
             }
@@ -216,8 +214,7 @@ public class BeelineHiveClient implements IHiveClient {
 
     public static void main(String[] args) throws SQLException {
 
-        BeelineHiveClient loader = new BeelineHiveClient(
-                "-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://sandbox:10000'");
+        BeelineHiveClient loader = new BeelineHiveClient("-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://sandbox:10000'");
         //BeelineHiveClient loader = new BeelineHiveClient(StringUtils.join(args, " "));
         HiveTableMeta hiveTableMeta = loader.getHiveTableMeta("default", "test_kylin_fact_part");
         System.out.println(hiveTableMeta);


[43/67] [abbrv] kylin git commit: KYLIN-2632 Refactor error msg

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
index 7f71801..ab4741d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
@@ -31,7 +31,6 @@ import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.rest.controller.BasicController;
 import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.model.Query;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.MetaRequest;
 import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.request.SQLRequest;
@@ -48,7 +47,6 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -72,48 +70,52 @@ public class QueryControllerV2 extends BasicController {
     @Qualifier("queryService")
     private QueryService queryService;
 
-    @RequestMapping(value = "/query", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/query", method = RequestMethod.POST, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse queryV2(@RequestHeader("Accept-Language") String lang, @RequestBody SQLRequest sqlRequest) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse queryV2(@RequestBody SQLRequest sqlRequest) {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.doQueryWithCache(sqlRequest), "");
     }
 
     // TODO should be just "prepare" a statement, get back expected ResultSetMetaData
 
-    @RequestMapping(value = "/query/prestate", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/query/prestate", method = RequestMethod.POST, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse prepareQueryV2(@RequestHeader("Accept-Language") String lang, @RequestBody PrepareSqlRequest sqlRequest) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse prepareQueryV2(@RequestBody PrepareSqlRequest sqlRequest) {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.doQueryWithCache(sqlRequest), "");
     }
 
-    @RequestMapping(value = "/saved_queries", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saved_queries", method = RequestMethod.POST, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void saveQueryV2(@RequestHeader("Accept-Language") String lang, @RequestBody SaveSqlRequest sqlRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void saveQueryV2(@RequestBody SaveSqlRequest sqlRequest) throws IOException {
 
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
-        Query newQuery = new Query(sqlRequest.getName(), sqlRequest.getProject(), sqlRequest.getSql(), sqlRequest.getDescription());
+        Query newQuery = new Query(sqlRequest.getName(), sqlRequest.getProject(), sqlRequest.getSql(),
+                sqlRequest.getDescription());
 
         queryService.saveQuery(creator, newQuery);
     }
 
-    @RequestMapping(value = "/saved_queries/{id}", method = RequestMethod.DELETE, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saved_queries/{id}", method = RequestMethod.DELETE, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void removeQueryV2(@RequestHeader("Accept-Language") String lang, @PathVariable String id) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void removeQueryV2(@PathVariable String id) throws IOException {
 
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
         queryService.removeQuery(creator, id);
     }
 
-    @RequestMapping(value = "/saved_queries/{project}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saved_queries/{project}", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getQueriesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String project, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getQueriesV2(@PathVariable String project,
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
+            throws IOException {
 
         HashMap<String, Object> data = new HashMap<String, Object>();
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -141,10 +143,11 @@ public class QueryControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "/query/format/{format}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/query/format/{format}", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void downloadQueryResultV2(@RequestHeader("Accept-Language") String lang, @PathVariable String format, SQLRequest sqlRequest, HttpServletResponse response) {
-        MsgPicker.setMsg(lang);
+    public void downloadQueryResultV2(@PathVariable String format, SQLRequest sqlRequest,
+            HttpServletResponse response) {
 
         SQLResponse result = queryService.doQueryWithCache(sqlRequest);
         response.setContentType("text/" + format + ";charset=utf-8");
@@ -173,12 +176,13 @@ public class QueryControllerV2 extends BasicController {
         }
     }
 
-    @RequestMapping(value = "/tables_and_columns", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/tables_and_columns", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getMetadataV2(@RequestHeader("Accept-Language") String lang, MetaRequest metaRequest) throws SQLException, IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getMetadataV2(MetaRequest metaRequest) throws SQLException, IOException {
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.getMetadataV2(metaRequest.getProject()), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.getMetadataV2(metaRequest.getProject()),
+                "");
     }
 
     public void setQueryService(QueryService queryService) {

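Every hunk in this file follows the same pattern: the per-method @RequestHeader("Accept-Language") String lang parameter and the MsgPicker.setMsg(lang) call are dropped, and the long produces = { ... } arrays are wrapped onto a second line. Below is a minimal sketch of how the Accept-Language header could instead be applied once per request with a Spring HandlerInterceptor; this is an illustration only, the AcceptLanguageInterceptor class and its wiring are assumptions and not code from the Kylin repository.

    // Illustration only: hypothetical interceptor, not part of Kylin.
    // Shows one way to call MsgPicker.setMsg once per request instead of in every handler.
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.apache.kylin.rest.msg.MsgPicker;
    import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;

    public class AcceptLanguageInterceptor extends HandlerInterceptorAdapter {
        @Override
        public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
            // Select the message bundle from the request header before any controller method runs.
            MsgPicker.setMsg(request.getHeader("Accept-Language"));
            return true; // continue with normal handler execution
        }
    }
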
http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
index c5bebf5..5e93e59 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
@@ -47,7 +47,6 @@ import org.springframework.security.access.AccessDeniedException;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -78,26 +77,35 @@ public class StreamingControllerV2 extends BasicController {
     @Qualifier("tableService")
     private TableService tableService;
 
-    @RequestMapping(value = "/getConfig", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/getConfig", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getStreamingsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "table", required = false) String table, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getStreamingsV2(@RequestParam(value = "table", required = false) String table,
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
+            throws IOException {
 
         int offset = pageOffset * pageSize;
         int limit = pageSize;
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, streamingService.getStreamingConfigs(table, limit, offset), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                streamingService.getStreamingConfigs(table, limit, offset), "");
     }
 
-    @RequestMapping(value = "/getKfkConfig", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/getKfkConfig", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getKafkaConfigsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "kafkaConfigName", required = false) String kafkaConfigName, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getKafkaConfigsV2(
+            @RequestParam(value = "kafkaConfigName", required = false) String kafkaConfigName,
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
+            throws IOException {
 
         int offset = pageOffset * pageSize;
         int limit = pageSize;
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, kafkaConfigService.getKafkaConfigs(kafkaConfigName, limit, offset), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                kafkaConfigService.getKafkaConfigs(kafkaConfigName, limit, offset), "");
     }
 
     /**
@@ -108,8 +116,7 @@ public class StreamingControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void saveStreamingConfigV2(@RequestHeader("Accept-Language") String lang, @RequestBody StreamingRequest streamingRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void saveStreamingConfigV2(@RequestBody StreamingRequest streamingRequest) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         String project = streamingRequest.getProject();
@@ -161,7 +168,8 @@ public class StreamingControllerV2 extends BasicController {
             if (saveKafkaSuccess == false || saveStreamingSuccess == false) {
 
                 if (saveStreamingSuccess == true) {
-                    StreamingConfig sConfig = streamingService.getStreamingManager().getStreamingConfig(streamingConfig.getName());
+                    StreamingConfig sConfig = streamingService.getStreamingManager()
+                            .getStreamingConfig(streamingConfig.getName());
                     try {
                         streamingService.dropStreamingConfig(sConfig);
                     } catch (IOException e) {
@@ -183,8 +191,7 @@ public class StreamingControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateStreamingConfigV2(@RequestHeader("Accept-Language") String lang, @RequestBody StreamingRequest streamingRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void updateStreamingConfigV2(@RequestBody StreamingRequest streamingRequest) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         StreamingConfig streamingConfig = deserializeSchemalDescV2(streamingRequest);
@@ -206,10 +213,10 @@ public class StreamingControllerV2 extends BasicController {
         }
     }
 
-    @RequestMapping(value = "/{configName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{configName}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteConfigV2(@RequestHeader("Accept-Language") String lang, @PathVariable String configName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void deleteConfigV2(@PathVariable String configName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         StreamingConfig config = streamingService.getStreamingManager().getStreamingConfig(configName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
index 4e2506c..c73bca3 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
@@ -37,7 +37,6 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -65,10 +64,11 @@ public class TableControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getTableDescV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "ext", required = false) boolean withExt, @RequestParam(value = "project", required = true) String project) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getTableDescV2(@RequestParam(value = "ext", required = false) boolean withExt,
+            @RequestParam(value = "project", required = true) String project) throws IOException {
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getTableDescByProject(project, withExt), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getTableDescByProject(project, withExt),
+                "");
     }
 
     /**
@@ -78,10 +78,10 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{tableName:.+}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{tableName:.+}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getTableDescV2(@RequestHeader("Accept-Language") String lang, @PathVariable String tableName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getTableDescV2(@PathVariable String tableName) {
         Message msg = MsgPicker.getMsg();
 
         TableDesc table = tableService.getTableDescByName(tableName, false);
@@ -90,20 +90,23 @@ public class TableControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, table, "");
     }
 
-    @RequestMapping(value = "/load", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/load", method = { RequestMethod.POST }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse loadHiveTablesV2(@RequestHeader("Accept-Language") String lang, @RequestBody HiveTableRequestV2 requestV2) throws Exception {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse loadHiveTablesV2(@RequestBody HiveTableRequestV2 requestV2) throws Exception {
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.loadHiveTables(requestV2.getTables(), requestV2.getProject(), requestV2.isNeedProfile()), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                tableService.loadHiveTables(requestV2.getTables(), requestV2.getProject(), requestV2.isNeedProfile()),
+                "");
     }
 
-    @RequestMapping(value = "/load", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/load", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse unLoadHiveTablesV2(@RequestHeader("Accept-Language") String lang, @RequestBody HiveTableRequestV2 requestV2) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse unLoadHiveTablesV2(@RequestBody HiveTableRequestV2 requestV2) throws IOException {
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.unloadHiveTables(requestV2.getTables(), requestV2.getProject()), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                tableService.unloadHiveTables(requestV2.getTables(), requestV2.getProject()), "");
     }
 
     /**
@@ -113,10 +116,10 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/cardinality", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/cardinality", method = { RequestMethod.POST }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void generateCardinalityV2(@RequestHeader("Accept-Language") String lang, @RequestBody HiveTableRequestV2 requestV2) throws Exception {
-        MsgPicker.setMsg(lang);
+    public void generateCardinalityV2(@RequestBody HiveTableRequestV2 requestV2) throws Exception {
 
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         String[] tables = requestV2.getTables();
@@ -133,10 +136,10 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/hive", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/hive", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    private EnvelopeResponse showHiveDatabasesV2(@RequestHeader("Accept-Language") String lang) throws Exception {
-        MsgPicker.setMsg(lang);
+    private EnvelopeResponse showHiveDatabasesV2() throws Exception {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getHiveDbNames(), "");
     }
@@ -148,10 +151,10 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    private EnvelopeResponse showHiveTablesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String database) throws Exception {
-        MsgPicker.setMsg(lang);
+    private EnvelopeResponse showHiveTablesV2(@PathVariable String database) throws Exception {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getHiveTableNames(database), "");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
index ebf8b36..f75f351 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
@@ -35,7 +35,6 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.stereotype.Controller;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -57,18 +56,19 @@ public class UserControllerV2 extends BasicController {
     @Qualifier("userService")
     UserService userService;
 
-    @RequestMapping(value = "/authentication", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/authentication", method = RequestMethod.POST, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse authenticateV2(@RequestHeader("Accept-Language") String lang) {
-        EnvelopeResponse response = authenticatedUserV2(lang);
+    public EnvelopeResponse authenticateV2() {
+        EnvelopeResponse response = authenticatedUserV2();
         logger.debug("User login: {}", response.data);
         return response;
     }
 
-    @RequestMapping(value = "/authentication", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/authentication", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse authenticatedUserV2(@RequestHeader("Accept-Language") String lang) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse authenticatedUserV2() {
         Message msg = MsgPicker.getMsg();
 
         Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
@@ -92,10 +92,10 @@ public class UserControllerV2 extends BasicController {
         throw new BadRequestException(msg.getAUTH_INFO_NOT_FOUND());
     }
 
-    @RequestMapping(value = "/authentication/authorities", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/authentication/authorities", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getAuthoritiesV2(@RequestHeader("Accept-Language") String lang) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getAuthoritiesV2() throws IOException {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, userService.listUserAuthorities(), "");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java b/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
index 53cbaba..1c6fb69 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.msg;
 /**
  * Created by luwei on 17-4-12.
  */
-public class CnMessage extends Message{
+public class CnMessage extends Message {
 
     private static CnMessage instance = null;
 
@@ -35,4 +35,380 @@ public class CnMessage extends Message{
         }
         return instance;
     }
+
+    // Cube
+    public String getCUBE_NOT_FOUND() {
+        return "找不到 Cube '%s'";
+    }
+
+    public String getSEG_NOT_FOUND() {
+        return "找不到 Segment '%s'";
+    }
+
+    public String getKAFKA_DEP_NOT_FOUND() {
+        return "找不到 Kafka 依赖";
+    }
+
+    public String getBUILD_DRAFT_CUBE() {
+        return "Cube 草稿不能被构建";
+    }
+
+    public String getBUILD_BROKEN_CUBE() {
+        return "损坏的 cube '%s' 不能被构建";
+    }
+
+    public String getINCONSISTENT_CUBE_DESC_SIGNATURE() {
+        return "Inconsistent cube desc signature for '%s', if it's right after an upgrade, please try 'Edit CubeDesc' to delete the 'signature' field. Or use 'bin/metastore.sh refresh-cube-signature' to batch refresh all cubes' signatures, then reload metadata to take effect.";
+    }
+
+    public String getDELETE_NOT_FIRST_LAST_SEG() {
+        return "非首尾 segment '%s' 不能被删除";
+    }
+
+    public String getDELETE_NOT_READY_SEG() {
+        return "非 READY 状态 segment '%s' 不能被删除, 请先抛弃它正在运行的任务";
+    }
+
+    public String getINVALID_BUILD_TYPE() {
+        return "非法构建类型: '%s'";
+    }
+
+    public String getNO_ACL_ENTRY() {
+        return "找不到对象 '%s' 的授权记录";
+    }
+
+    public String getACL_INFO_NOT_FOUND() {
+        return "找不到对象 '%s' 的授权信息";
+    }
+
+    public String getACL_DOMAIN_NOT_FOUND() {
+        return "找不到授权对象";
+    }
+
+    public String getPARENT_ACL_NOT_FOUND() {
+        return "找不到上级授权";
+    }
+
+    public String getDISABLE_NOT_READY_CUBE() {
+        return "仅 ready 状态的 cube 可以被禁用, '%s' 的状态是 %s";
+    }
+
+    public String getPURGE_NOT_DISABLED_CUBE() {
+        return "仅 disabled 状态的 cube 可以被清空, '%s' 的状态是 %s";
+    }
+
+    public String getCLONE_BROKEN_CUBE() {
+        return "损坏的 cube '%s' 不能被克隆";
+    }
+
+    public String getINVALID_CUBE_NAME() {
+        return "非法 cube 名称 '%s', 仅支持字母, 数字和下划线";
+    }
+
+    public String getCUBE_ALREADY_EXIST() {
+        return "Cube 名称 '%s' 已存在";
+    }
+
+    public String getCUBE_DESC_ALREADY_EXIST() {
+        return "Cube '%s' 已存在";
+    }
+
+    public String getBROKEN_CUBE_DESC() {
+        return "损坏的 Cube 描述 '%s'";
+    }
+
+    public String getENABLE_NOT_DISABLED_CUBE() {
+        return "仅 disabled 状态的 cube 可以被启用, '%s' 的状态是 %s";
+    }
+
+    public String getNO_READY_SEGMENT() {
+        return "Cube '%s' 不包含任何 READY 状态的 segment";
+    }
+
+    public String getENABLE_WITH_RUNNING_JOB() {
+        return "Cube 存在正在运行的任务, 不能被启用";
+    }
+
+    public String getDISCARD_JOB_FIRST() {
+        return "Cube '%s' 存在正在运行或失败的任务, 请抛弃它们后重试";
+    }
+
+    public String getIDENTITY_EXIST_CHILDREN() {
+        return "'%s' 存在下级授权";
+    }
+
+    public String getINVALID_CUBE_DEFINITION() {
+        return "非法 cube 定义";
+    }
+
+    public String getEMPTY_CUBE_NAME() {
+        return "Cube 名称不可为空";
+    }
+
+    public String getUSE_DRAFT_MODEL() {
+        return "不能使用模型草稿 '%s'";
+    }
+
+    public String getINCONSISTENT_CUBE_DESC() {
+        return "Cube 描述 '%s' 与现有不一致, 请清理 cube 或避免更新 cube 描述的关键字段";
+    }
+
+    public String getUPDATE_CUBE_NO_RIGHT() {
+        return "无权限更新此 cube";
+    }
+
+    public String getNOT_STREAMING_CUBE() {
+        return "Cube '%s' 不是实时 cube";
+    }
+
+    public String getCUBE_RENAME() {
+        return "Cube 不能被重命名";
+    }
+
+    // Model
+    public String getINVALID_MODEL_DEFINITION() {
+        return "非法模型定义";
+    }
+
+    public String getEMPTY_MODEL_NAME() {
+        return "模型名称不可为空";
+    }
+
+    public String getINVALID_MODEL_NAME() {
+        return "非法模型名称 '%s', 仅支持字母, 数字和下划线";
+    }
+
+    public String getDUPLICATE_MODEL_NAME() {
+        return "模型名称 '%s' 已存在, 不能被创建";
+    }
+
+    public String getDROP_REFERENCED_MODEL() {
+        return "模型被 Cube '%s' 引用, 不能被删除";
+    }
+
+    public String getUPDATE_MODEL_KEY_FIELD() {
+        return "已使用的维度和度量与现有的连接树不能被修改";
+    }
+
+    public String getBROKEN_MODEL_DESC() {
+        return "损坏的模型描述 '%s'";
+    }
+
+    public String getMODEL_NOT_FOUND() {
+        return "找不到模型 '%s'";
+    }
+
+    public String getEMPTY_PROJECT_NAME() {
+        return "项目名称不可为空";
+    }
+
+    public String getEMPTY_NEW_MODEL_NAME() {
+        return "新模型名称不可为空";
+    }
+
+    public String getUPDATE_MODEL_NO_RIGHT() {
+        return "无权限更新此模型";
+    }
+
+    public String getMODEL_RENAME() {
+        return "模型不能被重命名";
+    }
+
+    // Job
+    public String getILLEGAL_TIME_FILTER() {
+        return "非法时间条件: %s";
+    }
+
+    public String getILLEGAL_EXECUTABLE_STATE() {
+        return "非法状态: %s";
+    }
+
+    public String getILLEGAL_JOB_TYPE() {
+        return "非法任务类型, id: %s.";
+    }
+
+    // Acl
+    public String getUSER_NOT_EXIST() {
+        return "用户 '%s' 不存在, 请确认用户是否曾经登陆";
+    }
+
+    // Project
+    public String getINVALID_PROJECT_NAME() {
+        return "非法项目名词 '%s', 仅支持字母, 数字和下划线";
+    }
+
+    public String getPROJECT_ALREADY_EXIST() {
+        return "项目 '%s' 已存在";
+    }
+
+    public String getPROJECT_NOT_FOUND() {
+        return "找不到项目 '%s'";
+    }
+
+    // Table
+    public String getHIVE_TABLE_NOT_FOUND() {
+        return "找不到 Hive 表 '%s'";
+    }
+
+    public String getTABLE_DESC_NOT_FOUND() {
+        return "找不到表 '%s'";
+    }
+
+    public String getTABLE_IN_USE_BY_MODEL() {
+        return "表已被模型 '%s' 使用";
+    }
+
+    // Cube Desc
+    public String getCUBE_DESC_NOT_FOUND() {
+        return "找不到 cube '%s'";
+    }
+
+    // Streaming
+    public String getINVALID_TABLE_DESC_DEFINITION() {
+        return "非法表定义";
+    }
+
+    public String getINVALID_STREAMING_CONFIG_DEFINITION() {
+        return "非法 StreamingConfig 定义";
+    }
+
+    public String getINVALID_KAFKA_CONFIG_DEFINITION() {
+        return "非法 KafkaConfig 定义";
+    }
+
+    public String getADD_STREAMING_TABLE_FAIL() {
+        return "添加流式表失败";
+    }
+
+    public String getEMPTY_STREAMING_CONFIG_NAME() {
+        return "StreamingConfig 名称不可为空";
+    }
+
+    public String getSTREAMING_CONFIG_ALREADY_EXIST() {
+        return "StreamingConfig '%s' 已存在";
+    }
+
+    public String getSAVE_STREAMING_CONFIG_FAIL() {
+        return "保存 StreamingConfig 失败";
+    }
+
+    public String getKAFKA_CONFIG_ALREADY_EXIST() {
+        return "KafkaConfig '%s' 已存在";
+    }
+
+    public String getCREATE_KAFKA_CONFIG_FAIL() {
+        return "StreamingConfig 已创建, 但 KafkaConfig 创建失败";
+    }
+
+    public String getSAVE_KAFKA_CONFIG_FAIL() {
+        return "KafkaConfig 保存失败";
+    }
+
+    public String getROLLBACK_STREAMING_CONFIG_FAIL() {
+        return "操作失败, 并且回滚已创建的 StreamingConfig 失败";
+    }
+
+    public String getROLLBACK_KAFKA_CONFIG_FAIL() {
+        return "操作失败, 并且回滚已创建的 KafkaConfig 失败";
+    }
+
+    public String getUPDATE_STREAMING_CONFIG_NO_RIGHT() {
+        return "无权限更新此 StreamingConfig";
+    }
+
+    public String getUPDATE_KAFKA_CONFIG_NO_RIGHT() {
+        return "无权限更新此 KafkaConfig";
+    }
+
+    public String getSTREAMING_CONFIG_NOT_FOUND() {
+        return "找不到 StreamingConfig '%s'";
+    }
+
+    // Query
+    public String getQUERY_NOT_ALLOWED() {
+        return "'%s' 模式不支持查询";
+    }
+
+    public String getNOT_SUPPORTED_SQL() {
+        return "不支持的 SQL";
+    }
+
+    public String getTABLE_META_INCONSISTENT() {
+        return "表元数据与JDBC 元数据不一致";
+    }
+
+    public String getCOLUMN_META_INCONSISTENT() {
+        return "列元数据与JDBC 元数据不一致";
+    }
+
+    // Access
+    public String getACL_PERMISSION_REQUIRED() {
+        return "需要授权";
+    }
+
+    public String getSID_REQUIRED() {
+        return "找不到 Sid";
+    }
+
+    public String getREVOKE_ADMIN_PERMISSION() {
+        return "不能取消创建者的管理员权限";
+    }
+
+    public String getACE_ID_REQUIRED() {
+        return "找不到 Ace id";
+    }
+
+    // Admin
+    public String getGET_ENV_CONFIG_FAIL() {
+        return "无法获取 Kylin env Config";
+    }
+
+    // User
+    public String getAUTH_INFO_NOT_FOUND() {
+        return "找不到权限信息";
+    }
+
+    public String getUSER_NOT_FOUND() {
+        return "找不到用户 '%s'";
+    }
+
+    // Diagnosis
+    public String getDIAG_NOT_FOUND() {
+        return "在 %s 找不到 diag.sh";
+    }
+
+    public String getGENERATE_DIAG_PACKAGE_FAIL() {
+        return "无法生成诊断包";
+    }
+
+    public String getDIAG_PACKAGE_NOT_AVAILABLE() {
+        return "诊断包不可用, 路径: %s";
+    }
+
+    public String getDIAG_PACKAGE_NOT_FOUND() {
+        return "找不到诊断包, 路径: %s";
+    }
+
+    // Encoding
+    public String getVALID_ENCODING_NOT_AVAILABLE() {
+        return "无法为数据类型: %s 提供合法的编码";
+    }
+
+    // ExternalFilter
+    public String getFILTER_ALREADY_EXIST() {
+        return "Filter '%s' 已存在";
+    }
+
+    public String getFILTER_NOT_FOUND() {
+        return "找不到 filter '%s'";
+    }
+
+    // Basic
+    public String getHBASE_FAIL() {
+        return "HBase 遇到错误: '%s'";
+    }
+
+    public String getHBASE_FAIL_WITHOUT_DETAIL() {
+        return "HBase 遇到错误";
+    }
 }

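CnMessage overrides each getter of Message with a Chinese string that keeps the same '%s' placeholders, so callers stay locale-agnostic. A short usage sketch follows; the helper class, the cube name, the String.format call, and the BadRequestException import path are illustrative assumptions about how these getters are consumed, not code taken from the repository.

    // Illustration only: how a caller might resolve and format a localized message.
    import org.apache.kylin.rest.exception.BadRequestException;
    import org.apache.kylin.rest.msg.Message;
    import org.apache.kylin.rest.msg.MsgPicker;

    class MessageUsageExample { // hypothetical helper, for illustration only
        static void rejectMissingCube(String cubeName) {
            // MsgPicker.getMsg() presumably returns CnMessage or the English Message,
            // depending on the language set earlier for the current request.
            Message msg = MsgPicker.getMsg();
            // The '%s' placeholder is assumed to be filled in with String.format.
            throw new BadRequestException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
        }
    }
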
http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/msg/Message.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/msg/Message.java b/server-base/src/main/java/org/apache/kylin/rest/msg/Message.java
index 3317373..f4bcda7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/msg/Message.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/msg/Message.java
@@ -37,549 +37,378 @@ public class Message {
     }
 
     // Cube
-    private final String CUBE_NOT_FOUND = "Cannot find cube '%s'.";
-    private final String SEG_NOT_FOUND = "Cannot find segment '%s'.";
-    private final String KAFKA_DEP_NOT_FOUND = "Could not find Kafka dependency.";
-    private final String BUILD_DRAFT_CUBE = "Could not build draft cube.";
-    private final String BUILD_BROKEN_CUBE = "Broken cube '%s' can't be built.";
-    private final String INCONSISTENT_CUBE_DESC_SIGNATURE = "Inconsistent cube desc signature for '%s', if it's right after an upgrade, please try 'Edit CubeDesc' to delete the 'signature' field. Or use 'bin/metastore.sh refresh-cube-signature' to batch refresh all cubes' signatures, then reload metadata to take effect.";
-    private final String DELETE_NOT_FIRST_LAST_SEG = "Cannot delete segment '%s' as it is neither the first nor the last segment.";
-    private final String DELETE_NOT_READY_SEG = "Cannot delete segment '%s' as its status is not READY. Discard the on-going job for it.";
-    private final String INVALID_BUILD_TYPE = "Invalid build type: '%s'.";
-    private final String NO_ACL_ENTRY = "There should have been an Acl entry for ObjectIdentity '%s'.";
-    private final String ACL_INFO_NOT_FOUND = "Unable to find ACL information for object identity '%s'.";
-    private final String ACL_DOMAIN_NOT_FOUND = "Acl domain object required.";
-    private final String PARENT_ACL_NOT_FOUND = "Parent acl required.";
-    private final String DISABLE_NOT_READY_CUBE = "Only ready cube can be disabled, status of '%s' is %s.";
-    private final String PURGE_NOT_DISABLED_CUBE = "Only disabled cube can be purged, status of '%s' is %s.";
-    private final String CLONE_BROKEN_CUBE = "Broken cube '%s' can't be cloned.";
-    private final String INVALID_CUBE_NAME = "Invalid Cube name '%s', only letters, numbers and underline supported.";
-    private final String CUBE_ALREADY_EXIST = "The cube named '%s' already exists.";
-    private final String CUBE_DESC_ALREADY_EXIST = "The cube desc named '%s' already exists.";
-    private final String BROKEN_CUBE_DESC = "Broken cube desc named '%s'.";
-    private final String ENABLE_NOT_DISABLED_CUBE = "Only disabled cube can be enabled, status of '%s' is %s.";
-    private final String NO_READY_SEGMENT = "Cube '%s' doesn't contain any READY segment.";
-    private final String ENABLE_WITH_RUNNING_JOB = "Enable is not allowed with a running job.";
-    private final String DISCARD_JOB_FIRST = "The cube '%s' has running or failed job, please discard it and try again.";
-    private final String IDENTITY_EXIST_CHILDREN = "Children exists for '%s'.";
-    private final String INVALID_CUBE_DEFINITION = "The cube definition is invalid.";
-    private final String EMPTY_CUBE_NAME = "Cube name should not be empty.";
-    private final String USE_DRAFT_MODEL = "Cannot use draft model '%s'.";
-    private final String UNEXPECTED_CUBE_DESC_STATUS = "CubeDesc status should not be %s.";
-    private final String EXPECTED_CUBE_DESC_STATUS = "CubeDesc status should be %s.";
-    private final String CUBE_DESC_RENAME = "Cube Desc renaming is not allowed: desc.getName(): '%s', cubeRequest.getCubeName(): '%s'.";
-    private final String INCONSISTENT_CUBE_DESC = "CubeDesc '%s' is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
-    private final String UPDATE_CUBE_NO_RIGHT = "You don't have right to update this cube.";
-    private final String NOT_STREAMING_CUBE = "Cube '%s' is not a Streaming Cube.";
-    private final String NO_DRAFT_CUBE_TO_UPDATE = "Cube '%s' has no draft to update.";
-    private final String NON_DRAFT_CUBE_ALREADY_EXIST = "A non-draft cube with name '%s' already exists.";
-    private final String CUBE_RENAME = "Cube renaming is not allowed.";
-    private final String ORIGIN_CUBE_NOT_FOUND = "Origin cube not found.";
-
-    // Model
-    private final String INVALID_MODEL_DEFINITION = "The data model definition is invalid.";
-    private final String EMPTY_MODEL_NAME = "Model name should not be empty.";
-    private final String INVALID_MODEL_NAME = "Invalid Model name '%s', only letters, numbers and underline supported.";
-    private final String UNEXPECTED_MODEL_STATUS = "Model status should not be %s.";
-    private final String EXPECTED_MODEL_STATUS = "Model status should be %s.";
-    private final String DUPLICATE_MODEL_NAME = "Model name '%s' is duplicated, could not be created.";
-    private final String DROP_REFERENCED_MODEL = "Model is referenced by Cube '%s' , could not dropped";
-    private final String UPDATE_MODEL_KEY_FIELD = "Dimensions and measures in use and existing join tree cannot be modified.";
-    private final String BROKEN_MODEL_DESC = "Broken model desc named '%s'.";
-    private final String MODEL_NOT_FOUND = "Data Model with name '%s' not found.";
-    private final String EMPTY_PROJECT_NAME = "Project name should not be empty.";
-    private final String EMPTY_NEW_MODEL_NAME = "New model name should not be empty";
-    private final String UPDATE_MODEL_NO_RIGHT = "You don't have right to update this model.";
-    private final String NO_DRAFT_MODEL_TO_UPDATE = "Model '%s' has no draft to update.";
-    private final String NON_DRAFT_MODEL_ALREADY_EXIST = "A non-draft model with name '%s' already exists.";
-    private final String MODEL_RENAME = "Model renaming is not allowed.";
-    private final String ORIGIN_MODEL_NOT_FOUND = "Origin model not found.";
-
-    // Job
-    private final String ILLEGAL_TIME_FILTER = "Illegal timeFilter for job history: %s.";
-    private final String ILLEGAL_EXECUTABLE_STATE = "Illegal status: %s.";
-    private final String INVALID_JOB_STATE = "Invalid state: %s.";
-    private final String ILLEGAL_JOB_TYPE = "Illegal job type, id: %s.";
-    private final String INVALID_JOB_STEP_STATE = "Invalid state: %s.";
-
-    // Acl
-    private final String USER_NOT_EXIST = "User '%s' does not exist. Please make sure the user has logged in before";
-
-    // Project
-    private final String INVALID_PROJECT_NAME = "Invalid Project name '%s', only letters, numbers and underline supported.";
-    private final String PROJECT_ALREADY_EXIST = "The project named '%s' already exists.";
-    private final String PROJECT_NOT_FOUND = "Cannot find project '%s'.";
-
-    // Table
-    private final String HIVE_TABLE_NOT_FOUND = "Cannot find Hive table '%s'. ";
-    private final String TABLE_DESC_NOT_FOUND = "Cannot find table descriptor '%s'.";
-    private final String TABLE_IN_USE_BY_MODEL = "Table is already in use by models '%s'.";
-
-    // Cube Desc
-    private final String CUBE_DESC_NOT_FOUND = "Cannot find cube desc '%s'.";
-
-    // Streaming
-    private final String INVALID_TABLE_DESC_DEFINITION = "The TableDesc definition is invalid.";
-    private final String INVALID_STREAMING_CONFIG_DEFINITION = "The StreamingConfig definition is invalid.";
-    private final String INVALID_KAFKA_CONFIG_DEFINITION = "The KafkaConfig definition is invalid.";
-    private final String ADD_STREAMING_TABLE_FAIL = "Failed to add streaming table.";
-    private final String EMPTY_STREAMING_CONFIG_NAME = "StreamingConfig name should not be empty.";
-    private final String STREAMING_CONFIG_ALREADY_EXIST = "The streamingConfig named '%s' already exists.";
-    private final String SAVE_STREAMING_CONFIG_FAIL = "Failed to save StreamingConfig.";
-    private final String KAFKA_CONFIG_ALREADY_EXIST = "The kafkaConfig named '%s' already exists.";
-    private final String CREATE_KAFKA_CONFIG_FAIL = "StreamingConfig is created, but failed to create KafkaConfig.";
-    private final String SAVE_KAFKA_CONFIG_FAIL = "Failed to save KafkaConfig.";
-    private final String ROLLBACK_STREAMING_CONFIG_FAIL = "Action failed and failed to rollback the created streaming config.";
-    private final String ROLLBACK_KAFKA_CONFIG_FAIL = "Action failed and failed to rollback the created kafka config.";
-    private final String UPDATE_STREAMING_CONFIG_NO_RIGHT = "You don't have right to update this StreamingConfig.";
-    private final String UPDATE_KAFKA_CONFIG_NO_RIGHT = "You don't have right to update this KafkaConfig.";
-    private final String STREAMING_CONFIG_NOT_FOUND = "StreamingConfig with name '%s' not found.";
-
-    // Query
-    private final String QUERY_NOT_ALLOWED = "Query is not allowed in '%s' mode.";
-    private final String NOT_SUPPORTED_SQL = "Not Supported SQL.";
-    private final String TABLE_META_INCONSISTENT = "Table metadata inconsistent with JDBC meta.";
-    private final String COLUMN_META_INCONSISTENT = "Column metadata inconsistent with JDBC meta.";
-
-    // Access
-    private final String ACL_PERMISSION_REQUIRED = "Acl permission required.";
-    private final String SID_REQUIRED = "Sid required.";
-    private final String REVOKE_ADMIN_PERMISSION = "Can't revoke admin permission of owner.";
-    private final String ACE_ID_REQUIRED = "Ace id required.";
-
-    // Admin
-    private final String GET_ENV_CONFIG_FAIL = "Failed to get Kylin env Config.";
-
-    // User
-    private final String AUTH_INFO_NOT_FOUND = "Can not find authentication information.";
-    private final String USER_NOT_FOUND = "User '%s' not found.";
-
-    // Diagnosis
-    private final String DIAG_NOT_FOUND = "diag.sh not found at %s.";
-    private final String GENERATE_DIAG_PACKAGE_FAIL = "Failed to generate diagnosis package.";
-    private final String DIAG_PACKAGE_NOT_AVAILABLE = "Diagnosis package is not available in directory: %s.";
-    private final String DIAG_PACKAGE_NOT_FOUND = "Diagnosis package not found in directory: %s.";
-
-    // Encoding
-    private final String VALID_ENCODING_NOT_AVAILABLE = "can't provide valid encodings for datatype: %s.";
-
-    // ExternalFilter
-    private final String FILTER_ALREADY_EXIST = "The filter named '%s' already exists.";
-    private final String FILTER_NOT_FOUND = "The filter named '%s' does not exist.";
-
-    // Basic
-    private final String HBASE_FAIL = "HBase failed: '%s'";
-    private final String HBASE_FAIL_WITHOUT_DETAIL = "HBase failed.";
-
     public String getCUBE_NOT_FOUND() {
-        return CUBE_NOT_FOUND;
+        return "Cannot find cube '%s'.";
     }
 
     public String getSEG_NOT_FOUND() {
-        return SEG_NOT_FOUND;
+        return "Cannot find segment '%s'.";
     }
 
     public String getKAFKA_DEP_NOT_FOUND() {
-        return KAFKA_DEP_NOT_FOUND;
+        return "Could not find Kafka dependency.";
     }
 
     public String getBUILD_DRAFT_CUBE() {
-        return BUILD_DRAFT_CUBE;
+        return "Could not build draft cube.";
     }
 
     public String getBUILD_BROKEN_CUBE() {
-        return BUILD_BROKEN_CUBE;
+        return "Broken cube '%s' can't be built.";
     }
 
     public String getINCONSISTENT_CUBE_DESC_SIGNATURE() {
-        return INCONSISTENT_CUBE_DESC_SIGNATURE;
+        return "Inconsistent cube desc signature for '%s', if it's right after an upgrade, please try 'Edit CubeDesc' to delete the 'signature' field. Or use 'bin/metastore.sh refresh-cube-signature' to batch refresh all cubes' signatures, then reload metadata to take effect.";
     }
 
     public String getDELETE_NOT_FIRST_LAST_SEG() {
-        return DELETE_NOT_FIRST_LAST_SEG;
+        return "Cannot delete segment '%s' as it is neither the first nor the last segment.";
     }
 
     public String getDELETE_NOT_READY_SEG() {
-        return DELETE_NOT_READY_SEG;
+        return "Cannot delete segment '%s' as its status is not READY. Discard the on-going job for it.";
     }
 
     public String getINVALID_BUILD_TYPE() {
-        return INVALID_BUILD_TYPE;
+        return "Invalid build type: '%s'.";
     }
 
     public String getNO_ACL_ENTRY() {
-        return NO_ACL_ENTRY;
+        return "There should have been an Acl entry for ObjectIdentity '%s'.";
     }
 
     public String getACL_INFO_NOT_FOUND() {
-        return ACL_INFO_NOT_FOUND;
+        return "Unable to find ACL information for object identity '%s'.";
     }
 
     public String getACL_DOMAIN_NOT_FOUND() {
-        return ACL_DOMAIN_NOT_FOUND;
+        return "Acl domain object required.";
     }
 
     public String getPARENT_ACL_NOT_FOUND() {
-        return PARENT_ACL_NOT_FOUND;
+        return "Parent acl required.";
     }
 
     public String getDISABLE_NOT_READY_CUBE() {
-        return DISABLE_NOT_READY_CUBE;
+        return "Only ready cube can be disabled, status of '%s' is %s.";
     }
 
     public String getPURGE_NOT_DISABLED_CUBE() {
-        return PURGE_NOT_DISABLED_CUBE;
+        return "Only disabled cube can be purged, status of '%s' is %s.";
     }
 
     public String getCLONE_BROKEN_CUBE() {
-        return CLONE_BROKEN_CUBE;
+        return "Broken cube '%s' can't be cloned.";
     }
 
     public String getINVALID_CUBE_NAME() {
-        return INVALID_CUBE_NAME;
+        return "Invalid Cube name '%s', only letters, numbers and underline supported.";
     }
 
     public String getCUBE_ALREADY_EXIST() {
-        return CUBE_ALREADY_EXIST;
+        return "The cube named '%s' already exists.";
     }
 
     public String getCUBE_DESC_ALREADY_EXIST() {
-        return CUBE_DESC_ALREADY_EXIST;
+        return "The cube desc named '%s' already exists.";
     }
 
     public String getBROKEN_CUBE_DESC() {
-        return BROKEN_CUBE_DESC;
+        return "Broken cube desc named '%s'.";
     }
 
     public String getENABLE_NOT_DISABLED_CUBE() {
-        return ENABLE_NOT_DISABLED_CUBE;
+        return "Only disabled cube can be enabled, status of '%s' is %s.";
     }
 
     public String getNO_READY_SEGMENT() {
-        return NO_READY_SEGMENT;
+        return "Cube '%s' doesn't contain any READY segment.";
     }
 
     public String getENABLE_WITH_RUNNING_JOB() {
-        return ENABLE_WITH_RUNNING_JOB;
+        return "Enable is not allowed with a running job.";
     }
 
     public String getDISCARD_JOB_FIRST() {
-        return DISCARD_JOB_FIRST;
+        return "The cube '%s' has running or failed job, please discard it and try again.";
     }
 
     public String getIDENTITY_EXIST_CHILDREN() {
-        return IDENTITY_EXIST_CHILDREN;
+        return "Children exists for '%s'.";
     }
 
     public String getINVALID_CUBE_DEFINITION() {
-        return INVALID_CUBE_DEFINITION;
+        return "The cube definition is invalid.";
     }
 
     public String getEMPTY_CUBE_NAME() {
-        return EMPTY_CUBE_NAME;
+        return "Cube name should not be empty.";
     }
 
     public String getUSE_DRAFT_MODEL() {
-        return USE_DRAFT_MODEL;
-    }
-
-    public String getUNEXPECTED_CUBE_DESC_STATUS() {
-        return UNEXPECTED_CUBE_DESC_STATUS;
-    }
-
-    public String getEXPECTED_CUBE_DESC_STATUS() {
-        return EXPECTED_CUBE_DESC_STATUS;
-    }
-
-    public String getCUBE_DESC_RENAME() {
-        return CUBE_DESC_RENAME;
+        return "Cannot use draft model '%s'.";
     }
 
     public String getINCONSISTENT_CUBE_DESC() {
-        return INCONSISTENT_CUBE_DESC;
+        return "CubeDesc '%s' is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
     }
 
     public String getUPDATE_CUBE_NO_RIGHT() {
-        return UPDATE_CUBE_NO_RIGHT;
+        return "You don't have right to update this cube.";
     }
 
     public String getNOT_STREAMING_CUBE() {
-        return NOT_STREAMING_CUBE;
-    }
-
-    public String getNO_DRAFT_CUBE_TO_UPDATE() {
-        return NO_DRAFT_CUBE_TO_UPDATE;
-    }
-
-    public String getNON_DRAFT_CUBE_ALREADY_EXIST() {
-        return NON_DRAFT_CUBE_ALREADY_EXIST;
+        return "Cube '%s' is not a Streaming Cube.";
     }
 
     public String getCUBE_RENAME() {
-        return CUBE_RENAME;
-    }
-
-    public String getORIGIN_CUBE_NOT_FOUND() {
-        return ORIGIN_CUBE_NOT_FOUND;
+        return "Cube renaming is not allowed.";
     }
 
+    // Model
     public String getINVALID_MODEL_DEFINITION() {
-        return INVALID_MODEL_DEFINITION;
+        return "The data model definition is invalid.";
     }
 
     public String getEMPTY_MODEL_NAME() {
-        return EMPTY_MODEL_NAME;
+        return "Model name should not be empty.";
     }
 
     public String getINVALID_MODEL_NAME() {
-        return INVALID_MODEL_NAME;
-    }
-
-    public String getUNEXPECTED_MODEL_STATUS() {
-        return UNEXPECTED_MODEL_STATUS;
-    }
-
-    public String getEXPECTED_MODEL_STATUS() {
-        return EXPECTED_MODEL_STATUS;
+        return "Invalid Model name '%s', only letters, numbers and underline supported.";
     }
 
     public String getDUPLICATE_MODEL_NAME() {
-        return DUPLICATE_MODEL_NAME;
+        return "Model name '%s' is duplicated, could not be created.";
     }
 
     public String getDROP_REFERENCED_MODEL() {
-        return DROP_REFERENCED_MODEL;
+        return "Model is referenced by Cube '%s' , could not dropped";
     }
 
     public String getUPDATE_MODEL_KEY_FIELD() {
-        return UPDATE_MODEL_KEY_FIELD;
+        return "Dimensions and measures in use and existing join tree cannot be modified.";
     }
 
     public String getBROKEN_MODEL_DESC() {
-        return BROKEN_MODEL_DESC;
+        return "Broken model desc named '%s'.";
     }
 
     public String getMODEL_NOT_FOUND() {
-        return MODEL_NOT_FOUND;
+        return "Data Model with name '%s' not found.";
     }
 
     public String getEMPTY_PROJECT_NAME() {
-        return EMPTY_PROJECT_NAME;
+        return "Project name should not be empty.";
     }
 
     public String getEMPTY_NEW_MODEL_NAME() {
-        return EMPTY_NEW_MODEL_NAME;
+        return "New model name should not be empty.";
     }
 
     public String getUPDATE_MODEL_NO_RIGHT() {
-        return UPDATE_MODEL_NO_RIGHT;
-    }
-
-    public String getNO_DRAFT_MODEL_TO_UPDATE() {
-        return NO_DRAFT_MODEL_TO_UPDATE;
-    }
-
-    public String getNON_DRAFT_MODEL_ALREADY_EXIST() {
-        return NON_DRAFT_MODEL_ALREADY_EXIST;
+        return "You don't have right to update this model.";
     }
 
     public String getMODEL_RENAME() {
-        return MODEL_RENAME;
-    }
-
-    public String getORIGIN_MODEL_NOT_FOUND() {
-        return ORIGIN_MODEL_NOT_FOUND;
+        return "Model renaming is not allowed.";
     }
 
+    // Job
     public String getILLEGAL_TIME_FILTER() {
-        return ILLEGAL_TIME_FILTER;
+        return "Illegal timeFilter: %s.";
     }
 
     public String getILLEGAL_EXECUTABLE_STATE() {
-        return ILLEGAL_EXECUTABLE_STATE;
-    }
-
-    public String getINVALID_JOB_STATE() {
-        return INVALID_JOB_STATE;
+        return "Illegal status: %s.";
     }
 
     public String getILLEGAL_JOB_TYPE() {
-        return ILLEGAL_JOB_TYPE;
-    }
-
-    public String getINVALID_JOB_STEP_STATE() {
-        return INVALID_JOB_STEP_STATE;
+        return "Illegal job type, id: %s.";
     }
 
+    // Acl
     public String getUSER_NOT_EXIST() {
-        return USER_NOT_EXIST;
+        return "User '%s' does not exist. Please make sure the user has logged in before";
     }
 
+    // Project
     public String getINVALID_PROJECT_NAME() {
-        return INVALID_PROJECT_NAME;
+        return "Invalid Project name '%s', only letters, numbers and underline supported.";
     }
 
     public String getPROJECT_ALREADY_EXIST() {
-        return PROJECT_ALREADY_EXIST;
+        return "The project named '%s' already exists.";
     }
 
     public String getPROJECT_NOT_FOUND() {
-        return PROJECT_NOT_FOUND;
+        return "Cannot find project '%s'.";
     }
 
+    // Table
     public String getHIVE_TABLE_NOT_FOUND() {
-        return HIVE_TABLE_NOT_FOUND;
+        return "Cannot find Hive table '%s'.";
     }
 
     public String getTABLE_DESC_NOT_FOUND() {
-        return TABLE_DESC_NOT_FOUND;
+        return "Cannot find table descriptor '%s'.";
     }
 
     public String getTABLE_IN_USE_BY_MODEL() {
-        return TABLE_IN_USE_BY_MODEL;
+        return "Table is already in use by models '%s'.";
     }
 
+    // Cube Desc
     public String getCUBE_DESC_NOT_FOUND() {
-        return CUBE_DESC_NOT_FOUND;
+        return "Cannot find cube desc '%s'.";
     }
 
+    // Streaming
     public String getINVALID_TABLE_DESC_DEFINITION() {
-        return INVALID_TABLE_DESC_DEFINITION;
+        return "The TableDesc definition is invalid.";
     }
 
     public String getINVALID_STREAMING_CONFIG_DEFINITION() {
-        return INVALID_STREAMING_CONFIG_DEFINITION;
+        return "The StreamingConfig definition is invalid.";
     }
 
     public String getINVALID_KAFKA_CONFIG_DEFINITION() {
-        return INVALID_KAFKA_CONFIG_DEFINITION;
+        return "The KafkaConfig definition is invalid.";
     }
 
     public String getADD_STREAMING_TABLE_FAIL() {
-        return ADD_STREAMING_TABLE_FAIL;
+        return "Failed to add streaming table.";
     }
 
     public String getEMPTY_STREAMING_CONFIG_NAME() {
-        return EMPTY_STREAMING_CONFIG_NAME;
+        return "StreamingConfig name should not be empty.";
     }
 
     public String getSTREAMING_CONFIG_ALREADY_EXIST() {
-        return STREAMING_CONFIG_ALREADY_EXIST;
+        return "The streamingConfig named '%s' already exists.";
     }
 
     public String getSAVE_STREAMING_CONFIG_FAIL() {
-        return SAVE_STREAMING_CONFIG_FAIL;
+        return "Failed to save StreamingConfig.";
     }
 
     public String getKAFKA_CONFIG_ALREADY_EXIST() {
-        return KAFKA_CONFIG_ALREADY_EXIST;
+        return "The kafkaConfig named '%s' already exists.";
     }
 
     public String getCREATE_KAFKA_CONFIG_FAIL() {
-        return CREATE_KAFKA_CONFIG_FAIL;
+        return "StreamingConfig is created, but failed to create KafkaConfig.";
     }
 
     public String getSAVE_KAFKA_CONFIG_FAIL() {
-        return SAVE_KAFKA_CONFIG_FAIL;
+        return "Failed to save KafkaConfig.";
     }
 
     public String getROLLBACK_STREAMING_CONFIG_FAIL() {
-        return ROLLBACK_STREAMING_CONFIG_FAIL;
+        return "Action failed and failed to rollback the created streaming config.";
     }
 
     public String getROLLBACK_KAFKA_CONFIG_FAIL() {
-        return ROLLBACK_KAFKA_CONFIG_FAIL;
+        return "Action failed and failed to rollback the created kafka config.";
     }
 
     public String getUPDATE_STREAMING_CONFIG_NO_RIGHT() {
-        return UPDATE_STREAMING_CONFIG_NO_RIGHT;
+        return "You don't have right to update this StreamingConfig.";
     }
 
     public String getUPDATE_KAFKA_CONFIG_NO_RIGHT() {
-        return UPDATE_KAFKA_CONFIG_NO_RIGHT;
+        return "You don't have right to update this KafkaConfig.";
     }
 
     public String getSTREAMING_CONFIG_NOT_FOUND() {
-        return STREAMING_CONFIG_NOT_FOUND;
+        return "StreamingConfig with name '%s' not found.";
     }
 
+    // Query
     public String getQUERY_NOT_ALLOWED() {
-        return QUERY_NOT_ALLOWED;
+        return "Query is not allowed in '%s' mode.";
     }
 
     public String getNOT_SUPPORTED_SQL() {
-        return NOT_SUPPORTED_SQL;
+        return "Not Supported SQL.";
     }
 
     public String getTABLE_META_INCONSISTENT() {
-        return TABLE_META_INCONSISTENT;
+        return "Table metadata inconsistent with JDBC meta.";
     }
 
     public String getCOLUMN_META_INCONSISTENT() {
-        return COLUMN_META_INCONSISTENT;
+        return "Column metadata inconsistent with JDBC meta.";
     }
 
+    // Access
     public String getACL_PERMISSION_REQUIRED() {
-        return ACL_PERMISSION_REQUIRED;
+        return "Acl permission required.";
     }
 
     public String getSID_REQUIRED() {
-        return SID_REQUIRED;
+        return "Sid required.";
     }
 
     public String getREVOKE_ADMIN_PERMISSION() {
-        return REVOKE_ADMIN_PERMISSION;
+        return "Can't revoke admin permission of owner.";
     }
 
     public String getACE_ID_REQUIRED() {
-        return ACE_ID_REQUIRED;
+        return "Ace id required.";
     }
 
+    // Admin
     public String getGET_ENV_CONFIG_FAIL() {
-        return GET_ENV_CONFIG_FAIL;
+        return "Failed to get Kylin env Config.";
     }
 
+    // User
     public String getAUTH_INFO_NOT_FOUND() {
-        return AUTH_INFO_NOT_FOUND;
+        return "Can not find authentication information.";
     }
 
     public String getUSER_NOT_FOUND() {
-        return USER_NOT_FOUND;
+        return "User '%s' not found.";
     }
 
+    // Diagnosis
     public String getDIAG_NOT_FOUND() {
-        return DIAG_NOT_FOUND;
+        return "diag.sh not found at %s.";
     }
 
     public String getGENERATE_DIAG_PACKAGE_FAIL() {
-        return GENERATE_DIAG_PACKAGE_FAIL;
+        return "Failed to generate diagnosis package.";
     }
 
     public String getDIAG_PACKAGE_NOT_AVAILABLE() {
-        return DIAG_PACKAGE_NOT_AVAILABLE;
+        return "Diagnosis package is not available in directory: %s.";
     }
 
     public String getDIAG_PACKAGE_NOT_FOUND() {
-        return DIAG_PACKAGE_NOT_FOUND;
+        return "Diagnosis package not found in directory: %s.";
     }
 
+    // Encoding
     public String getVALID_ENCODING_NOT_AVAILABLE() {
-        return VALID_ENCODING_NOT_AVAILABLE;
+        return "Can not provide valid encodings for datatype: %s.";
     }
 
+    // ExternalFilter
     public String getFILTER_ALREADY_EXIST() {
-        return FILTER_ALREADY_EXIST;
+        return "The filter named '%s' already exists.";
     }
 
     public String getFILTER_NOT_FOUND() {
-        return FILTER_NOT_FOUND;
+        return "The filter named '%s' does not exist.";
     }
 
+    // Basic
     public String getHBASE_FAIL() {
-        return HBASE_FAIL;
+        return "HBase failed: '%s'";
     }
 
     public String getHBASE_FAIL_WITHOUT_DETAIL() {
-        return HBASE_FAIL_WITHOUT_DETAIL;
+        return "HBase failed.";
     }
 }
\ No newline at end of file


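The getters restored above return printf-style templates rather than finished sentences; a caller is expected to fill in the '%s' placeholders before surfacing the text. A minimal caller-side sketch (the msg variable, the sample argument, and the exception type are illustrative only, not part of this patch):

    // Hypothetical usage: format a template returned by one of the getters above.
    String template = msg.getSTREAMING_CONFIG_ALREADY_EXIST(); // "The streamingConfig named '%s' already exists."
    String rendered = String.format(java.util.Locale.ROOT, template, "site_visit_streaming");
    throw new IllegalArgumentException(rendered);
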
[26/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/atopcalcite/src/main/java/org/apache/calcite/tools/Programs.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/tools/Programs.java b/atopcalcite/src/main/java/org/apache/calcite/tools/Programs.java
index ddc1c12..ec33c4c 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/tools/Programs.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/tools/Programs.java
@@ -84,285 +84,355 @@ import com.google.common.collect.Lists;
  */
 
 public class Programs {
-    private static final Function<RuleSet, Program> RULE_SET_TO_PROGRAM = new Function<RuleSet, Program>() {
+  private static final Function<RuleSet, Program> RULE_SET_TO_PROGRAM =
+      new Function<RuleSet, Program>() {
         public Program apply(RuleSet ruleSet) {
-            return of(ruleSet);
+          return of(ruleSet);
         }
-    };
-
-    public static final ImmutableList<RelOptRule> CALC_RULES = ImmutableList.of(NoneToBindableConverterRule.INSTANCE,
-            EnumerableRules.ENUMERABLE_CALC_RULE, EnumerableRules.ENUMERABLE_FILTER_TO_CALC_RULE,
-            EnumerableRules.ENUMERABLE_PROJECT_TO_CALC_RULE, CalcMergeRule.INSTANCE, FilterCalcMergeRule.INSTANCE,
-            ProjectCalcMergeRule.INSTANCE, FilterToCalcRule.INSTANCE, ProjectToCalcRule.INSTANCE,
-            CalcMergeRule.INSTANCE,
-
-            // REVIEW jvs 9-Apr-2006: Do we still need these two?  Doesn't the
-            // combination of CalcMergeRule, FilterToCalcRule, and
-            // ProjectToCalcRule have the same effect?
-            FilterCalcMergeRule.INSTANCE, ProjectCalcMergeRule.INSTANCE);
-
-    /** Program that converts filters and projects to {@link Calc}s. */
-    public static final Program CALC_PROGRAM = calc(DefaultRelMetadataProvider.INSTANCE);
-
-    /** Program that expands sub-queries. */
-    public static final Program SUB_QUERY_PROGRAM = subQuery(DefaultRelMetadataProvider.INSTANCE);
-
-    public static final ImmutableSet<RelOptRule> RULE_SET = ImmutableSet.of(EnumerableRules.ENUMERABLE_JOIN_RULE,
-            EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE, EnumerableRules.ENUMERABLE_SEMI_JOIN_RULE,
-            EnumerableRules.ENUMERABLE_CORRELATE_RULE, EnumerableRules.ENUMERABLE_PROJECT_RULE,
-            EnumerableRules.ENUMERABLE_FILTER_RULE, EnumerableRules.ENUMERABLE_AGGREGATE_RULE,
-            EnumerableRules.ENUMERABLE_SORT_RULE, EnumerableRules.ENUMERABLE_LIMIT_RULE,
-            EnumerableRules.ENUMERABLE_UNION_RULE, EnumerableRules.ENUMERABLE_INTERSECT_RULE,
-            EnumerableRules.ENUMERABLE_MINUS_RULE, EnumerableRules.ENUMERABLE_TABLE_MODIFICATION_RULE,
-            EnumerableRules.ENUMERABLE_VALUES_RULE, EnumerableRules.ENUMERABLE_WINDOW_RULE, SemiJoinRule.PROJECT,
-            SemiJoinRule.JOIN, TableScanRule.INSTANCE,
-            CalcitePrepareImpl.COMMUTE ? JoinAssociateRule.INSTANCE : ProjectMergeRule.INSTANCE,
-            AggregateStarTableRule.INSTANCE, AggregateStarTableRule.INSTANCE2, FilterTableScanRule.INSTANCE,
-            FilterProjectTransposeRule.INSTANCE, FilterJoinRule.FILTER_ON_JOIN,
-            AggregateExpandDistinctAggregatesRule.INSTANCE, AggregateReduceFunctionsRule.INSTANCE,
-            FilterAggregateTransposeRule.INSTANCE, JoinCommuteRule.INSTANCE, JoinPushThroughJoinRule.RIGHT,
-            JoinPushThroughJoinRule.LEFT, SortProjectTransposeRule.INSTANCE);
-
-    // private constructor for utility class
-    private Programs() {
-    }
-
-    /** Creates a program that executes a rule set. */
-    public static Program of(RuleSet ruleSet) {
-        return new RuleSetProgram(ruleSet);
-    }
-
-    /** Creates a list of programs based on an array of rule sets. */
-    public static List<Program> listOf(RuleSet... ruleSets) {
-        return Lists.transform(Arrays.asList(ruleSets), RULE_SET_TO_PROGRAM);
-    }
-
-    /** Creates a list of programs based on a list of rule sets. */
-    public static List<Program> listOf(List<RuleSet> ruleSets) {
-        return Lists.transform(ruleSets, RULE_SET_TO_PROGRAM);
+      };
+
+  public static final ImmutableList<RelOptRule> CALC_RULES =
+      ImmutableList.of(
+          NoneToBindableConverterRule.INSTANCE,
+          EnumerableRules.ENUMERABLE_CALC_RULE,
+          EnumerableRules.ENUMERABLE_FILTER_TO_CALC_RULE,
+          EnumerableRules.ENUMERABLE_PROJECT_TO_CALC_RULE,
+          CalcMergeRule.INSTANCE,
+          FilterCalcMergeRule.INSTANCE,
+          ProjectCalcMergeRule.INSTANCE,
+          FilterToCalcRule.INSTANCE,
+          ProjectToCalcRule.INSTANCE,
+          CalcMergeRule.INSTANCE,
+
+          // REVIEW jvs 9-Apr-2006: Do we still need these two?  Doesn't the
+          // combination of CalcMergeRule, FilterToCalcRule, and
+          // ProjectToCalcRule have the same effect?
+          FilterCalcMergeRule.INSTANCE,
+          ProjectCalcMergeRule.INSTANCE);
+
+  /** Program that converts filters and projects to {@link Calc}s. */
+  public static final Program CALC_PROGRAM =
+      calc(DefaultRelMetadataProvider.INSTANCE);
+
+  /** Program that expands sub-queries. */
+  public static final Program SUB_QUERY_PROGRAM =
+      subQuery(DefaultRelMetadataProvider.INSTANCE);
+
+  public static final ImmutableSet<RelOptRule> RULE_SET =
+      ImmutableSet.of(
+          EnumerableRules.ENUMERABLE_JOIN_RULE,
+          EnumerableRules.ENUMERABLE_MERGE_JOIN_RULE,
+          EnumerableRules.ENUMERABLE_SEMI_JOIN_RULE,
+          EnumerableRules.ENUMERABLE_CORRELATE_RULE,
+          EnumerableRules.ENUMERABLE_PROJECT_RULE,
+          EnumerableRules.ENUMERABLE_FILTER_RULE,
+          EnumerableRules.ENUMERABLE_AGGREGATE_RULE,
+          EnumerableRules.ENUMERABLE_SORT_RULE,
+          EnumerableRules.ENUMERABLE_LIMIT_RULE,
+          EnumerableRules.ENUMERABLE_UNION_RULE,
+          EnumerableRules.ENUMERABLE_INTERSECT_RULE,
+          EnumerableRules.ENUMERABLE_MINUS_RULE,
+          EnumerableRules.ENUMERABLE_TABLE_MODIFICATION_RULE,
+          EnumerableRules.ENUMERABLE_VALUES_RULE,
+          EnumerableRules.ENUMERABLE_WINDOW_RULE,
+          SemiJoinRule.PROJECT,
+          SemiJoinRule.JOIN,
+          TableScanRule.INSTANCE,
+          CalcitePrepareImpl.COMMUTE
+              ? JoinAssociateRule.INSTANCE
+              : ProjectMergeRule.INSTANCE,
+          AggregateStarTableRule.INSTANCE,
+          AggregateStarTableRule.INSTANCE2,
+          FilterTableScanRule.INSTANCE,
+          FilterProjectTransposeRule.INSTANCE,
+          FilterJoinRule.FILTER_ON_JOIN,
+          AggregateExpandDistinctAggregatesRule.INSTANCE,
+          AggregateReduceFunctionsRule.INSTANCE,
+          FilterAggregateTransposeRule.INSTANCE,
+          JoinCommuteRule.INSTANCE,
+          JoinPushThroughJoinRule.RIGHT,
+          JoinPushThroughJoinRule.LEFT,
+          SortProjectTransposeRule.INSTANCE);
+
+  // private constructor for utility class
+  private Programs() {}
+
+  /** Creates a program that executes a rule set. */
+  public static Program of(RuleSet ruleSet) {
+    return new RuleSetProgram(ruleSet);
+  }
+
+  /** Creates a list of programs based on an array of rule sets. */
+  public static List<Program> listOf(RuleSet... ruleSets) {
+    return Lists.transform(Arrays.asList(ruleSets), RULE_SET_TO_PROGRAM);
+  }
+
+  /** Creates a list of programs based on a list of rule sets. */
+  public static List<Program> listOf(List<RuleSet> ruleSets) {
+    return Lists.transform(ruleSets, RULE_SET_TO_PROGRAM);
+  }
+
+  /** Creates a program from a list of rules. */
+  public static Program ofRules(RelOptRule... rules) {
+    return of(RuleSets.ofList(rules));
+  }
+
+  /** Creates a program from a list of rules. */
+  public static Program ofRules(Iterable<? extends RelOptRule> rules) {
+    return of(RuleSets.ofList(rules));
+  }
+
+  /** Creates a program that executes a sequence of programs. */
+  public static Program sequence(Program... programs) {
+    return new SequenceProgram(ImmutableList.copyOf(programs));
+  }
+
+  /** Creates a program that executes a list of rules in a HEP planner. */
+  public static Program hep(Iterable<? extends RelOptRule> rules,
+      boolean noDag, RelMetadataProvider metadataProvider) {
+    final HepProgramBuilder builder = HepProgram.builder();
+    for (RelOptRule rule : rules) {
+      builder.addRuleInstance(rule);
     }
-
-    /** Creates a program from a list of rules. */
-    public static Program ofRules(RelOptRule... rules) {
-        return of(RuleSets.ofList(rules));
-    }
-
-    /** Creates a program from a list of rules. */
-    public static Program ofRules(Iterable<? extends RelOptRule> rules) {
-        return of(RuleSets.ofList(rules));
-    }
-
-    /** Creates a program that executes a sequence of programs. */
-    public static Program sequence(Program... programs) {
-        return new SequenceProgram(ImmutableList.copyOf(programs));
-    }
-
-    /** Creates a program that executes a list of rules in a HEP planner. */
-    public static Program hep(Iterable<? extends RelOptRule> rules, boolean noDag,
-            RelMetadataProvider metadataProvider) {
-        final HepProgramBuilder builder = HepProgram.builder();
-        for (RelOptRule rule : rules) {
-            builder.addRuleInstance(rule);
+    return of(builder.build(), noDag, metadataProvider);
+  }
+
+  /** Creates a program that executes a {@link HepProgram}. */
+  public static Program of(final HepProgram hepProgram, final boolean noDag,
+      final RelMetadataProvider metadataProvider) {
+    return new Program() {
+      public RelNode run(RelOptPlanner planner, RelNode rel,
+          RelTraitSet requiredOutputTraits,
+          List<RelOptMaterialization> materializations,
+          List<RelOptLattice> lattices) {
+        final HepPlanner hepPlanner = new HepPlanner(hepProgram,
+            null, noDag, null, RelOptCostImpl.FACTORY);
+
+        List<RelMetadataProvider> list = Lists.newArrayList();
+        if (metadataProvider != null) {
+          list.add(metadataProvider);
         }
-        return of(builder.build(), noDag, metadataProvider);
-    }
+        hepPlanner.registerMetadataProviders(list);
+        RelMetadataProvider plannerChain =
+            ChainedRelMetadataProvider.of(list);
+        rel.getCluster().setMetadataProvider(plannerChain);
+
+        hepPlanner.setRoot(rel);
+        return hepPlanner.findBestExp();
+      }
+    };
+  }
+
+  /** Creates a program that invokes heuristic join-order optimization
+   * (via {@link org.apache.calcite.rel.rules.JoinToMultiJoinRule},
+   * {@link org.apache.calcite.rel.rules.MultiJoin} and
+   * {@link org.apache.calcite.rel.rules.LoptOptimizeJoinRule})
+   * if there are 6 or more joins (7 or more relations). */
+  public static Program heuristicJoinOrder(
+      final Iterable<? extends RelOptRule> rules,
+      final boolean bushy, final int minJoinCount) {
+    return new Program() {
+      public RelNode run(RelOptPlanner planner, RelNode rel,
+          RelTraitSet requiredOutputTraits,
+          List<RelOptMaterialization> materializations,
+          List<RelOptLattice> lattices) {
+        final int joinCount = RelOptUtil.countJoins(rel);
+        final Program program;
+        if (joinCount < minJoinCount) {
+          program = ofRules(rules);
+        } else {
+          // Create a program that gathers together joins as a MultiJoin.
+          final HepProgram hep = new HepProgramBuilder()
+              .addRuleInstance(FilterJoinRule.FILTER_ON_JOIN)
+              .addMatchOrder(HepMatchOrder.BOTTOM_UP)
+              .addRuleInstance(JoinToMultiJoinRule.INSTANCE)
+              .build();
+          final Program program1 =
+              of(hep, false, DefaultRelMetadataProvider.INSTANCE);
+
+          // Create a program that contains a rule to expand a MultiJoin
+          // into heuristically ordered joins.
+          // We use the rule set passed in, but remove JoinCommuteRule and
+          // JoinPushThroughJoinRule, because they cause exhaustive search.
+          final List<RelOptRule> list = Lists.newArrayList(rules);
+          list.removeAll(
+              ImmutableList.of(JoinCommuteRule.INSTANCE,
+                  JoinAssociateRule.INSTANCE,
+                  JoinPushThroughJoinRule.LEFT,
+                  JoinPushThroughJoinRule.RIGHT));
+          list.add(bushy
+              ? MultiJoinOptimizeBushyRule.INSTANCE
+              : LoptOptimizeJoinRule.INSTANCE);
+          final Program program2 = ofRules(list);
+
+          program = sequence(program1, program2);
+        }
+        return program.run(
+            planner, rel, requiredOutputTraits, materializations, lattices);
+      }
+    };
+  }
+
+  public static Program calc(RelMetadataProvider metadataProvider) {
+    return hep(CALC_RULES, true, metadataProvider);
+  }
+
+  @Deprecated // to be removed before 2.0
+  public static Program subquery(RelMetadataProvider metadataProvider) {
+    return subQuery(metadataProvider);
+  }
+
+  public static Program subQuery(RelMetadataProvider metadataProvider) {
+    return hep(
+        ImmutableList.of((RelOptRule) SubQueryRemoveRule.FILTER, 
+            SubQueryRemoveRule.PROJECT,
+            SubQueryRemoveRule.JOIN, OLAPJoinPushThroughJoinRule.INSTANCE,
+            OLAPJoinPushThroughJoinRule2.INSTANCE
+        ), true, metadataProvider);
+  }
+
+  public static Program getProgram() {
+    return new Program() {
+      public RelNode run(RelOptPlanner planner, RelNode rel,
+          RelTraitSet requiredOutputTraits,
+          List<RelOptMaterialization> materializations,
+          List<RelOptLattice> lattices) {
+        return null;
+      }
+    };
+  }
+
+  /** Returns the standard program used by Prepare. */
+  public static Program standard() {
+    return standard(DefaultRelMetadataProvider.INSTANCE);
+  }
+
+  /** Returns the standard program with user metadata provider. */
+  public static Program standard(RelMetadataProvider metadataProvider) {
+
+    final Program program1 =
+        new Program() {
+          public RelNode run(RelOptPlanner planner, RelNode rel,
+              RelTraitSet requiredOutputTraits,
+              List<RelOptMaterialization> materializations,
+              List<RelOptLattice> lattices) {
+            planner.setRoot(rel);
 
-    /** Creates a program that executes a {@link HepProgram}. */
-    public static Program of(final HepProgram hepProgram, final boolean noDag,
-            final RelMetadataProvider metadataProvider) {
-        return new Program() {
-            public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                    List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-                final HepPlanner hepPlanner = new HepPlanner(hepProgram, null, noDag, null, RelOptCostImpl.FACTORY);
-
-                List<RelMetadataProvider> list = Lists.newArrayList();
-                if (metadataProvider != null) {
-                    list.add(metadataProvider);
-                }
-                hepPlanner.registerMetadataProviders(list);
-                RelMetadataProvider plannerChain = ChainedRelMetadataProvider.of(list);
-                rel.getCluster().setMetadataProvider(plannerChain);
-
-                hepPlanner.setRoot(rel);
-                return hepPlanner.findBestExp();
+            for (RelOptMaterialization materialization : materializations) {
+              planner.addMaterialization(materialization);
             }
-        };
-    }
-
-    /** Creates a program that invokes heuristic join-order optimization
-     * (via {@link org.apache.calcite.rel.rules.JoinToMultiJoinRule},
-     * {@link org.apache.calcite.rel.rules.MultiJoin} and
-     * {@link org.apache.calcite.rel.rules.LoptOptimizeJoinRule})
-     * if there are 6 or more joins (7 or more relations). */
-    public static Program heuristicJoinOrder(final Iterable<? extends RelOptRule> rules, final boolean bushy,
-            final int minJoinCount) {
-        return new Program() {
-            public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                    List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-                final int joinCount = RelOptUtil.countJoins(rel);
-                final Program program;
-                if (joinCount < minJoinCount) {
-                    program = ofRules(rules);
-                } else {
-                    // Create a program that gathers together joins as a MultiJoin.
-                    final HepProgram hep = new HepProgramBuilder().addRuleInstance(FilterJoinRule.FILTER_ON_JOIN)
-                            .addMatchOrder(HepMatchOrder.BOTTOM_UP).addRuleInstance(JoinToMultiJoinRule.INSTANCE)
-                            .build();
-                    final Program program1 = of(hep, false, DefaultRelMetadataProvider.INSTANCE);
-
-                    // Create a program that contains a rule to expand a MultiJoin
-                    // into heuristically ordered joins.
-                    // We use the rule set passed in, but remove JoinCommuteRule and
-                    // JoinPushThroughJoinRule, because they cause exhaustive search.
-                    final List<RelOptRule> list = Lists.newArrayList(rules);
-                    list.removeAll(ImmutableList.of(JoinCommuteRule.INSTANCE, JoinAssociateRule.INSTANCE,
-                            JoinPushThroughJoinRule.LEFT, JoinPushThroughJoinRule.RIGHT));
-                    list.add(bushy ? MultiJoinOptimizeBushyRule.INSTANCE : LoptOptimizeJoinRule.INSTANCE);
-                    final Program program2 = ofRules(list);
-
-                    program = sequence(program1, program2);
-                }
-                return program.run(planner, rel, requiredOutputTraits, materializations, lattices);
+            for (RelOptLattice lattice : lattices) {
+              planner.addLattice(lattice);
             }
-        };
-    }
-
-    public static Program calc(RelMetadataProvider metadataProvider) {
-        return hep(CALC_RULES, true, metadataProvider);
-    }
 
-    @Deprecated // to be removed before 2.0
-    public static Program subquery(RelMetadataProvider metadataProvider) {
-        return subQuery(metadataProvider);
-    }
-
-    public static Program subQuery(RelMetadataProvider metadataProvider) {
-        return hep(ImmutableList.of((RelOptRule) SubQueryRemoveRule.FILTER, SubQueryRemoveRule.PROJECT,
-                SubQueryRemoveRule.JOIN, OLAPJoinPushThroughJoinRule.INSTANCE, OLAPJoinPushThroughJoinRule2.INSTANCE),
-                true, metadataProvider);
-    }
-
-    public static Program getProgram() {
-        return new Program() {
-            public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                    List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-                return null;
-            }
+            final RelNode rootRel2 =
+                rel.getTraitSet().equals(requiredOutputTraits)
+                ? rel
+                : planner.changeTraits(rel, requiredOutputTraits);
+            assert rootRel2 != null;
+
+            planner.setRoot(rootRel2);
+            final RelOptPlanner planner2 = planner.chooseDelegate();
+            final RelNode rootRel3 = planner2.findBestExp();
+            assert rootRel3 != null : "could not implement exp";
+            return rootRel3;
+          }
         };
-    }
 
-    /** Returns the standard program used by Prepare. */
-    public static Program standard() {
-        return standard(DefaultRelMetadataProvider.INSTANCE);
-    }
+    return sequence(subQuery(metadataProvider),
+        new DecorrelateProgram(),
+        new TrimFieldsProgram(),
+        program1,
 
-    /** Returns the standard program with user metadata provider. */
-    public static Program standard(RelMetadataProvider metadataProvider) {
-
-        final Program program1 = new Program() {
-            public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                    List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-                planner.setRoot(rel);
-
-                for (RelOptMaterialization materialization : materializations) {
-                    planner.addMaterialization(materialization);
-                }
-                for (RelOptLattice lattice : lattices) {
-                    planner.addLattice(lattice);
-                }
-
-                final RelNode rootRel2 = rel.getTraitSet().equals(requiredOutputTraits) ? rel
-                        : planner.changeTraits(rel, requiredOutputTraits);
-                assert rootRel2 != null;
-
-                planner.setRoot(rootRel2);
-                final RelOptPlanner planner2 = planner.chooseDelegate();
-                final RelNode rootRel3 = planner2.findBestExp();
-                assert rootRel3 != null : "could not implement exp";
-                return rootRel3;
-            }
-        };
+        // Second planner pass to do physical "tweaks". This is the first time that
+        // EnumerableCalcRel is introduced.
+        calc(metadataProvider));
+  }
 
-        return sequence(subQuery(metadataProvider), new DecorrelateProgram(), new TrimFieldsProgram(), program1,
+  /** Program backed by a {@link RuleSet}. */
+  static class RuleSetProgram implements Program {
+    final RuleSet ruleSet;
 
-                // Second planner pass to do physical "tweaks". This is the first time that
-                // EnumerableCalcRel is introduced.
-                calc(metadataProvider));
+    private RuleSetProgram(RuleSet ruleSet) {
+      this.ruleSet = ruleSet;
     }
 
-    /** Program backed by a {@link RuleSet}. */
-    static class RuleSetProgram implements Program {
-        final RuleSet ruleSet;
-
-        private RuleSetProgram(RuleSet ruleSet) {
-            this.ruleSet = ruleSet;
-        }
+    public RelNode run(RelOptPlanner planner, RelNode rel,
+        RelTraitSet requiredOutputTraits,
+        List<RelOptMaterialization> materializations,
+        List<RelOptLattice> lattices) {
+      planner.clear();
+      for (RelOptRule rule : ruleSet) {
+        planner.addRule(rule);
+      }
+      for (RelOptMaterialization materialization : materializations) {
+        planner.addMaterialization(materialization);
+      }
+      for (RelOptLattice lattice : lattices) {
+        planner.addLattice(lattice);
+      }
+      if (!rel.getTraitSet().equals(requiredOutputTraits)) {
+        rel = planner.changeTraits(rel, requiredOutputTraits);
+      }
+      planner.setRoot(rel);
+      return planner.findBestExp();
 
-        public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-            planner.clear();
-            for (RelOptRule rule : ruleSet) {
-                planner.addRule(rule);
-            }
-            for (RelOptMaterialization materialization : materializations) {
-                planner.addMaterialization(materialization);
-            }
-            for (RelOptLattice lattice : lattices) {
-                planner.addLattice(lattice);
-            }
-            if (!rel.getTraitSet().equals(requiredOutputTraits)) {
-                rel = planner.changeTraits(rel, requiredOutputTraits);
-            }
-            planner.setRoot(rel);
-            return planner.findBestExp();
-
-        }
     }
+  }
 
-    /** Program that runs sub-programs, sending the output of the previous as
-     * input to the next. */
-    private static class SequenceProgram implements Program {
-        private final ImmutableList<Program> programs;
-
-        SequenceProgram(ImmutableList<Program> programs) {
-            this.programs = programs;
-        }
+  /** Program that runs sub-programs, sending the output of the previous as
+   * input to the next. */
+  private static class SequenceProgram implements Program {
+    private final ImmutableList<Program> programs;
 
-        public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-            for (Program program : programs) {
-                rel = program.run(planner, rel, requiredOutputTraits, materializations, lattices);
-            }
-            return rel;
-        }
+    SequenceProgram(ImmutableList<Program> programs) {
+      this.programs = programs;
     }
 
-    /** Program that de-correlates a query.
-     *
-     * <p>To work around
-     * <a href="https://issues.apache.org/jira/browse/CALCITE-842">[CALCITE-842]
-     * Decorrelator gets field offsets confused if fields have been trimmed</a>,
-     * disable field-trimming in {@link SqlToRelConverter}, and run
-     * {@link TrimFieldsProgram} after this program. */
-    private static class DecorrelateProgram implements Program {
-        public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-            final CalciteConnectionConfig config = planner.getContext().unwrap(CalciteConnectionConfig.class);
-            if (config != null && config.forceDecorrelate()) {
-                return RelDecorrelator.decorrelateQuery(rel);
-            }
-            return rel;
-        }
+    public RelNode run(RelOptPlanner planner, RelNode rel,
+        RelTraitSet requiredOutputTraits,
+        List<RelOptMaterialization> materializations,
+        List<RelOptLattice> lattices) {
+      for (Program program : programs) {
+        rel = program.run(
+            planner, rel, requiredOutputTraits, materializations, lattices);
+      }
+      return rel;
     }
-
-    /** Program that trims fields. */
-    private static class TrimFieldsProgram implements Program {
-        public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits,
-                List<RelOptMaterialization> materializations, List<RelOptLattice> lattices) {
-            final RelBuilder relBuilder = RelFactories.LOGICAL_BUILDER.create(rel.getCluster(), null);
-            return new RelFieldTrimmer(null, relBuilder).trim(rel);
-        }
+  }
+
+  /** Program that de-correlates a query.
+   *
+   * <p>To work around
+   * <a href="https://issues.apache.org/jira/browse/CALCITE-842">[CALCITE-842]
+   * Decorrelator gets field offsets confused if fields have been trimmed</a>,
+   * disable field-trimming in {@link SqlToRelConverter}, and run
+   * {@link TrimFieldsProgram} after this program. */
+  private static class DecorrelateProgram implements Program {
+    public RelNode run(RelOptPlanner planner, RelNode rel,
+        RelTraitSet requiredOutputTraits,
+        List<RelOptMaterialization> materializations,
+        List<RelOptLattice> lattices) {
+      final CalciteConnectionConfig config =
+          planner.getContext().unwrap(CalciteConnectionConfig.class);
+      if (config != null && config.forceDecorrelate()) {
+        return RelDecorrelator.decorrelateQuery(rel);
+      }
+      return rel;
+    }
+  }
+
+  /** Program that trims fields. */
+  private static class TrimFieldsProgram implements Program {
+    public RelNode run(RelOptPlanner planner, RelNode rel,
+        RelTraitSet requiredOutputTraits,
+        List<RelOptMaterialization> materializations,
+        List<RelOptLattice> lattices) {
+      final RelBuilder relBuilder =
+          RelFactories.LOGICAL_BUILDER.create(rel.getCluster(), null);
+      return new RelFieldTrimmer(null, relBuilder).trim(rel);
     }
+  }
 }
 
 // End Programs.java

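The static factories above are meant to be composed; a rough usage sketch, limited to APIs that appear in this file (the rule choice is arbitrary, and the planner/rel inputs are assumed to come from the caller):

    // Illustrative composition: a HEP pass over two rules, followed by the calc pass,
    // chained the same way standard() chains its passes above.
    Program cleanup = Programs.hep(
        ImmutableList.of((RelOptRule) FilterJoinRule.FILTER_ON_JOIN, ProjectMergeRule.INSTANCE),
        true, DefaultRelMetadataProvider.INSTANCE);
    Program pipeline = Programs.sequence(cleanup, Programs.calc(DefaultRelMetadataProvider.INSTANCE));
    // RelNode best = pipeline.run(planner, rel, requiredOutputTraits, materializations, lattices);
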
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
index f7fceec..426ebb9 100644
--- a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
@@ -73,14 +73,13 @@ public class BackwardCompatibilityConfig {
         for (Entry<Object, Object> kv : props.entrySet()) {
             String key = (String) kv.getKey();
             String value = (String) kv.getValue();
-
+            
             if (key.equals(value))
                 continue; // no change
-
+            
             if (value.contains(key))
-                throw new IllegalStateException("New key '" + value + "' contains old key '" + key
-                        + "' causes trouble to repeated find & replace");
-
+                throw new IllegalStateException("New key '" + value + "' contains old key '" + key + "' causes trouble to repeated find & replace");
+            
             if (value.endsWith("."))
                 old2newPrefix.put(key, value);
             else
@@ -123,7 +122,7 @@ public class BackwardCompatibilityConfig {
         return result;
     }
 
-    public OrderedProperties check(OrderedProperties props) {
+    public OrderedProperties check(OrderedProperties props){
         OrderedProperties result = new OrderedProperties();
         for (Entry<String, String> kv : props.entrySet()) {
             result.setProperty(check(kv.getKey()), kv.getValue());
@@ -181,7 +180,7 @@ public class BackwardCompatibilityConfig {
         } finally {
             IOUtils.closeQuietly(out);
         }
-
+        
         System.out.println("Files generated:");
         System.out.println(shFile);
         System.out.println(sedFile);
@@ -212,7 +211,6 @@ public class BackwardCompatibilityConfig {
         else if (name.endsWith("-site.xml"))
             return false;
         else
-            return name.endsWith(".java") || name.endsWith(".js") || name.endsWith(".sh")
-                    || name.endsWith(".properties") || name.endsWith(".xml");
+            return name.endsWith(".java") || name.endsWith(".js") || name.endsWith(".sh") || name.endsWith(".properties") || name.endsWith(".xml");
     }
 }

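The guard above exists because the mapping is applied by repeated find-and-replace over source and config files, so a new key that still contains its old key would be rewritten again on the next pass. A stand-alone sketch of the two kinds of mappings the loop distinguishes, exact renames versus trailing-dot prefix renames (the property names below are made up; assumes java.util.Map / HashMap):

    Map<String, String> old2new = new HashMap<>();
    Map<String, String> old2newPrefix = new HashMap<>();
    old2new.put("kylin.old.some-flag", "kylin.new.some-flag");     // exact rename (hypothetical keys)
    old2newPrefix.put("kylin.old.engine.", "kylin.new.engine.");   // value ends with '.', so a prefix rename

    String key = "kylin.old.engine.timeout";
    String mapped = old2new.getOrDefault(key, key);
    for (Map.Entry<String, String> e : old2newPrefix.entrySet()) {
        if (mapped.startsWith(e.getKey()))
            mapped = e.getValue() + mapped.substring(e.getKey().length());
    }
    // mapped is now "kylin.new.engine.timeout"
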
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 5d08338..04af9f5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -70,8 +70,7 @@ public class KylinConfig extends KylinConfigBase {
                     config = new KylinConfig();
                     config.reloadKylinConfig(getKylinProperties());
 
-                    logger.info("Initialized a new KylinConfig from getInstanceFromEnv : "
-                            + System.identityHashCode(config));
+                    logger.info("Initialized a new KylinConfig from getInstanceFromEnv : " + System.identityHashCode(config));
                     SYS_ENV_INSTANCE = config;
                 } catch (IllegalArgumentException e) {
                     throw new IllegalStateException("Failed to find KylinConfig ", e);
@@ -109,12 +108,10 @@ public class KylinConfig extends KylinConfigBase {
                     if (file.getName().equalsIgnoreCase(KYLIN_CONF_PROPERTIES_FILE)) {
                         return UriType.PROPERTIES_FILE;
                     } else {
-                        throw new IllegalStateException(
-                                "Metadata uri : " + metaUri + " is a local file but not kylin.properties");
+                        throw new IllegalStateException("Metadata uri : " + metaUri + " is a local file but not kylin.properties");
                     }
                 } else {
-                    throw new IllegalStateException(
-                            "Metadata uri : " + metaUri + " looks like a file but it's neither a file nor a directory");
+                    throw new IllegalStateException("Metadata uri : " + metaUri + " looks like a file but it's neither a file nor a directory");
                 }
             } else {
                 if (RestClient.matchFullRestPattern(metaUri))
@@ -261,8 +258,7 @@ public class KylinConfig extends KylinConfigBase {
         return conf;
     }
 
-    private static OrderedProperties getKylinOrderedProperties()
-            throws FileNotFoundException, UnsupportedEncodingException {
+    private static OrderedProperties getKylinOrderedProperties() throws FileNotFoundException, UnsupportedEncodingException {
         File propFile = getKylinPropertiesFile();
         if (propFile == null || !propFile.exists()) {
             logger.error("fail to locate " + KYLIN_CONF_PROPERTIES_FILE);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index e495aaa..ad08108 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -198,8 +198,7 @@ abstract public class KylinConfigBase implements Serializable {
                 root = "hdfs://" + root;
         }
 
-        return new StringBuffer(root).append(StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).append("/")
-                .toString();
+        return new StringBuffer(root).append(StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).append("/").toString();
     }
 
     // ============================================================================
@@ -243,8 +242,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public DistributedLockFactory getDistributedLockFactory() {
-        String clsName = getOptional("kylin.metadata.distributed-lock-impl",
-                "org.apache.kylin.storage.hbase.util.ZookeeperDistributedLock$Factory");
+        String clsName = getOptional("kylin.metadata.distributed-lock-impl", "org.apache.kylin.storage.hbase.util.ZookeeperDistributedLock$Factory");
         return (DistributedLockFactory) ClassUtil.newInstance(clsName);
     }
 
@@ -348,8 +346,7 @@ abstract public class KylinConfigBase implements Serializable {
     public CliCommandExecutor getCliCommandExecutor() throws IOException {
         CliCommandExecutor exec = new CliCommandExecutor();
         if (getRunAsRemoteCommand()) {
-            exec.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliPort(), getRemoteHadoopCliUsername(),
-                    getRemoteHadoopCliPassword());
+            exec.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliPort(), getRemoteHadoopCliUsername(), getRemoteHadoopCliPassword());
         }
         return exec;
     }
@@ -398,8 +395,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public String getHiveDependencyFilterList() {
-        return this.getOptional("kylin.job.dependency-filter-list", "[^,]*hive-exec[^,]*?\\.jar" + "|"
-                + "[^,]*hive-metastore[^,]*?\\.jar" + "|" + "[^,]*hive-hcatalog-core[^,]*?\\.jar");
+        return this.getOptional("kylin.job.dependency-filter-list", "[^,]*hive-exec[^,]*?\\.jar" + "|" + "[^,]*hive-metastore[^,]*?\\.jar" + "|" + "[^,]*hive-hcatalog-core[^,]*?\\.jar");
     }
 
     public boolean isMailEnabled() {
@@ -657,8 +653,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public long getPartitionMaxScanBytes() {
-        long value = Long.parseLong(
-                this.getOptional("kylin.storage.partition.max-scan-bytes", String.valueOf(3L * 1024 * 1024 * 1024)));
+        long value = Long.parseLong(this.getOptional("kylin.storage.partition.max-scan-bytes", String.valueOf(3L * 1024 * 1024 * 1024)));
         return value > 0 ? value : Long.MAX_VALUE;
     }
 
@@ -675,8 +670,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public String getDefaultIGTStorage() {
-        return getOptional("kylin.storage.hbase.gtstorage",
-                "org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC");
+        return getOptional("kylin.storage.hbase.gtstorage", "org.apache.kylin.storage.hbase.cube.v2.CubeHBaseEndpointRPC");
     }
 
     public int getHBaseScanCacheRows() {
@@ -950,7 +944,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public boolean isAdhocEnabled() {
-        return StringUtils.isNotEmpty(getAdHocRunnerClassName());
+        return StringUtils.isNotEmpty(getAdHocRunnerClassName()); 
     }
 
     public String getAdHocRunnerClassName() {

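Several getters in this hunk share one pattern: read an optional property, fall back to a hard-coded default, and treat a non-positive value as unlimited, as getPartitionMaxScanBytes() does above. A condensed stand-alone version of that pattern (a sketch, not the Kylin class itself):

    // Illustrative re-statement of the getPartitionMaxScanBytes() logic shown above.
    static long partitionMaxScanBytes(java.util.Properties props) {
        String raw = props.getProperty("kylin.storage.partition.max-scan-bytes",
                String.valueOf(3L * 1024 * 1024 * 1024));          // default: 3 GB
        long value = Long.parseLong(raw);
        return value > 0 ? value : Long.MAX_VALUE;                 // non-positive means no limit
    }
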
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java b/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
index cd7b64e..6eba7c5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinVersion.java
@@ -168,9 +168,7 @@ public class KylinVersion implements Comparable {
     public static String getKylinClientInformation() {
         StringBuilder buf = new StringBuilder();
 
-        buf.append("kylin.home: ").append(
-                KylinConfig.getKylinHome() == null ? "UNKNOWN" : new File(KylinConfig.getKylinHome()).getAbsolutePath())
-                .append("\n");
+        buf.append("kylin.home: ").append(KylinConfig.getKylinHome() == null ? "UNKNOWN" : new File(KylinConfig.getKylinHome()).getAbsolutePath()).append("\n");
         buf.append("kylin.version:").append(KylinVersion.getCurrentVersion()).append("\n");
         buf.append("commit:").append(getGitCommitInfo()).append("\n");
         buf.append("os.name:").append(System.getProperty("os.name")).append("\n");
@@ -201,4 +199,4 @@ public class KylinVersion implements Comparable {
         }
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/QueryContext.java b/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
index a2342f0..0b8d519 100644
--- a/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
+++ b/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
@@ -39,7 +39,7 @@ public class QueryContext {
 
     private QueryContext() {
         // use QueryContext.current() instead
-
+        
         queryId = UUID.randomUUID().toString();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLock.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLock.java b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLock.java
index 91df47e..8f1fae0 100644
--- a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLock.java
+++ b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLock.java
@@ -32,14 +32,14 @@ public interface DistributedLock {
      * Returns the client that owns this instance.
      */
     String getClient();
-
+    
     /**
      * Acquire the lock at given path, non-blocking.
      * 
      * @return If the lock is acquired or not.
      */
     boolean lock(String lockPath);
-
+    
     /**
      * Acquire the lock at given path, block until given timeout.
      * 
@@ -51,12 +51,12 @@ public interface DistributedLock {
      * Returns if lock is available at given path.
      */
     boolean isLocked(String lockPath);
-
+    
     /**
      * Returns if the lock at given path is held by the current client.
      */
     boolean isLockedByMe(String lockPath);
-
+    
     /**
      * Returns the owner of a lock path; returns null if the path is not locked by any one.
      */
@@ -73,7 +73,7 @@ public interface DistributedLock {
      * Purge all locks under given path. For clean up.
      */
     void purgeLocks(String lockPathRoot);
-
+    
     /**
      * Watch lock events under given path, notifies the watcher on all lock/unlock events under the given path root.
      * 
@@ -83,7 +83,6 @@ public interface DistributedLock {
 
     public interface Watcher {
         void onLock(String lockPath, String client);
-
         void onUnlock(String lockPath, String client);
     }
 }

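The interface above is a non-blocking try-lock API keyed by a path. A caller-side sketch (the lock path is made up, the lock instance is assumed to come from a DistributedLockFactory, and the unlock call assumes a release method of this interface that is outside the hunks shown):

    // Illustrative usage of the DistributedLock API above.
    DistributedLock lock = /* obtained from a DistributedLockFactory */ null;
    String path = "/kylin/job_engine/lock";             // hypothetical lock path
    if (lock.lock(path)) {                               // non-blocking acquire
        try {
            // ... do the protected work ...
        } finally {
            if (lock.isLockedByMe(path))
                lock.unlock(path);                       // assumed release counterpart
        }
    }
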
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
index c96c1be..cd1c2b1 100644
--- a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
+++ b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
@@ -35,7 +35,7 @@ public abstract class DistributedLockFactory {
     private static String threadProcessAndHost() {
         return Thread.currentThread().getId() + "-" + processAndHost();
     }
-
+    
     private static String processAndHost() {
         byte[] bytes = ManagementFactory.getRuntimeMXBean().getName().getBytes();
         return new String(bytes);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
index a935702..82cf451 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/FileResourceStore.java
@@ -46,8 +46,7 @@ public class FileResourceStore extends ResourceStore {
         super(kylinConfig);
         root = new File(kylinConfig.getMetadataUrl().getIdentifier()).getAbsoluteFile();
         if (root.exists() == false)
-            throw new IllegalArgumentException(
-                    "File does not exist at '" + kylinConfig.getMetadataUrl() + "': " + root.getAbsolutePath());
+            throw new IllegalArgumentException("File does not exist at '" + kylinConfig.getMetadataUrl() + "': " + root.getAbsolutePath());
     }
 
     @Override
@@ -71,8 +70,7 @@ public class FileResourceStore extends ResourceStore {
     }
 
     @Override
-    synchronized protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart,
-            long timeEndExclusive) throws IOException {
+    synchronized protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive) throws IOException {
         NavigableSet<String> resources = listResources(folderPath);
         if (resources == null)
             return Collections.emptyList();
@@ -133,12 +131,10 @@ public class FileResourceStore extends ResourceStore {
     }
 
     @Override
-    synchronized protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
-            throws IOException, IllegalStateException {
+    synchronized protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
         File f = file(resPath);
         if ((f.exists() && f.lastModified() != oldTS) || (f.exists() == false && oldTS != 0))
-            throw new IllegalStateException(
-                    "Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but found " + f.lastModified());
+            throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but found " + f.lastModified());
 
         putResourceImpl(resPath, new ByteArrayInputStream(content), newTS);
 

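checkAndPutResourceImpl() above is an optimistic check-and-set: the write only goes through if the caller's expected timestamp still matches what is on disk, otherwise it fails with the conflict error shown. The same idea, reduced to an in-memory sketch (not the Kylin implementation):

    // Illustrative optimistic check-and-put over a map of path -> last-modified timestamp.
    // An expected timestamp of 0 means "the resource should not exist yet".
    static long checkAndPut(java.util.Map<String, Long> tsByPath, String path, long oldTS, long newTS) {
        long currentTS = tsByPath.getOrDefault(path, 0L);
        if (currentTS != oldTS)
            throw new IllegalStateException(
                    "Overwriting conflict " + path + ", expect old TS " + oldTS + ", but found " + currentTS);
        tsByPath.put(path, newTS);                       // the real store also writes the content
        return newTS;
    }
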
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
index 651a6d1..7fb93e7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
@@ -81,8 +81,7 @@ abstract public class ResourceStore {
             Class<? extends ResourceStore> cls = ClassUtil.forName(clsName, ResourceStore.class);
             ResourceStore store = cls.getConstructor(KylinConfig.class).newInstance(kylinConfig);
             if (!store.exists(METASTORE_UUID_TAG)) {
-                store.putResource(METASTORE_UUID_TAG, new StringEntity(store.createMetaStoreUUID()), 0,
-                        StringEntity.serializer);
+                store.putResource(METASTORE_UUID_TAG, new StringEntity(store.createMetaStoreUUID()), 0, StringEntity.serializer);
             }
             return store;
         } catch (Throwable e) {
@@ -128,11 +127,9 @@ abstract public class ResourceStore {
 
     public String getMetaStoreUUID() throws IOException {
         if (!exists(ResourceStore.METASTORE_UUID_TAG)) {
-            putResource(ResourceStore.METASTORE_UUID_TAG, new StringEntity(createMetaStoreUUID()), 0,
-                    StringEntity.serializer);
+            putResource(ResourceStore.METASTORE_UUID_TAG, new StringEntity(createMetaStoreUUID()), 0, StringEntity.serializer);
         }
-        StringEntity entity = getResource(ResourceStore.METASTORE_UUID_TAG, StringEntity.class,
-                StringEntity.serializer);
+        StringEntity entity = getResource(ResourceStore.METASTORE_UUID_TAG, StringEntity.class, StringEntity.serializer);
         return entity.toString();
     }
 
@@ -148,8 +145,7 @@ abstract public class ResourceStore {
     /**
      * Read a resource, return null in case of not found or is a folder
      */
-    final public <T extends RootPersistentEntity> T getResource(String resPath, Class<T> clz, Serializer<T> serializer)
-            throws IOException {
+    final public <T extends RootPersistentEntity> T getResource(String resPath, Class<T> clz, Serializer<T> serializer) throws IOException {
         resPath = norm(resPath);
         RawResource res = getResourceImpl(resPath);
         if (res == null)
@@ -177,16 +173,14 @@ abstract public class ResourceStore {
     /**
      * Read all resources under a folder. Return empty list if folder not exist.
      */
-    final public <T extends RootPersistentEntity> List<T> getAllResources(String folderPath, Class<T> clazz,
-            Serializer<T> serializer) throws IOException {
+    final public <T extends RootPersistentEntity> List<T> getAllResources(String folderPath, Class<T> clazz, Serializer<T> serializer) throws IOException {
         return getAllResources(folderPath, Long.MIN_VALUE, Long.MAX_VALUE, clazz, serializer);
     }
 
     /**
      * Read all resources under a folder having last modified time between given range. Return empty list if folder not exist.
      */
-    final public <T extends RootPersistentEntity> List<T> getAllResources(String folderPath, long timeStart,
-            long timeEndExclusive, Class<T> clazz, Serializer<T> serializer) throws IOException {
+    final public <T extends RootPersistentEntity> List<T> getAllResources(String folderPath, long timeStart, long timeEndExclusive, Class<T> clazz, Serializer<T> serializer) throws IOException {
         final List<RawResource> allResources = getAllResourcesImpl(folderPath, timeStart, timeEndExclusive);
         if (allResources == null || allResources.isEmpty()) {
             return Collections.emptyList();
@@ -207,8 +201,7 @@ abstract public class ResourceStore {
         }
     }
 
-    abstract protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive)
-            throws IOException;
+    abstract protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive) throws IOException;
 
     /**
      * returns null if not exists
@@ -239,16 +232,14 @@ abstract public class ResourceStore {
     /**
      * check & set, overwrite a resource
      */
-    final public <T extends RootPersistentEntity> long putResource(String resPath, T obj, Serializer<T> serializer)
-            throws IOException {
+    final public <T extends RootPersistentEntity> long putResource(String resPath, T obj, Serializer<T> serializer) throws IOException {
         return putResource(resPath, obj, System.currentTimeMillis(), serializer);
     }
 
     /**
      * check & set, overwrite a resource
      */
-    final public <T extends RootPersistentEntity> long putResource(String resPath, T obj, long newTS,
-            Serializer<T> serializer) throws IOException {
+    final public <T extends RootPersistentEntity> long putResource(String resPath, T obj, long newTS, Serializer<T> serializer) throws IOException {
         resPath = norm(resPath);
         //logger.debug("Saving resource " + resPath + " (Store " + kylinConfig.getMetadataUrl() + ")");
 
@@ -274,8 +265,7 @@ abstract public class ResourceStore {
         }
     }
 
-    private long checkAndPutResourceCheckpoint(String resPath, byte[] content, long oldTS, long newTS)
-            throws IOException {
+    private long checkAndPutResourceCheckpoint(String resPath, byte[] content, long oldTS, long newTS) throws IOException {
         beforeChange(resPath);
         return checkAndPutResourceImpl(resPath, content, oldTS, newTS);
     }
@@ -283,8 +273,7 @@ abstract public class ResourceStore {
     /**
      * checks old timestamp when overwriting existing
      */
-    abstract protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
-            throws IOException, IllegalStateException;
+    abstract protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException;
 
     /**
      * delete a resource, does nothing on a folder

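The read/write API above always pairs a resource path with a serializer. A usage sketch with StringEntity, whose serializer appears later in this patch (the path is made up, and IOException handling is omitted):

    // Illustrative only: write a StringEntity, then read it back.
    ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
    String path = "/example/greeting";                                 // hypothetical resource path
    store.putResource(path, new StringEntity("hello kylin"), StringEntity.serializer);
    StringEntity loaded = store.getResource(path, StringEntity.class, StringEntity.serializer);
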
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
index 4702f70..6ba68ae 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
@@ -134,16 +134,16 @@ public class ResourceTool {
         ResourceStore dst = ResourceStore.getStore(dstConfig);
 
         logger.info("Copy from {} to {}", src, dst);
-
+        
         copyR(src, dst, path);
     }
 
     public static void copy(KylinConfig srcConfig, KylinConfig dstConfig, List<String> paths) throws IOException {
         ResourceStore src = ResourceStore.getStore(srcConfig);
         ResourceStore dst = ResourceStore.getStore(dstConfig);
-
+        
         logger.info("Copy from {} to {}", src, dst);
-
+        
         for (String path : paths) {
             copyR(src, dst, path);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/persistence/StringEntity.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/StringEntity.java b/core-common/src/main/java/org/apache/kylin/common/persistence/StringEntity.java
index f25976f..84d7303 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/StringEntity.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/StringEntity.java
@@ -18,12 +18,12 @@
 
 package org.apache.kylin.common.persistence;
 
+import org.apache.commons.lang.StringUtils;
+
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import org.apache.commons.lang.StringUtils;
-
 public class StringEntity extends RootPersistentEntity {
 
     public static final Serializer<StringEntity> serializer = new Serializer<StringEntity>() {
@@ -66,4 +66,4 @@ public class StringEntity extends RootPersistentEntity {
     public String toString() {
         return str;
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
index b0721d0..33a4e7a 100644
--- a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
@@ -27,8 +27,7 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import javax.xml.bind.DatatypeConverter;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.HttpResponse;
 import org.apache.http.auth.AuthScope;
@@ -47,7 +46,7 @@ import org.apache.http.params.HttpParams;
 import org.apache.http.util.EntityUtils;
 import org.apache.kylin.common.util.JsonUtil;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
+import javax.xml.bind.DatatypeConverter;
 
 /**
  * @author yangli9
@@ -123,8 +122,7 @@ public class RestClient {
             String msg = EntityUtils.toString(response.getEntity());
 
             if (response.getStatusLine().getStatusCode() != 200)
-                throw new IOException("Invalid response " + response.getStatusLine().getStatusCode()
-                        + " with cache wipe url " + url + "\n" + msg);
+                throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with cache wipe url " + url + "\n" + msg);
         } catch (Exception ex) {
             throw new IOException(ex);
         } finally {
@@ -142,8 +140,7 @@ public class RestClient {
             msg = map.get("config");
 
             if (response.getStatusLine().getStatusCode() != 200)
-                throw new IOException("Invalid response " + response.getStatusLine().getStatusCode()
-                        + " with cache wipe url " + url + "\n" + msg);
+                throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with cache wipe url " + url + "\n" + msg);
             return msg;
         } finally {
             request.releaseConnection();
@@ -170,8 +167,7 @@ public class RestClient {
         HttpResponse response = client.execute(put);
         String result = getContent(response);
         if (response.getStatusLine().getStatusCode() != 200) {
-            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode()
-                    + " with build cube url " + url + "\n" + jsonMsg);
+            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with build cube url " + url + "\n" + jsonMsg);
         } else {
             return true;
         }
@@ -205,8 +201,7 @@ public class RestClient {
         HttpResponse response = client.execute(put);
         String result = getContent(response);
         if (response.getStatusLine().getStatusCode() != 200) {
-            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url
-                    + "\n" + jsonMsg);
+            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n" + jsonMsg);
         } else {
             return true;
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java b/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
index 50b8e62..fc501ea 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
@@ -284,8 +284,7 @@ public class Bytes {
             len = b.length - off;
         for (int i = off; i < off + len; ++i) {
             int ch = b[i] & 0xFF;
-            if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z')
-                    || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
+            if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z') || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
                 result.append((char) ch);
             } else {
                 result.append(String.format("\\x%02X", ch));
@@ -442,14 +441,12 @@ public class Bytes {
         }
     }
 
-    private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes, final int offset,
-            final int length, final int expectedLength) {
+    private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes, final int offset, final int length, final int expectedLength) {
         String reason;
         if (length != expectedLength) {
             reason = "Wrong length: " + length + ", expected " + expectedLength;
         } else {
-            reason = "offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: "
-                    + bytes.length;
+            reason = "offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: " + bytes.length;
         }
         return new IllegalArgumentException(reason);
     }
@@ -466,8 +463,7 @@ public class Bytes {
      */
     public static int putLong(byte[] bytes, int offset, long val) {
         if (bytes.length - offset < SIZEOF_LONG) {
-            throw new IllegalArgumentException(
-                    "Not enough room to put a long at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a long at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putLongUnsafe(bytes, offset, val);
@@ -493,10 +489,7 @@ public class Bytes {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Long.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putLong(bytes,
-                (long) offset
-                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
-                val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
         return offset + SIZEOF_LONG;
     }
 
@@ -652,14 +645,9 @@ public class Bytes {
      */
     public static int toIntUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Integer.reverseBytes(
-                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
-                            .getInt(bytes, (long) offset
-                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Integer.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
-                    .getInt(bytes, (long) offset
-                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -672,14 +660,9 @@ public class Bytes {
      */
     public static short toShortUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Short.reverseBytes(
-                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
-                            .getShort(bytes, (long) offset
-                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Short.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
-                    .getShort(bytes, (long) offset
-                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -692,14 +675,9 @@ public class Bytes {
      */
     public static long toLongUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Long.reverseBytes(
-                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
-                            .getLong(bytes, (long) offset
-                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Long.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
-                    .getLong(bytes, (long) offset
-                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -715,8 +693,7 @@ public class Bytes {
      */
     public static int readAsInt(byte[] bytes, int offset, final int length) {
         if (offset + length > bytes.length) {
-            throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + ") exceed the"
-                    + " capacity of the array: " + bytes.length);
+            throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: " + bytes.length);
         }
         int n = 0;
         for (int i = offset; i < (offset + length); i++) {
@@ -738,8 +715,7 @@ public class Bytes {
      */
     public static int putInt(byte[] bytes, int offset, int val) {
         if (bytes.length - offset < SIZEOF_INT) {
-            throw new IllegalArgumentException(
-                    "Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putIntUnsafe(bytes, offset, val);
@@ -765,10 +741,7 @@ public class Bytes {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Integer.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putInt(bytes,
-                (long) offset
-                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
-                val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
         return offset + SIZEOF_INT;
     }
 
@@ -857,8 +830,7 @@ public class Bytes {
      */
     public static int putShort(byte[] bytes, int offset, short val) {
         if (bytes.length - offset < SIZEOF_SHORT) {
-            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a "
-                    + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putShortUnsafe(bytes, offset, val);
@@ -882,10 +854,7 @@ public class Bytes {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Short.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putShort(bytes,
-                (long) offset
-                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
-                val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
         return offset + SIZEOF_SHORT;
     }
 
@@ -904,8 +873,7 @@ public class Bytes {
      */
     public static int putAsShort(byte[] bytes, int offset, int val) {
         if (bytes.length - offset < SIZEOF_SHORT) {
-            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a "
-                    + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         bytes[offset + 1] = (byte) val;
         val >>= 8;
@@ -996,8 +964,7 @@ public class Bytes {
      * @return 0 if equal, < 0 if left is less than right, etc.
      */
     public static int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2, int length2) {
-        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(buffer1, offset1, length1, buffer2, offset2,
-                length2);
+        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(buffer1, offset1, length1, buffer2, offset2, length2);
     }
 
     interface Comparer<T> {
@@ -1151,8 +1118,8 @@ public class Bytes {
                 }
                 final int minLength = Math.min(length1, length2);
                 final int minWords = minLength / SIZEOF_LONG;
-                final long offset1Adj = offset1 + (long) BYTE_ARRAY_BASE_OFFSET;
-                final long offset2Adj = offset2 + (long) BYTE_ARRAY_BASE_OFFSET;
+                final long offset1Adj = offset1 + (long)BYTE_ARRAY_BASE_OFFSET;
+                final long offset2Adj = offset2 + (long)BYTE_ARRAY_BASE_OFFSET;
 
                 /*
                  * Compare 8 bytes at a time. Benchmarking shows comparing 8 bytes at a
@@ -1235,8 +1202,7 @@ public class Bytes {
         return compareTo(left, right) == 0;
     }
 
-    public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset,
-            int rightLen) {
+    public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset, int rightLen) {
         // short circuit case
         if (left == right && leftOffset == rightOffset && leftLen == rightLen) {
             return true;
@@ -1255,8 +1221,7 @@ public class Bytes {
         if (left[leftOffset + leftLen - 1] != right[rightOffset + rightLen - 1])
             return false;
 
-        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(left, leftOffset, leftLen, right, rightOffset,
-                rightLen) == 0;
+        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(left, leftOffset, leftLen, right, rightOffset, rightLen) == 0;
     }
 
     /**
@@ -1287,9 +1252,7 @@ public class Bytes {
      * array on the left.
      */
     public static boolean startsWith(byte[] bytes, byte[] prefix) {
-        return bytes != null && prefix != null && bytes.length >= prefix.length
-                && LexicographicalComparerHolder.BEST_COMPARER.compareTo(bytes, 0, prefix.length, prefix, 0,
-                        prefix.length) == 0;
+        return bytes != null && prefix != null && bytes.length >= prefix.length && LexicographicalComparerHolder.BEST_COMPARER.compareTo(bytes, 0, prefix.length, prefix, 0, prefix.length) == 0;
     }
 
     /**
@@ -1677,8 +1640,7 @@ public class Bytes {
     public static void writeStringFixedSize(final DataOutput out, String s, int size) throws IOException {
         byte[] b = toBytes(s);
         if (b.length > size) {
-            throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b)
-                    + ") into a field of length " + size);
+            throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b) + ") into a field of length " + size);
         }
 
         out.writeBytes(s);
@@ -1980,8 +1942,7 @@ public class Bytes {
         hex = hex.toUpperCase();
         byte[] b = new byte[hex.length() / 2];
         for (int i = 0; i < b.length; i++) {
-            b[i] = (byte) ((toBinaryFromHex((byte) hex.charAt(2 * i)) << 4)
-                    + (toBinaryFromHex((byte) hex.charAt((2 * i + 1))) & 0xff));
+            b[i] = (byte) ((toBinaryFromHex((byte) hex.charAt(2 * i)) << 4) + (toBinaryFromHex((byte) hex.charAt((2 * i + 1))) & 0xff));
         }
         return b;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/BytesSplitter.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/BytesSplitter.java b/core-common/src/main/java/org/apache/kylin/common/util/BytesSplitter.java
index 8502b9c..c644890 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/BytesSplitter.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/BytesSplitter.java
@@ -29,8 +29,7 @@ import org.slf4j.LoggerFactory;
 public class BytesSplitter {
     private static final Logger logger = LoggerFactory.getLogger(BytesSplitter.class);
 
-    private static final int[] COMMON_DELIMS = new int[] { "\177".codePointAt(0), "|".codePointAt(0),
-            "\t".codePointAt(0), ",".codePointAt(0) };
+    private static final int[] COMMON_DELIMS = new int[] { "\177".codePointAt(0), "|".codePointAt(0), "\t".codePointAt(0), ",".codePointAt(0) };
 
     private SplittedBytes[] splitBuffers;
     private int bufferSize;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
index dfca33c..0eb1af5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
@@ -58,23 +58,15 @@ public class ClassUtil {
     private static final Map<String, String> classRenameMap;
     static {
         classRenameMap = new HashMap<>();
-        classRenameMap.put("org.apache.kylin.job.common.HadoopShellExecutable",
-                "org.apache.kylin.engine.mr.common.HadoopShellExecutable");
-        classRenameMap.put("org.apache.kylin.job.common.MapReduceExecutable",
-                "org.apache.kylin.engine.mr.common.MapReduceExecutable");
+        classRenameMap.put("org.apache.kylin.job.common.HadoopShellExecutable", "org.apache.kylin.engine.mr.common.HadoopShellExecutable");
+        classRenameMap.put("org.apache.kylin.job.common.MapReduceExecutable", "org.apache.kylin.engine.mr.common.MapReduceExecutable");
         classRenameMap.put("org.apache.kylin.job.cube.CubingJob", "org.apache.kylin.engine.mr.CubingJob");
-        classRenameMap.put("org.apache.kylin.job.cube.GarbageCollectionStep",
-                "org.apache.kylin.storage.hbase.steps.DeprecatedGCStep");
-        classRenameMap.put("org.apache.kylin.job.cube.MergeDictionaryStep",
-                "org.apache.kylin.engine.mr.steps.MergeDictionaryStep");
-        classRenameMap.put("org.apache.kylin.job.cube.UpdateCubeInfoAfterBuildStep",
-                "org.apache.kylin.engine.mr.steps.UpdateCubeInfoAfterBuildStep");
-        classRenameMap.put("org.apache.kylin.job.cube.UpdateCubeInfoAfterMergeStep",
-                "org.apache.kylin.engine.mr.steps.UpdateCubeInfoAfterMergeStep");
-        classRenameMap.put("org.apache.kylin.rest.util.KeywordDefaultDirtyHack",
-                "org.apache.kylin.query.util.KeywordDefaultDirtyHack");
-        classRenameMap.put("org.apache.kylin.rest.util.CognosParenthesesEscape",
-                "org.apache.kylin.query.util.CognosParenthesesEscape");
+        classRenameMap.put("org.apache.kylin.job.cube.GarbageCollectionStep", "org.apache.kylin.storage.hbase.steps.DeprecatedGCStep");
+        classRenameMap.put("org.apache.kylin.job.cube.MergeDictionaryStep", "org.apache.kylin.engine.mr.steps.MergeDictionaryStep");
+        classRenameMap.put("org.apache.kylin.job.cube.UpdateCubeInfoAfterBuildStep", "org.apache.kylin.engine.mr.steps.UpdateCubeInfoAfterBuildStep");
+        classRenameMap.put("org.apache.kylin.job.cube.UpdateCubeInfoAfterMergeStep", "org.apache.kylin.engine.mr.steps.UpdateCubeInfoAfterMergeStep");
+        classRenameMap.put("org.apache.kylin.rest.util.KeywordDefaultDirtyHack", "org.apache.kylin.query.util.KeywordDefaultDirtyHack");
+        classRenameMap.put("org.apache.kylin.rest.util.CognosParenthesesEscape", "org.apache.kylin.query.util.CognosParenthesesEscape");
     }
 
     @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java b/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
index 2f7b9c5..365caa8 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
@@ -40,9 +40,9 @@ public class ClasspathScanner {
             }
             System.exit(0);
         }
-
+        
         final int[] hitCount = new int[1];
-
+        
         scanner.scan("", new ResourceVisitor() {
             public void accept(File dir, String relativeFileName) {
                 check(dir.getAbsolutePath(), relativeFileName.replace('\\', '/'));
@@ -64,7 +64,7 @@ public class ClasspathScanner {
                 }
             }
         });
-
+        
         int exitCode = hitCount[0] > 0 ? 0 : 1;
         System.exit(exitCode);
     }
@@ -118,9 +118,9 @@ public class ClasspathScanner {
                 break;
             loader = loader.getParent();
         }
-
+        
         List<File> roots = new ArrayList();
-
+        
         // parent first
         for (int i = loaders.size() - 1; i >= 0; i--) {
             ClassLoader l = loaders.get(i);
@@ -336,4 +336,4 @@ public class ClasspathScanner {
         return j == slen;
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/CompressionUtils.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/CompressionUtils.java b/core-common/src/main/java/org/apache/kylin/common/util/CompressionUtils.java
index 32c7104..7bd520c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/CompressionUtils.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/CompressionUtils.java
@@ -46,8 +46,7 @@ public class CompressionUtils {
         outputStream.close();
         byte[] output = outputStream.toByteArray();
 
-        logger.debug("Original: " + data.length + " bytes. " + "Compressed: " + output.length + " byte. Time: "
-                + (System.currentTimeMillis() - startTime));
+        logger.debug("Original: " + data.length + " bytes. " + "Compressed: " + output.length + " byte. Time: " + (System.currentTimeMillis() - startTime));
         return output;
     }
 
@@ -65,8 +64,7 @@ public class CompressionUtils {
         outputStream.close();
         byte[] output = outputStream.toByteArray();
 
-        logger.debug("Original: " + data.length + " bytes. " + "Decompressed: " + output.length + " bytes. Time: "
-                + (System.currentTimeMillis() - startTime));
+        logger.debug("Original: " + data.length + " bytes. " + "Decompressed: " + output.length + " bytes. Time: " + (System.currentTimeMillis() - startTime));
         return output;
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/DBUtils.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/DBUtils.java b/core-common/src/main/java/org/apache/kylin/common/util/DBUtils.java
index 0265da4..9ecd3c3 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/DBUtils.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/DBUtils.java
@@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory;
 public class DBUtils {
 
     private static final Logger logger = LoggerFactory.getLogger(DBUtils.class);
-
+    
     /**
      * Closes an <code>ResultSet</code> unconditionally.
      * <p>

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/DateFormat.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/DateFormat.java b/core-common/src/main/java/org/apache/kylin/common/util/DateFormat.java
index eb4d570..29858f1 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/DateFormat.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/DateFormat.java
@@ -48,7 +48,7 @@ public class DateFormat {
         }
         return r;
     }
-
+    
     public static String formatToCompactDateStr(long millis) {
         return formatToDateStr(millis, COMPACT_DATE_PATTERN);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/HadoopUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/HadoopUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/HadoopUtil.java
index 70dfa30..f242515 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/HadoopUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/HadoopUtil.java
@@ -68,21 +68,21 @@ public class HadoopUtil {
     public static FileSystem getWorkingFileSystem() throws IOException {
         return getFileSystem(KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory());
     }
-
+    
     public static FileSystem getWorkingFileSystem(Configuration conf) throws IOException {
         Path workingPath = new Path(KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory());
         return getFileSystem(workingPath, conf);
     }
-
+    
     public static FileSystem getFileSystem(String path) throws IOException {
         return getFileSystem(new Path(makeURI(path)));
     }
-
+    
     public static FileSystem getFileSystem(Path path) throws IOException {
         Configuration conf = getCurrentConfiguration();
         return getFileSystem(path, conf);
     }
-
+    
     public static FileSystem getFileSystem(Path path, Configuration conf) {
         try {
             return path.getFileSystem(conf);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/ImplementationSwitch.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ImplementationSwitch.java b/core-common/src/main/java/org/apache/kylin/common/util/ImplementationSwitch.java
index b349443..f14ac98 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ImplementationSwitch.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ImplementationSwitch.java
@@ -58,8 +58,7 @@ public class ImplementationSwitch<I> {
     public synchronized I get(int id) {
         String clzName = impls.get(id);
         if (clzName == null) {
-            throw new IllegalArgumentException(
-                    "Implementation class missing, ID " + id + ", interface " + interfaceClz.getName());
+            throw new IllegalArgumentException("Implementation class missing, ID " + id + ", interface " + interfaceClz.getName());
         }
 
         @SuppressWarnings("unchecked")
@@ -75,8 +74,7 @@ public class ImplementationSwitch<I> {
         }
 
         if (result == null)
-            throw new IllegalArgumentException(
-                    "Implementations missing, ID " + id + ", interface " + interfaceClz.getName());
+            throw new IllegalArgumentException("Implementations missing, ID " + id + ", interface " + interfaceClz.getName());
 
         return result;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/JsonUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/JsonUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/JsonUtil.java
index 23f0fbc..216abae 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/JsonUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/JsonUtil.java
@@ -46,28 +46,23 @@ public class JsonUtil {
         indentMapper.configure(SerializationFeature.INDENT_OUTPUT, true);
     }
 
-    public static <T> T readValue(File src, Class<T> valueType)
-            throws IOException, JsonParseException, JsonMappingException {
+    public static <T> T readValue(File src, Class<T> valueType) throws IOException, JsonParseException, JsonMappingException {
         return mapper.readValue(src, valueType);
     }
 
-    public static <T> T readValue(String content, Class<T> valueType)
-            throws IOException, JsonParseException, JsonMappingException {
+    public static <T> T readValue(String content, Class<T> valueType) throws IOException, JsonParseException, JsonMappingException {
         return mapper.readValue(content, valueType);
     }
 
-    public static <T> T readValue(Reader src, Class<T> valueType)
-            throws IOException, JsonParseException, JsonMappingException {
+    public static <T> T readValue(Reader src, Class<T> valueType) throws IOException, JsonParseException, JsonMappingException {
         return mapper.readValue(src, valueType);
     }
 
-    public static <T> T readValue(InputStream src, Class<T> valueType)
-            throws IOException, JsonParseException, JsonMappingException {
+    public static <T> T readValue(InputStream src, Class<T> valueType) throws IOException, JsonParseException, JsonMappingException {
         return mapper.readValue(src, valueType);
     }
 
-    public static <T> T readValue(byte[] src, Class<T> valueType)
-            throws IOException, JsonParseException, JsonMappingException {
+    public static <T> T readValue(byte[] src, Class<T> valueType) throws IOException, JsonParseException, JsonMappingException {
         return mapper.readValue(src, valueType);
     }
 
@@ -77,13 +72,11 @@ public class JsonUtil {
         return mapper.readValue(content, typeRef);
     }
 
-    public static void writeValueIndent(OutputStream out, Object value)
-            throws IOException, JsonGenerationException, JsonMappingException {
+    public static void writeValueIndent(OutputStream out, Object value) throws IOException, JsonGenerationException, JsonMappingException {
         indentMapper.writeValue(out, value);
     }
 
-    public static void writeValue(OutputStream out, Object value)
-            throws IOException, JsonGenerationException, JsonMappingException {
+    public static void writeValue(OutputStream out, Object value) throws IOException, JsonGenerationException, JsonMappingException {
         mapper.writeValue(out, value);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/MailService.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/MailService.java b/core-common/src/main/java/org/apache/kylin/common/util/MailService.java
index cee33aa..25bc03c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/MailService.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/MailService.java
@@ -42,12 +42,10 @@ public class MailService {
     private String sender;
 
     public MailService(KylinConfig config) {
-        this(config.isMailEnabled(), config.isStarttlsEnabled(), config.getMailHost(), config.getSmtpPort(),
-                config.getMailUsername(), config.getMailPassword(), config.getMailSender());
+        this(config.isMailEnabled(), config.isStarttlsEnabled(), config.getMailHost(), config.getSmtpPort(), config.getMailUsername(), config.getMailPassword(), config.getMailSender());
     }
 
-    private MailService(boolean enabled, boolean starttlsEnabled, String host, String port, String username,
-            String password, String sender) {
+    private MailService(boolean enabled, boolean starttlsEnabled, String host, String port, String username, String password, String sender) {
         this.enabled = enabled;
         this.starttlsEnabled = starttlsEnabled;
         this.host = host;
@@ -97,7 +95,7 @@ public class MailService {
         } else {
             email.setSmtpPort(Integer.valueOf(port));
         }
-
+        
         if (username != null && username.trim().length() > 0) {
             email.setAuthentication(username, password);
         }


[57/67] [abbrv] kylin git commit: KYLIN-2603 allow smooth upgrade of hbase coprocessor

Posted by li...@apache.org.
KYLIN-2603 allow smooth upgrade of hbase coprocessor


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0c85fb72
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0c85fb72
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0c85fb72

Branch: refs/heads/master
Commit: 0c85fb721739490b0ec8f011598b9f9fd5e95bef
Parents: 38308bc
Author: Li Yang <li...@apache.org>
Authored: Thu Jun 1 10:29:21 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Thu Jun 1 14:34:07 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/KylinConfigBase.java |  4 ++++
 .../apache/kylin/gridtable/GTScanRequest.java    | 19 +++++++++++++++++--
 2 files changed, 21 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0c85fb72/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 77c2987..f465949 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -372,6 +372,10 @@ abstract public class KylinConfigBase implements Serializable {
     public boolean allowCubeAppearInMultipleProjects() {
         return Boolean.parseBoolean(getOptional("kylin.cube.allow-appear-in-multiple-projects", "false"));
     }
+    
+    public int getGTScanRequestSerializationLevel() {
+        return Integer.parseInt(getOptional("kylin.cube.gtscanrequest-serialization-level", "1"));
+    }
 
     // ============================================================================
     // JOB
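
[Editor's aside, not part of the commit: the new getter follows the usual getOptional pattern, so the level can be pinned in kylin.properties during a rolling upgrade and raised once every region server runs the new coprocessor. A minimal sketch of reading it, assuming a properly initialised Kylin environment; only the property key and its default of 1 come from the hunk above, the class name below is illustrative.]

    import org.apache.kylin.common.KylinConfig;

    public class ShowSerializationLevel {
        public static void main(String[] args) {
            // Reads kylin.cube.gtscanrequest-serialization-level (default "1" per the diff above).
            // getInstanceFromEnv() requires KYLIN_CONF / kylin.properties to be set up.
            int level = KylinConfig.getInstanceFromEnv().getGTScanRequestSerializationLevel();
            System.out.println("GTScanRequest serialization level = " + level);
        }
    }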

http://git-wip-us.apache.org/repos/asf/kylin/blob/0c85fb72/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index ffaa8bd..e65d2b8 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.BytesSerializer;
 import org.apache.kylin.common.util.BytesUtil;
@@ -347,9 +348,14 @@ public class GTScanRequest {
         return Arrays.copyOf(byteBuffer.array(), byteBuffer.position());
     }
 
+    private static final int SERIAL_0_BASE = 0;
+    private static final int SERIAL_1_HAVING_FILTER = 1;
+    
     public static final BytesSerializer<GTScanRequest> serializer = new BytesSerializer<GTScanRequest>() {
         @Override
         public void serialize(GTScanRequest value, ByteBuffer out) {
+            final int serialLevel = KylinConfig.getInstanceFromEnv().getGTScanRequestSerializationLevel();
+            
             GTInfo.serializer.serialize(value.info, out);
 
             BytesUtil.writeVInt(value.ranges.size(), out);
@@ -364,7 +370,10 @@ public class GTScanRequest {
 
             ImmutableBitSet.serializer.serialize(value.columns, out);
             BytesUtil.writeByteArray(GTUtil.serializeGTFilter(value.filterPushDown, value.info), out);
-            BytesUtil.writeByteArray(TupleFilterSerializer.serialize(value.havingFilterPushDown, StringCodeSystem.INSTANCE), out);
+            
+            if (serialLevel >= SERIAL_1_HAVING_FILTER) {
+                BytesUtil.writeByteArray(TupleFilterSerializer.serialize(value.havingFilterPushDown, StringCodeSystem.INSTANCE), out);
+            }
 
             ImmutableBitSet.serializer.serialize(value.aggrGroupBy, out);
             ImmutableBitSet.serializer.serialize(value.aggrMetrics, out);
@@ -380,6 +389,8 @@ public class GTScanRequest {
 
         @Override
         public GTScanRequest deserialize(ByteBuffer in) {
+            final int serialLevel = KylinConfig.getInstanceFromEnv().getGTScanRequestSerializationLevel();
+            
             GTInfo sInfo = GTInfo.serializer.deserialize(in);
 
             List<GTScanRange> sRanges = Lists.newArrayList();
@@ -398,7 +409,11 @@ public class GTScanRequest {
 
             ImmutableBitSet sColumns = ImmutableBitSet.serializer.deserialize(in);
             TupleFilter sGTFilter = GTUtil.deserializeGTFilter(BytesUtil.readByteArray(in), sInfo);
-            TupleFilter sGTHavingFilter = TupleFilterSerializer.deserialize(BytesUtil.readByteArray(in), StringCodeSystem.INSTANCE);
+            
+            TupleFilter sGTHavingFilter = null;
+            if (serialLevel >= SERIAL_1_HAVING_FILTER) {
+                sGTHavingFilter = TupleFilterSerializer.deserialize(BytesUtil.readByteArray(in), StringCodeSystem.INSTANCE);
+            }
 
             ImmutableBitSet sAggGroupBy = ImmutableBitSet.serializer.deserialize(in);
             ImmutableBitSet sAggrMetrics = ImmutableBitSet.serializer.deserialize(in);
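
[Editor's aside, not part of the commit: taken together, the two hunks implement a small versioned-serialization guard. The writer consults a config level and appends newer fields only when the level allows, so an older coprocessor that stops reading earlier still receives a buffer in the layout it expects. Below is a minimal standalone sketch of the same idea; the class, field names and ByteBuffer layout are assumptions for illustration, not Kylin's actual wire format.]

    import java.nio.ByteBuffer;

    // Versioned-serialization sketch: a field introduced later is written only when
    // the configured level permits, so older readers see the layout they expect.
    public class VersionedMessage {
        static final int LEVEL_0_BASE = 0;        // original layout
        static final int LEVEL_1_EXTRA_FIELD = 1; // adds one optional trailing field

        final long base;
        final Long extra; // null when the field was not written

        VersionedMessage(long base, Long extra) {
            this.base = base;
            this.extra = extra;
        }

        static void serialize(VersionedMessage m, ByteBuffer out, int level) {
            out.putLong(m.base);
            if (level >= LEVEL_1_EXTRA_FIELD) {
                out.putLong(m.extra == null ? 0L : m.extra); // newer field, gated by level
            }
        }

        static VersionedMessage deserialize(ByteBuffer in, int level) {
            long base = in.getLong();
            Long extra = null;
            if (level >= LEVEL_1_EXTRA_FIELD) {
                extra = in.getLong(); // only read what the writer actually wrote
            }
            return new VersionedMessage(base, extra);
        }

        public static void main(String[] args) {
            ByteBuffer buf = ByteBuffer.allocate(64);
            serialize(new VersionedMessage(42L, 7L), buf, LEVEL_0_BASE); // old wire format
            buf.flip();
            VersionedMessage m = deserialize(buf, LEVEL_0_BASE);
            System.out.println(m.base + ", extra=" + m.extra); // prints: 42, extra=null
        }
    }

[In the commit itself the same role is played by kylin.cube.gtscanrequest-serialization-level and the having-filter byte array: both serialize and deserialize consult the config level, so writer and reader stay on the same layout as long as they read the same setting.]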


[15/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java b/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
index 413fd8d..672f3e0 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
@@ -123,8 +123,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         {
             LogicalTupleFilter filter = and(timeComp0, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(1, r.size());//scan range are [close,close]
             assertEquals("[null, 10]-[1421193600000, 10]", r.get(0).toString());
@@ -133,40 +132,33 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         }
         {
             LogicalTupleFilter filter = and(timeComp2, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(0, r.size());
         }
         {
             LogicalTupleFilter filter = and(timeComp4, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(0, r.size());
         }
         {
             LogicalTupleFilter filter = and(timeComp5, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(0, r.size());
         }
         {
-            LogicalTupleFilter filter = or(and(timeComp2, ageComp1), and(timeComp1, ageComp1),
-                    and(timeComp6, ageComp1));
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            LogicalTupleFilter filter = or(and(timeComp2, ageComp1), and(timeComp1, ageComp1), and(timeComp6, ageComp1));
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(1, r.size());
             assertEquals("[1421193600000, 10]-[null, 10]", r.get(0).toString());
-            assertEquals("[[null, 10, null, null, null], [1421193600000, 10, null, null, null]]",
-                    r.get(0).fuzzyKeys.toString());
+            assertEquals("[[null, 10, null, null, null], [1421193600000, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
         }
         {
             LogicalTupleFilter filter = or(timeComp2, timeComp1, timeComp6);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(1, r.size());
             assertEquals("[1421193600000, null]-[null, null]", r.get(0).toString());
@@ -175,16 +167,14 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         {
             //skip FALSE filter
             LogicalTupleFilter filter = and(ageComp1, ConstantTupleFilter.FALSE);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(0, r.size());
         }
         {
             //TRUE or FALSE filter
             LogicalTupleFilter filter = or(ConstantTupleFilter.TRUE, ConstantTupleFilter.FALSE);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(1, r.size());
             assertEquals("[null, null]-[null, null]", r.get(0).toString());
@@ -192,8 +182,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         {
             //TRUE or other filter
             LogicalTupleFilter filter = or(ageComp1, ConstantTupleFilter.TRUE);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(1, r.size());
             assertEquals("[null, null]-[null, null]", r.get(0).toString());
@@ -206,8 +195,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         {
             LogicalTupleFilter filter = and(timeComp0, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(1, r.size());//scan range are [close,close]
             assertEquals("[null, 10]-[1421193600000, 10]", r.get(0).toString());
@@ -217,8 +205,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         {
             LogicalTupleFilter filter = and(timeComp5, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd),
-                    info.colRef(0), filter);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd), info.colRef(0), filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(0, r.size());//scan range are [close,close]
         }
@@ -263,8 +250,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         // merge too many ranges
         {
-            LogicalTupleFilter filter = or(and(timeComp4, ageComp1), and(timeComp4, ageComp2),
-                    and(timeComp4, ageComp3));
+            LogicalTupleFilter filter = or(and(timeComp4, ageComp1), and(timeComp4, ageComp2), and(timeComp4, ageComp3));
             CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
             List<GTScanRange> r = planner.planScanRanges();
             assertEquals(3, r.size());
@@ -279,10 +265,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
     @Test
     public void verifyFirstRow() throws IOException {
-        doScanAndVerify(table,
-                new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null)
-                        .setFilterPushDown(null).createGTScanRequest(),
-                "[1421193600000, 30, Yang, 10, 10.5]", //
+        doScanAndVerify(table, new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest(), "[1421193600000, 30, Yang, 10, 10.5]", //
                 "[1421193600000, 30, Luke, 10, 10.5]", //
                 "[1421280000000, 20, Dong, 10, 10.5]", //
                 "[1421280000000, 20, Jason, 10, 10.5]", //
@@ -315,18 +298,12 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         LogicalTupleFilter fNotPlusUnevaluatable = not(unevaluatable(info.colRef(1)));
         LogicalTupleFilter filter = and(fComp, fUnevaluatable, fNotPlusUnevaluatable);
 
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[] { "sum" })
-                .setFilterPushDown(filter).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[] { "sum" }).setFilterPushDown(filter).createGTScanRequest();
 
         // note the unEvaluatable column 1 in filter is added to group by
-        assertEquals(
-                "GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], [null], [null]], aggrGroupBy={0, 1}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]",
-                req.toString());
+        assertEquals("GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], [null], [null]], aggrGroupBy={0, 1}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]", req.toString());
 
-        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 20, null]",
-                "[1421280000000, 30, null, 10, null]", "[1421366400000, 20, null, 20, null]",
-                "[1421366400000, 30, null, 20, null]", "[1421452800000, 10, null, 10, null]");
+        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 20, null]", "[1421280000000, 30, null, 10, null]", "[1421366400000, 20, null, 20, null]", "[1421366400000, 30, null, 20, null]", "[1421452800000, 10, null, 10, null]");
     }
 
     @Test
@@ -337,34 +314,26 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         CompareTupleFilter fComp2 = compare(info.colRef(1), FilterOperatorEnum.GT, enc(info, 1, "10"));
         LogicalTupleFilter filter = and(fComp1, fComp2);
 
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[] { "sum" })
-                .setFilterPushDown(filter).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[] { "sum" }).setFilterPushDown(filter).createGTScanRequest();
         // note the evaluatable column 1 in filter is added to returned columns but not in group by
-        assertEquals(
-                "GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 GT [\\x00]], aggrGroupBy={0}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]",
-                req.toString());
+        assertEquals("GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 GT [\\x00]], aggrGroupBy={0}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]", req.toString());
 
-        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 30, null]",
-                "[1421366400000, 20, null, 40, null]");
+        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 30, null]", "[1421366400000, 20, null, 40, null]");
     }
 
     @Test
     public void verifyAggregateAndHavingFilter() throws IOException {
         GTInfo info = table.getInfo();
-
+        
         TblColRef havingCol = TblColRef.newInnerColumn("SUM_OF_BIGDECIMAL", InnerDataTypeEnum.LITERAL);
         havingCol.getColumnDesc().setId("1"); // point to the first aggregated measure
         CompareTupleFilter havingFilter = compare(havingCol, FilterOperatorEnum.GT, "20");
-
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setAggrGroupBy(setOf(1)).setAggrMetrics(setOf(4)).setAggrMetricsFuncs(new String[] { "sum" })
-                .setHavingFilterPushDown(havingFilter).createGTScanRequest();
-
-        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[null, 20, null, null, 42.0]",
-                "[null, 30, null, null, 52.5]");
+        
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(1)).setAggrMetrics(setOf(4)).setAggrMetricsFuncs(new String[] { "sum" }).setHavingFilterPushDown(havingFilter).createGTScanRequest();
+        
+        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[null, 20, null, null, 42.0]", "[null, 30, null, null, 52.5]");
     }
-
+    
     @Test
     public void testFilterScannerPerf() throws IOException {
         GridTable table = newTestPerfTable();
@@ -385,11 +354,9 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
     }
 
     @SuppressWarnings("unused")
-    private void testFilterScannerPerfInner(GridTable table, GTInfo info, LogicalTupleFilter filter)
-            throws IOException {
+    private void testFilterScannerPerfInner(GridTable table, GTInfo info, LogicalTupleFilter filter) throws IOException {
         long start = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setFilterPushDown(filter).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setFilterPushDown(filter).createGTScanRequest();
         IGTScanner scanner = table.scan(req);
         int i = 0;
         for (GTRecord r : scanner) {
@@ -397,8 +364,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         }
         scanner.close();
         long end = System.currentTimeMillis();
-        System.out.println(
-                (end - start) + "ms with filter cache enabled=" + FilterResultCache.ENABLED + ", " + i + " rows");
+        System.out.println((end - start) + "ms with filter cache enabled=" + FilterResultCache.ENABLED + ", " + i + " rows");
     }
 
     @Test
@@ -418,9 +384,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
         colMapping.add(extColB);
 
         TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals(
-                "AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 EQ [\\x00]]",
-                newFilter.toString());
+        assertEquals("AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 EQ [\\x00]]", newFilter.toString());
     }
 
     @Test
@@ -441,9 +405,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         // $1<"9" round up to $1<"10"
         TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals(
-                "AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 LT [\\x00]]",
-                newFilter.toString());
+        assertEquals("AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 LT [\\x00]]", newFilter.toString());
     }
 
     @Test
@@ -464,8 +426,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         // $1<="9" round down to FALSE
         TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals("AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], []]",
-                newFilter.toString());
+        assertEquals("AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], []]", newFilter.toString());
     }
 
     @Test
@@ -486,9 +447,7 @@ public class DictGridTableTest extends LocalFileMetadataTestCase {
 
         // $1 in ("9", "10", "15") has only "10" left
         TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals(
-                "AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 IN [\\x00]]",
-                newFilter.toString());
+        assertEquals("AND [UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], UNKNOWN_MODEL:NULL.GT_MOCKUP_TABLE.1 IN [\\x00]]", newFilter.toString());
     }
 
     private void doScanAndVerify(GridTable table, GTScanRequest req, String... verifyRows) throws IOException {
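
The inline comments in the three hunks above describe how GTUtil.convertFilterColumnsAndConstants rounds filter constants against the column dictionary: a "<" constant missing from the dictionary is rounded up to the next existing value ($1<"9" becomes $1<"10"), a "<=" constant below the smallest entry collapses to an always-false clause, and an IN list keeps only the values the dictionary actually contains. A self-contained sketch of that rounding idea over a sorted set follows -- the dictionary values and names are invented for illustration and this is not Kylin's TrieDictionary API:

    import java.util.Arrays;
    import java.util.List;
    import java.util.TreeSet;
    import java.util.stream.Collectors;

    public class DictRoundingSketch {
        public static void main(String[] args) {
            // Hypothetical dictionary of the values that actually occur in column $1.
            TreeSet<Integer> dict = new TreeSet<>(Arrays.asList(10, 20, 30));

            // "$1 < 9": round the constant UP to the next existing dictionary value.
            // "$1 < 9" and "$1 < 10" select exactly the same dictionary entries.
            System.out.println("$1 < 9   ->  $1 < " + dict.ceiling(9));          // $1 < 10

            // "$1 <= 9": round the constant DOWN; no dictionary value is <= 9,
            // so the clause can never match and collapses to FALSE.
            Integer floor = dict.floor(9);
            System.out.println("$1 <= 9  ->  " + (floor == null ? "FALSE" : "$1 <= " + floor));

            // "$1 IN (9, 10, 15)": keep only the constants present in the dictionary.
            List<Integer> in = Arrays.asList(9, 10, 15).stream()
                    .filter(dict::contains)
                    .collect(Collectors.toList());
            System.out.println("$1 IN (9, 10, 15)  ->  $1 IN " + in);            // [10]
        }
    }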

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/SortedIteratorMergerWithLimitTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/SortedIteratorMergerWithLimitTest.java b/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/SortedIteratorMergerWithLimitTest.java
index 022a800..1627b4f 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/SortedIteratorMergerWithLimitTest.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/SortedIteratorMergerWithLimitTest.java
@@ -71,26 +71,20 @@ public class SortedIteratorMergerWithLimitTest {
     @Test
     public void basic1() {
 
-        List<CloneableInteger> a = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2),
-                new CloneableInteger(3));
-        List<CloneableInteger> b = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2),
-                new CloneableInteger(3));
-        List<CloneableInteger> c = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2),
-                new CloneableInteger(5));
+        List<CloneableInteger> a = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(3));
+        List<CloneableInteger> b = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(3));
+        List<CloneableInteger> c = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(5));
         List<Iterator<CloneableInteger>> input = Lists.newArrayList();
         input.add(a.iterator());
         input.add(b.iterator());
         input.add(c.iterator());
-        SortedIteratorMergerWithLimit<CloneableInteger> merger = new SortedIteratorMergerWithLimit<CloneableInteger>(
-                input.iterator(), 3, getComp());
+        SortedIteratorMergerWithLimit<CloneableInteger> merger = new SortedIteratorMergerWithLimit<CloneableInteger>(input.iterator(), 3, getComp());
         Iterator<CloneableInteger> iterator = merger.getIterator();
         List<CloneableInteger> result = Lists.newArrayList();
         while (iterator.hasNext()) {
             result.add(iterator.next());
         }
-        Assert.assertEquals(Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(1),
-                new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(2), new CloneableInteger(2),
-                new CloneableInteger(3), new CloneableInteger(3)), result);
+        Assert.assertEquals(Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(1), new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(2), new CloneableInteger(2), new CloneableInteger(3), new CloneableInteger(3)), result);
     }
 
     @Test
@@ -98,21 +92,18 @@ public class SortedIteratorMergerWithLimitTest {
 
         List<CloneableInteger> a = Lists.newArrayList(new CloneableInteger(2));
         List<CloneableInteger> b = Lists.newArrayList();
-        List<CloneableInteger> c = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2),
-                new CloneableInteger(5));
+        List<CloneableInteger> c = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(5));
         List<Iterator<CloneableInteger>> input = Lists.newArrayList();
         input.add(a.iterator());
         input.add(b.iterator());
         input.add(c.iterator());
-        SortedIteratorMergerWithLimit<CloneableInteger> merger = new SortedIteratorMergerWithLimit<CloneableInteger>(
-                input.iterator(), 3, getComp());
+        SortedIteratorMergerWithLimit<CloneableInteger> merger = new SortedIteratorMergerWithLimit<CloneableInteger>(input.iterator(), 3, getComp());
         Iterator<CloneableInteger> iterator = merger.getIterator();
         List<CloneableInteger> result = Lists.newArrayList();
         while (iterator.hasNext()) {
             result.add(iterator.next());
         }
-        Assert.assertEquals(Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2),
-                new CloneableInteger(2), new CloneableInteger(5)), result);
+        Assert.assertEquals(Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(2), new CloneableInteger(5)), result);
     }
 
     @Test(expected = IllegalStateException.class)
@@ -120,14 +111,12 @@ public class SortedIteratorMergerWithLimitTest {
 
         List<CloneableInteger> a = Lists.newArrayList(new CloneableInteger(2), new CloneableInteger(1));
         List<CloneableInteger> b = Lists.newArrayList();
-        List<CloneableInteger> c = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2),
-                new CloneableInteger(5));
+        List<CloneableInteger> c = Lists.newArrayList(new CloneableInteger(1), new CloneableInteger(2), new CloneableInteger(5));
         List<Iterator<CloneableInteger>> input = Lists.newArrayList();
         input.add(a.iterator());
         input.add(b.iterator());
         input.add(c.iterator());
-        SortedIteratorMergerWithLimit<CloneableInteger> merger = new SortedIteratorMergerWithLimit<CloneableInteger>(
-                input.iterator(), 3, getComp());
+        SortedIteratorMergerWithLimit<CloneableInteger> merger = new SortedIteratorMergerWithLimit<CloneableInteger>(input.iterator(), 3, getComp());
         Iterator<CloneableInteger> iterator = merger.getIterator();
         List<CloneableInteger> result = Lists.newArrayList();
         while (iterator.hasNext()) {
@@ -135,4 +124,4 @@ public class SortedIteratorMergerWithLimitTest {
         }
     }
 
-}
+}
\ No newline at end of file
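
The tests above feed several already-sorted lists of CloneableInteger into SortedIteratorMergerWithLimit and expect a single, still-sorted stream back; the last test expects an IllegalStateException when one input (2, 1) is out of order. The expected results amount to a k-way merge of sorted inputs. A minimal standalone sketch over a PriorityQueue follows -- class and method names are invented, it does not validate input ordering, and it does not reproduce the limit handling (basic1 above, with a limit of 3, expects the merged output without the trailing 5):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.Iterator;
    import java.util.List;
    import java.util.PriorityQueue;

    public class KWayMergeSketch {

        // Holds an iterator together with its current head element so the queue can order by head.
        static class Head<T> {
            final Iterator<T> it;
            T value;

            Head(Iterator<T> it) {
                this.it = it;
                this.value = it.next();
            }

            boolean advance() {
                if (it.hasNext()) {
                    value = it.next();
                    return true;
                }
                return false;
            }
        }

        static <T> List<T> merge(List<Iterator<T>> inputs, Comparator<T> comp) {
            PriorityQueue<Head<T>> pq = new PriorityQueue<>((a, b) -> comp.compare(a.value, b.value));
            for (Iterator<T> it : inputs) {
                if (it.hasNext()) {
                    pq.add(new Head<>(it)); // empty inputs, like list b in the tests above, are simply skipped
                }
            }
            List<T> out = new ArrayList<>();
            while (!pq.isEmpty()) {
                Head<T> h = pq.poll();
                out.add(h.value);           // the smallest remaining head is the next output element
                if (h.advance()) {
                    pq.add(h);              // re-queue the iterator under its new head value
                }
            }
            return out;
        }

        public static void main(String[] args) {
            List<Iterator<Integer>> input = new ArrayList<>();
            input.add(Arrays.asList(1, 2, 3).iterator());
            input.add(Arrays.asList(1, 2, 3).iterator());
            input.add(Arrays.asList(1, 2, 5).iterator());
            // Prints [1, 1, 1, 2, 2, 2, 3, 3, 5].
            System.out.println(merge(input, Comparator.naturalOrder()));
        }
    }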

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder.java
index 9b44277..1ec23b6 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder.java
@@ -42,8 +42,7 @@ public class BatchCubingJobBuilder extends JobBuilderSupport {
     public BatchCubingJobBuilder(CubeSegment newSegment, String submitter) {
         super(newSegment, submitter);
 
-        Preconditions.checkArgument(!newSegment.isEnableSharding(),
-                "V1 job engine does not support building sharded cubes");
+        Preconditions.checkArgument(!newSegment.isEnableSharding(), "V1 job engine does not support building sharded cubes");
 
         this.inputSide = MRUtil.getBatchCubingInputSide(seg);
         this.outputSide = MRUtil.getBatchCubingOutputSide((CubeSegment) seg);
@@ -70,8 +69,7 @@ public class BatchCubingJobBuilder extends JobBuilderSupport {
         result.addTask(createBaseCuboidStep(getCuboidOutputPathsByLevel(cuboidRootPath, 0), jobId));
         // n dim cuboid steps
         for (int i = 1; i <= groupRowkeyColumnsCount; i++) {
-            result.addTask(createNDimensionCuboidStep(getCuboidOutputPathsByLevel(cuboidRootPath, i - 1),
-                    getCuboidOutputPathsByLevel(cuboidRootPath, i), i));
+            result.addTask(createNDimensionCuboidStep(getCuboidOutputPathsByLevel(cuboidRootPath, i - 1), getCuboidOutputPathsByLevel(cuboidRootPath, i), i));
         }
         outputSide.addStepPhase3_BuildCube(result, cuboidRootPath);
 
@@ -96,8 +94,7 @@ public class BatchCubingJobBuilder extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, "FLAT_TABLE"); // marks flat table input
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, cuboidOutputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Base_Cuboid_Builder_" + seg.getRealization().getName());
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Base_Cuboid_Builder_" + seg.getRealization().getName());
         appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "0");
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
 
@@ -118,8 +115,7 @@ public class BatchCubingJobBuilder extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, parentPath);
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_ND-Cuboid_Builder_" + seg.getRealization().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_ND-Cuboid_Builder_" + seg.getRealization().getName() + "_Step");
         appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "" + level);
 
         ndCuboidStep.setMapReduceParams(cmd.toString());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
index c177f1f..106077c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchCubingJobBuilder2.java
@@ -81,8 +81,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
         result.addTask(createBaseCuboidStep(getCuboidOutputPathsByLevel(cuboidRootPath, 0), jobId));
         // n dim cuboid steps
         for (int i = 1; i <= maxLevel; i++) {
-            result.addTask(createNDimensionCuboidStep(getCuboidOutputPathsByLevel(cuboidRootPath, i - 1),
-                    getCuboidOutputPathsByLevel(cuboidRootPath, i), i, jobId));
+            result.addTask(createNDimensionCuboidStep(getCuboidOutputPathsByLevel(cuboidRootPath, i-1), getCuboidOutputPathsByLevel(cuboidRootPath, i), i, jobId));
         }
     }
 
@@ -108,8 +107,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, cuboidRootPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Cube_Builder_" + seg.getRealization().getName());
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Cube_Builder_" + seg.getRealization().getName());
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
 
         cubeStep.setMapReduceParams(cmd.toString());
@@ -134,14 +132,13 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, "FLAT_TABLE"); // marks flat table input
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, cuboidOutputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Base_Cuboid_Builder_" + seg.getRealization().getName());
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Base_Cuboid_Builder_" + seg.getRealization().getName());
         appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "0");
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
 
         baseCuboidStep.setMapReduceParams(cmd.toString());
         baseCuboidStep.setMapReduceJobClass(getBaseCuboidJob());
-        //        baseCuboidStep.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES);
+//        baseCuboidStep.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES);
         return baseCuboidStep;
     }
 
@@ -149,8 +146,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
         return BaseCuboidJob.class;
     }
 
-    private MapReduceExecutable createNDimensionCuboidStep(String parentPath, String outputPath, int level,
-            String jobId) {
+    private MapReduceExecutable createNDimensionCuboidStep(String parentPath, String outputPath, int level, String jobId) {
         // ND cuboid job
         MapReduceExecutable ndCuboidStep = new MapReduceExecutable();
 
@@ -162,8 +158,7 @@ public class BatchCubingJobBuilder2 extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, parentPath);
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_ND-Cuboid_Builder_" + seg.getRealization().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_ND-Cuboid_Builder_" + seg.getRealization().getName() + "_Step");
         appendExecCmdParameters(cmd, BatchConstants.ARG_LEVEL, "" + level);
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
index 5dfa834..0b4ae40 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
@@ -41,8 +41,7 @@ public class BatchMergeJobBuilder extends JobBuilderSupport {
     public BatchMergeJobBuilder(CubeSegment mergeSegment, String submitter) {
         super(mergeSegment, submitter);
 
-        Preconditions.checkArgument(!mergeSegment.isEnableSharding(),
-                "V1 job engine does not support merging sharded cubes");
+        Preconditions.checkArgument(!mergeSegment.isEnableSharding(), "V1 job engine does not support merging sharded cubes");
 
         this.outputSide = MRUtil.getBatchMergeOutputSide(mergeSegment);
     }
@@ -57,8 +56,7 @@ public class BatchMergeJobBuilder extends JobBuilderSupport {
         final String cuboidRootPath = getCuboidRootPath(jobId);
 
         final List<CubeSegment> mergingSegments = cubeSegment.getCubeInstance().getMergingSegments(cubeSegment);
-        Preconditions.checkState(mergingSegments.size() > 1,
-                "there should be more than 2 segments to merge, target segment " + cubeSegment);
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge, target segment " + cubeSegment);
         final List<String> mergingSegmentIds = Lists.newArrayList();
         final List<String> mergingCuboidPaths = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
@@ -91,8 +89,7 @@ public class BatchMergeJobBuilder extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Merge_Cuboid_" + seg.getCubeInstance().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Merge_Cuboid_" + seg.getCubeInstance().getName() + "_Step");
 
         mergeCuboidDataStep.setMapReduceParams(cmd.toString());
         mergeCuboidDataStep.setMapReduceJobClass(MergeCuboidJob.class);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
index 17a75d2..badf628 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
@@ -50,8 +50,7 @@ public class BatchMergeJobBuilder2 extends JobBuilderSupport {
         final String jobId = result.getId();
 
         final List<CubeSegment> mergingSegments = cubeSegment.getCubeInstance().getMergingSegments(cubeSegment);
-        Preconditions.checkState(mergingSegments.size() > 1,
-                "there should be more than 2 segments to merge, target segment " + cubeSegment);
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge, target segment " + cubeSegment);
         final List<String> mergingSegmentIds = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
             mergingSegmentIds.add(merging.getUuid());
@@ -73,8 +72,7 @@ public class BatchMergeJobBuilder2 extends JobBuilderSupport {
         return result;
     }
 
-    private MergeStatisticsStep createMergeStatisticsStep(CubeSegment seg, List<String> mergingSegmentIds,
-            String mergedStatisticsFolder) {
+    private MergeStatisticsStep createMergeStatisticsStep(CubeSegment seg, List<String> mergingSegmentIds, String mergedStatisticsFolder) {
         MergeStatisticsStep result = new MergeStatisticsStep();
         result.setName(ExecutableConstants.STEP_NAME_MERGE_STATISTICS);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/ByteArrayWritable.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/ByteArrayWritable.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/ByteArrayWritable.java
index b385140..a504899 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/ByteArrayWritable.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/ByteArrayWritable.java
@@ -113,8 +113,7 @@ public class ByteArrayWritable implements WritableComparable<ByteArrayWritable>
      *         negative if left is smaller than right.
      */
     public int compareTo(ByteArrayWritable that) {
-        return WritableComparator.compareBytes(this.data, this.offset, this.length, that.data, that.offset,
-                that.length);
+        return WritableComparator.compareBytes(this.data, this.offset, this.length, that.data, that.offset, that.length);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
index 2087205..5aa7d72 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
@@ -80,13 +80,11 @@ public class CubingJob extends DefaultChainedExecutable {
     private static CubingJob initCubingJob(CubeSegment seg, String jobType, String submitter, JobEngineConfig config) {
         KylinConfig kylinConfig = config.getConfig();
         CubeInstance cube = seg.getCubeInstance();
-        List<ProjectInstance> projList = ProjectManager.getInstance(kylinConfig).findProjects(cube.getType(),
-                cube.getName());
+        List<ProjectInstance> projList = ProjectManager.getInstance(kylinConfig).findProjects(cube.getType(), cube.getName());
         if (projList == null || projList.size() == 0) {
             throw new RuntimeException("Cannot find the project containing the cube " + cube.getName() + "!!!");
         } else if (projList.size() >= 2) {
-            String msg = "Find more than one project containing the cube " + cube.getName()
-                    + ". It does't meet the uniqueness requirement!!! ";
+            String msg = "Find more than one project containing the cube " + cube.getName() + ". It does't meet the uniqueness requirement!!! ";
             if (!config.getConfig().allowCubeAppearInMultipleProjects()) {
                 throw new RuntimeException(msg);
             } else {
@@ -101,8 +99,7 @@ public class CubingJob extends DefaultChainedExecutable {
         result.setProjectName(projList.get(0).getName());
         CubingExecutableUtil.setCubeName(seg.getCubeInstance().getName(), result.getParams());
         CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
-        result.setName(seg.getCubeInstance().getName() + " - " + seg.getName() + " - " + jobType + " - "
-                + format.format(new Date(System.currentTimeMillis())));
+        result.setName(seg.getCubeInstance().getName() + " - " + seg.getName() + " - " + jobType + " - " + format.format(new Date(System.currentTimeMillis())));
         result.setSubmitter(submitter);
         result.setNotifyList(seg.getCubeInstance().getDescriptor().getNotifyList());
         return result;
@@ -130,13 +127,11 @@ public class CubingJob extends DefaultChainedExecutable {
 
     @Override
     protected Pair<String, String> formatNotifications(ExecutableContext context, ExecutableState state) {
-        CubeInstance cubeInstance = CubeManager.getInstance(context.getConfig())
-                .getCube(CubingExecutableUtil.getCubeName(this.getParams()));
+        CubeInstance cubeInstance = CubeManager.getInstance(context.getConfig()).getCube(CubingExecutableUtil.getCubeName(this.getParams()));
         final Output output = getManager().getOutput(getId());
         String logMsg;
         state = output.getState();
-        if (state != ExecutableState.ERROR
-                && !cubeInstance.getDescriptor().getStatusNeedNotify().contains(state.toString())) {
+        if (state != ExecutableState.ERROR && !cubeInstance.getDescriptor().getStatusNeedNotify().contains(state.toString())) {
             logger.info("state:" + state + " no need to notify users");
             return null;
         }
@@ -165,8 +160,7 @@ public class CubingJob extends DefaultChainedExecutable {
         content = content.replaceAll("\\$\\{mr_waiting\\}", getMapReduceWaitTime() / 60000 + "mins");
         content = content.replaceAll("\\$\\{last_update_time\\}", new Date(getLastModified()).toString());
         content = content.replaceAll("\\$\\{submitter\\}", StringUtil.noBlank(getSubmitter(), "missing submitter"));
-        content = content.replaceAll("\\$\\{error_log\\}",
-                Matcher.quoteReplacement(StringUtil.noBlank(logMsg, "no error message")));
+        content = content.replaceAll("\\$\\{error_log\\}", Matcher.quoteReplacement(StringUtil.noBlank(logMsg, "no error message")));
 
         try {
             InetAddress inetAddress = InetAddress.getLocalHost();
@@ -175,8 +169,7 @@ public class CubingJob extends DefaultChainedExecutable {
             logger.warn(e.getLocalizedMessage(), e);
         }
 
-        String title = "[" + state.toString() + "] - [" + getDeployEnvName() + "] - [" + getProjectName() + "] - "
-                + CubingExecutableUtil.getCubeName(this.getParams());
+        String title = "[" + state.toString() + "] - [" + getDeployEnvName() + "] - [" + getProjectName() + "] - " + CubingExecutableUtil.getCubeName(this.getParams());
 
         return Pair.of(title, content);
     }
@@ -203,8 +196,7 @@ public class CubingJob extends DefaultChainedExecutable {
      */
     @Override
     protected void handleMetaDataPersistException(Exception exception) {
-        String title = "[ERROR] - [" + getDeployEnvName() + "] - [" + getProjectName() + "] - "
-                + CubingExecutableUtil.getCubeName(this.getParams());
+        String title = "[ERROR] - [" + getDeployEnvName() + "] - [" + getProjectName() + "] - " + CubingExecutableUtil.getCubeName(this.getParams());
         String content = ExecutableConstants.NOTIFY_EMAIL_TEMPLATE;
         final String UNKNOWN = "UNKNOWN";
         String errMsg = null;
@@ -225,8 +217,7 @@ public class CubingJob extends DefaultChainedExecutable {
         content = content.replaceAll("\\$\\{mr_waiting\\}", UNKNOWN);
         content = content.replaceAll("\\$\\{last_update_time\\}", UNKNOWN);
         content = content.replaceAll("\\$\\{submitter\\}", StringUtil.noBlank(getSubmitter(), "missing submitter"));
-        content = content.replaceAll("\\$\\{error_log\\}",
-                Matcher.quoteReplacement(StringUtil.noBlank(errMsg, "no error message")));
+        content = content.replaceAll("\\$\\{error_log\\}", Matcher.quoteReplacement(StringUtil.noBlank(errMsg, "no error message")));
 
         try {
             InetAddress inetAddress = InetAddress.getLocalHost();
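
A detail worth calling out in the CubingJob hunks above: the error log is wrapped in Matcher.quoteReplacement before being handed to String.replaceAll, because replaceAll treats '$' and '\' in the replacement string specially. Stack traces routinely contain '$' (anonymous inner classes) and would otherwise be parsed as group references. A small self-contained illustration -- the template text and class names here are made up and are not ExecutableConstants.NOTIFY_EMAIL_TEMPLATE:

    import java.util.regex.Matcher;

    public class NotifyTemplateSketch {
        public static void main(String[] args) {
            // Hypothetical stand-in for the real notification template.
            String content = "state: ${job_state}\nerror log:\n${error_log}\n";
            // Error messages often contain '$', e.g. from anonymous inner classes.
            String errMsg = "java.lang.RuntimeException at org.example.CubingJob$1.run(CubingJob.java:42)";

            content = content.replaceAll("\\$\\{job_state\\}", "ERROR");

            // Without quoteReplacement the "$1" inside errMsg would be read as a back-reference
            // to capture group 1, which this pattern does not have, and replaceAll would throw.
            content = content.replaceAll("\\$\\{error_log\\}", Matcher.quoteReplacement(errMsg));

            System.out.println(content);
        }
    }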

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTable.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTable.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTable.java
index b448f56..c036445 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTable.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTable.java
@@ -70,7 +70,7 @@ public class DFSFileTable implements IReadableTable {
         }
         return new TableSignature(path, sizeAndLastModified.getFirst(), sizeAndLastModified.getSecond());
     }
-
+    
     @Override
     public boolean exists() throws IOException {
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTableReader.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTableReader.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTableReader.java
index 49a3169..0c9c3fc 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTableReader.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/DFSFileTableReader.java
@@ -188,8 +188,7 @@ public class DFSFileTableReader implements TableReader {
         if (expectedColumnNumber > 0) {
             for (String delim : DETECT_DELIMS) {
                 if (StringSplitter.split(line, delim).length == expectedColumnNumber) {
-                    logger.info("Auto detect delim to be '" + delim + "', split line to " + expectedColumnNumber
-                            + " columns -- " + line);
+                    logger.info("Auto detect delim to be '" + delim + "', split line to " + expectedColumnNumber + " columns -- " + line);
                     return delim;
                 }
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
index 443d6a1..69bba0a 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMROutput2.java
@@ -94,8 +94,7 @@ public interface IMROutput2 {
          * value is M1+M2+..+Mm. CUBOID is 8 bytes cuboid ID; Dx is dimension value with
          * dictionary encoding; Mx is measure value serialization form.
          */
-        public void addStepPhase2_BuildCube(CubeSegment set, List<CubeSegment> mergingSegments,
-                DefaultChainedExecutable jobFlow);
+        public void addStepPhase2_BuildCube(CubeSegment set, List<CubeSegment> mergingSegments, DefaultChainedExecutable jobFlow);
 
         /** Add step that does any necessary clean up. */
         public void addStepPhase3_Cleanup(DefaultChainedExecutable jobFlow);
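
The javadoc fragment above documents the layout of the intermediate cuboid records: an 8-byte cuboid ID, the dimension values Dx in dictionary-encoded form, and the measures Mx serialized as the value. A rough sketch of packing such a key with a ByteBuffer -- the widths and codes below are invented for illustration; in Kylin the real encoding is handled by the row-key encoders such as AbstractRowKeyEncoder:

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public class CuboidKeySketch {

        // Packs an 8-byte cuboid ID followed by fixed-width dictionary codes for each dimension,
        // mirroring the cuboid-ID-then-dictionary-encoded-dimensions layout described above.
        static byte[] packKey(long cuboidId, int[] dimCodes, int[] dimWidths) {
            int len = 8;
            for (int w : dimWidths) {
                len += w;
            }
            ByteBuffer buf = ByteBuffer.allocate(len);
            buf.putLong(cuboidId); // 8 bytes cuboid ID
            for (int i = 0; i < dimCodes.length; i++) {
                // write each dictionary code using its configured byte width, most significant byte first
                for (int b = dimWidths[i] - 1; b >= 0; b--) {
                    buf.put((byte) (dimCodes[i] >>> (8 * b)));
                }
            }
            return buf.array();
        }

        public static void main(String[] args) {
            // cuboid 255 with two dimensions: code 3 stored in 1 byte, code 260 stored in 2 bytes
            byte[] key = packKey(255L, new int[] { 3, 260 }, new int[] { 1, 2 });
            System.out.println(Arrays.toString(key)); // [0, 0, 0, 0, 0, 0, 0, -1, 3, 1, 4]
        }
    }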

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
index d92b654..c1ed345 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
@@ -73,10 +73,8 @@ public class JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_ENABLED, String.valueOf(withStats));
         appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_OUTPUT, getStatisticsPath(jobId));
-        appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_SAMPLING_PERCENT,
-                String.valueOf(config.getConfig().getCubingInMemSamplingPercent()));
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Fact_Distinct_Columns_" + seg.getRealization().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_SAMPLING_PERCENT, String.valueOf(config.getConfig().getCubingInMemSamplingPercent()));
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Fact_Distinct_Columns_" + seg.getRealization().getName() + "_Step");
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBING_JOB_ID, jobId);
         result.setMapReduceParams(cmd.toString());
         result.setCounterSaveAs(CubingJob.SOURCE_RECORD_COUNT + "," + CubingJob.SOURCE_SIZE_BYTES);
@@ -106,6 +104,7 @@ public class JobBuilderSupport {
         CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
         CubingExecutableUtil.setCubingJobId(jobId, result.getParams());
 
+
         return result;
     }
 
@@ -120,8 +119,7 @@ public class JobBuilderSupport {
         return result;
     }
 
-    public UpdateCubeInfoAfterMergeStep createUpdateCubeInfoAfterMergeStep(List<String> mergingSegmentIds,
-            String jobId) {
+    public UpdateCubeInfoAfterMergeStep createUpdateCubeInfoAfterMergeStep(List<String> mergingSegmentIds, String jobId) {
         UpdateCubeInfoAfterMergeStep result = new UpdateCubeInfoAfterMergeStep();
         result.setName(ExecutableConstants.STEP_NAME_UPDATE_CUBE_INFO);
 
@@ -205,4 +203,5 @@ public class JobBuilderSupport {
         }
     }
 
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinMapper.java
index c9e2377..1595bdd 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinMapper.java
@@ -40,8 +40,7 @@ public class KylinMapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Mapper<KEYIN,
     }
 
     @Override
-    final public void map(KEYIN key, VALUEIN value, Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context)
-            throws IOException, InterruptedException {
+    final public void map(KEYIN key, VALUEIN value, Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
         try {
             if (mapCounter++ % BatchConstants.NORMAL_RECORD_LOG_THRESHOLD == 0) {
                 logger.info("Accepting Mapper Key with ordinal: " + mapCounter);
@@ -62,14 +61,12 @@ public class KylinMapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Mapper<KEYIN,
         }
     }
 
-    protected void doMap(KEYIN key, VALUEIN value, Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context)
-            throws IOException, InterruptedException {
+    protected void doMap(KEYIN key, VALUEIN value, Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
         super.map(key, value, context);
     }
 
     @Override
-    final protected void cleanup(Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context)
-            throws IOException, InterruptedException {
+    final protected void cleanup(Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
         try {
             doCleanup(context);
         } catch (IOException ex) { // KYLIN-2170
@@ -87,7 +84,6 @@ public class KylinMapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Mapper<KEYIN,
         }
     }
 
-    protected void doCleanup(Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context)
-            throws IOException, InterruptedException {
+    protected void doCleanup(Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinReducer.java
index e3aef14..e47af9c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/KylinReducer.java
@@ -39,8 +39,7 @@ public class KylinReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Reducer<KEYI
     }
 
     @Override
-    final public void reduce(KEYIN key, Iterable<VALUEIN> values,
-            Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
+    final public void reduce(KEYIN key, Iterable<VALUEIN> values, Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
         try {
             if (reduceCounter++ % BatchConstants.NORMAL_RECORD_LOG_THRESHOLD == 0) {
                 logger.info("Accepting Reducer Key with ordinal: " + reduceCounter);
@@ -62,14 +61,12 @@ public class KylinReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Reducer<KEYI
         }
     }
 
-    protected void doReduce(KEYIN key, Iterable<VALUEIN> values,
-            Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
+    protected void doReduce(KEYIN key, Iterable<VALUEIN> values, Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
         super.reduce(key, values, context);
     }
 
     @Override
-    final protected void cleanup(Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context)
-            throws IOException, InterruptedException {
+    final protected void cleanup(Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
         try {
             doCleanup(context);
         } catch (IOException ex) { // KYLIN-2170
@@ -87,7 +84,6 @@ public class KylinReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends Reducer<KEYI
         }
     }
 
-    protected void doCleanup(Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context)
-            throws IOException, InterruptedException {
+    protected void doCleanup(Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context context) throws IOException, InterruptedException {
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
index 11105cc..cbb68d2 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
@@ -44,8 +44,7 @@ public class MRUtil {
     }
 
     public static IMRTableInputFormat getTableInputFormat(String tableName) {
-        return SourceFactory.createEngineAdapter(getTableDesc(tableName), IMRInput.class)
-                .getTableInputFormat(getTableDesc(tableName));
+        return SourceFactory.createEngineAdapter(getTableDesc(tableName), IMRInput.class).getTableInputFormat(getTableDesc(tableName));
     }
 
     public static IMRTableInputFormat getTableInputFormat(TableDesc tableDesc) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFile.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFile.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFile.java
index 2d1c5fc..bcf4b98 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFile.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFile.java
@@ -85,7 +85,7 @@ public class SortedColumnDFSFile implements IReadableTable {
     public TableSignature getSignature() throws IOException {
         return dfsFileTable.getSignature();
     }
-
+    
     @Override
     public boolean exists() throws IOException {
         return dfsFileTable.exists();
@@ -104,8 +104,7 @@ public class SortedColumnDFSFile implements IReadableTable {
                         Long num2 = Long.parseLong(str2);
                         return num1.compareTo(num2);
                     } catch (NumberFormatException e) {
-                        logger.error("NumberFormatException when parse integer family number.str1:" + str1 + " str2:"
-                                + str2);
+                        logger.error("NumberFormatException when parse integer family number.str1:" + str1 + " str2:" + str2);
                         e.printStackTrace();
                         return 0;
                     }
@@ -120,8 +119,7 @@ public class SortedColumnDFSFile implements IReadableTable {
                         Double num2 = Double.parseDouble(str2);
                         return num1.compareTo(num2);
                     } catch (NumberFormatException e) {
-                        logger.error(
-                                "NumberFormatException when parse doul family number.str1:" + str1 + " str2:" + str2);
+                        logger.error("NumberFormatException when parse doul family number.str1:" + str1 + " str2:" + str2);
                         return 0;
                     }
                 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFileReader.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFileReader.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFileReader.java
index c2679bc..bb00442 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFileReader.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/SortedColumnDFSFileReader.java
@@ -17,13 +17,13 @@
 */
 package org.apache.kylin.engine.mr;
 
+import org.apache.kylin.source.IReadableTable;
+
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.PriorityQueue;
 
-import org.apache.kylin.source.IReadableTable;
-
 /**
  * Created by xiefan on 16-11-22.
  */
@@ -37,8 +37,7 @@ public class SortedColumnDFSFileReader implements IReadableTable.TableReader {
 
     private String[] row;
 
-    public SortedColumnDFSFileReader(Collection<IReadableTable.TableReader> readers,
-            final Comparator<String> comparator) {
+    public SortedColumnDFSFileReader(Collection<IReadableTable.TableReader> readers, final Comparator<String> comparator) {
         this.readers = readers;
         this.comparator = comparator;
         pq = new PriorityQueue<ReaderBuffer>(11, new Comparator<ReaderBuffer>() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index 4515773..764cbdd 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -77,41 +77,22 @@ import org.slf4j.LoggerFactory;
 public abstract class AbstractHadoopJob extends Configured implements Tool {
     private static final Logger logger = LoggerFactory.getLogger(AbstractHadoopJob.class);
 
-    protected static final Option OPTION_JOB_NAME = OptionBuilder.withArgName(BatchConstants.ARG_JOB_NAME).hasArg()
-            .isRequired(true).withDescription("Job name. For example, Kylin_Cuboid_Builder-clsfd_v2_Step_22-D)")
-            .create(BatchConstants.ARG_JOB_NAME);
-    protected static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg()
-            .isRequired(true).withDescription("Cube name. For exmaple, flat_item_cube")
-            .create(BatchConstants.ARG_CUBE_NAME);
-    protected static final Option OPTION_CUBING_JOB_ID = OptionBuilder.withArgName(BatchConstants.ARG_CUBING_JOB_ID)
-            .hasArg().isRequired(false).withDescription("ID of cubing job executable")
-            .create(BatchConstants.ARG_CUBING_JOB_ID);
+    protected static final Option OPTION_JOB_NAME = OptionBuilder.withArgName(BatchConstants.ARG_JOB_NAME).hasArg().isRequired(true).withDescription("Job name. For example, Kylin_Cuboid_Builder-clsfd_v2_Step_22-D)").create(BatchConstants.ARG_JOB_NAME);
+    protected static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg().isRequired(true).withDescription("Cube name. For exmaple, flat_item_cube").create(BatchConstants.ARG_CUBE_NAME);
+    protected static final Option OPTION_CUBING_JOB_ID = OptionBuilder.withArgName(BatchConstants.ARG_CUBING_JOB_ID).hasArg().isRequired(false).withDescription("ID of cubing job executable").create(BatchConstants.ARG_CUBING_JOB_ID);
     //    @Deprecated
-    protected static final Option OPTION_SEGMENT_NAME = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_NAME)
-            .hasArg().isRequired(true).withDescription("Cube segment name").create(BatchConstants.ARG_SEGMENT_NAME);
-    protected static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_ID).hasArg()
-            .isRequired(true).withDescription("Cube segment id").create(BatchConstants.ARG_SEGMENT_ID);
-    protected static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
-            .isRequired(true).withDescription("Input path").create(BatchConstants.ARG_INPUT);
-    protected static final Option OPTION_INPUT_FORMAT = OptionBuilder.withArgName(BatchConstants.ARG_INPUT_FORMAT)
-            .hasArg().isRequired(false).withDescription("Input format").create(BatchConstants.ARG_INPUT_FORMAT);
-    protected static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg()
-            .isRequired(true).withDescription("Output path").create(BatchConstants.ARG_OUTPUT);
-    protected static final Option OPTION_NCUBOID_LEVEL = OptionBuilder.withArgName(BatchConstants.ARG_LEVEL).hasArg()
-            .isRequired(true).withDescription("N-Cuboid build level, e.g. 1, 2, 3...").create(BatchConstants.ARG_LEVEL);
-    protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName(BatchConstants.ARG_PARTITION)
-            .hasArg().isRequired(true).withDescription("Partition file path.").create(BatchConstants.ARG_PARTITION);
-    protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_HTABLE_NAME)
-            .hasArg().isRequired(true).withDescription("HTable name").create(BatchConstants.ARG_HTABLE_NAME);
-
-    protected static final Option OPTION_STATISTICS_ENABLED = OptionBuilder
-            .withArgName(BatchConstants.ARG_STATS_ENABLED).hasArg().isRequired(false)
-            .withDescription("Statistics enabled").create(BatchConstants.ARG_STATS_ENABLED);
-    protected static final Option OPTION_STATISTICS_OUTPUT = OptionBuilder.withArgName(BatchConstants.ARG_STATS_OUTPUT)
-            .hasArg().isRequired(false).withDescription("Statistics output").create(BatchConstants.ARG_STATS_OUTPUT);
-    protected static final Option OPTION_STATISTICS_SAMPLING_PERCENT = OptionBuilder
-            .withArgName(BatchConstants.ARG_STATS_SAMPLING_PERCENT).hasArg().isRequired(false)
-            .withDescription("Statistics sampling percentage").create(BatchConstants.ARG_STATS_SAMPLING_PERCENT);
+    protected static final Option OPTION_SEGMENT_NAME = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_NAME).hasArg().isRequired(true).withDescription("Cube segment name").create(BatchConstants.ARG_SEGMENT_NAME);
+    protected static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_ID).hasArg().isRequired(true).withDescription("Cube segment id").create(BatchConstants.ARG_SEGMENT_ID);
+    protected static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg().isRequired(true).withDescription("Input path").create(BatchConstants.ARG_INPUT);
+    protected static final Option OPTION_INPUT_FORMAT = OptionBuilder.withArgName(BatchConstants.ARG_INPUT_FORMAT).hasArg().isRequired(false).withDescription("Input format").create(BatchConstants.ARG_INPUT_FORMAT);
+    protected static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg().isRequired(true).withDescription("Output path").create(BatchConstants.ARG_OUTPUT);
+    protected static final Option OPTION_NCUBOID_LEVEL = OptionBuilder.withArgName(BatchConstants.ARG_LEVEL).hasArg().isRequired(true).withDescription("N-Cuboid build level, e.g. 1, 2, 3...").create(BatchConstants.ARG_LEVEL);
+    protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName(BatchConstants.ARG_PARTITION).hasArg().isRequired(true).withDescription("Partition file path.").create(BatchConstants.ARG_PARTITION);
+    protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_HTABLE_NAME).hasArg().isRequired(true).withDescription("HTable name").create(BatchConstants.ARG_HTABLE_NAME);
+
+    protected static final Option OPTION_STATISTICS_ENABLED = OptionBuilder.withArgName(BatchConstants.ARG_STATS_ENABLED).hasArg().isRequired(false).withDescription("Statistics enabled").create(BatchConstants.ARG_STATS_ENABLED);
+    protected static final Option OPTION_STATISTICS_OUTPUT = OptionBuilder.withArgName(BatchConstants.ARG_STATS_OUTPUT).hasArg().isRequired(false).withDescription("Statistics output").create(BatchConstants.ARG_STATS_OUTPUT);
+    protected static final Option OPTION_STATISTICS_SAMPLING_PERCENT = OptionBuilder.withArgName(BatchConstants.ARG_STATS_SAMPLING_PERCENT).hasArg().isRequired(false).withDescription("Statistics sampling percentage").create(BatchConstants.ARG_STATS_SAMPLING_PERCENT);
 
     private static final String MAP_REDUCE_CLASSPATH = "mapreduce.application.classpath";
 
@@ -169,9 +150,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         } else {
             job.waitForCompletion(true);
             retVal = job.isSuccessful() ? 0 : 1;
-            logger.debug("Job '" + job.getJobName() + "' finished "
-                    + (job.isSuccessful() ? "successfully in " : "with failures.  Time taken ")
-                    + formatTime((System.nanoTime() - start) / 1000000L));
+            logger.debug("Job '" + job.getJobName() + "' finished " + (job.isSuccessful() ? "successfully in " : "with failures.  Time taken ") + formatTime((System.nanoTime() - start) / 1000000L));
         }
         return retVal;
     }
@@ -194,8 +173,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         Configuration jobConf = job.getConfiguration();
         String classpath = jobConf.get(MAP_REDUCE_CLASSPATH);
         if (classpath == null || classpath.length() == 0) {
-            logger.info("Didn't find " + MAP_REDUCE_CLASSPATH
-                    + " in job configuration, will run 'mapred classpath' to get the default value.");
+            logger.info("Didn't find " + MAP_REDUCE_CLASSPATH + " in job configuration, will run 'mapred classpath' to get the default value.");
             classpath = getDefaultMapRedClasspath();
             logger.info("The default mapred classpath is: " + classpath);
         }
@@ -234,13 +212,11 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
                 StringUtil.appendWithSeparator(kylinDependency, hiveExecJarPath);
                 logger.info("hive-exec jar file: " + hiveExecJarPath);
 
-                String hiveHCatJarPath = ClassUtil
-                        .findContainingJar(Class.forName("org.apache.hive.hcatalog.mapreduce.HCatInputFormat"));
+                String hiveHCatJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hive.hcatalog.mapreduce.HCatInputFormat"));
                 StringUtil.appendWithSeparator(kylinDependency, hiveHCatJarPath);
                 logger.info("hive-catalog jar file: " + hiveHCatJarPath);
 
-                String hiveMetaStoreJarPath = ClassUtil
-                        .findContainingJar(Class.forName("org.apache.hadoop.hive.metastore.api.Table"));
+                String hiveMetaStoreJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.metastore.api.Table"));
                 StringUtil.appendWithSeparator(kylinDependency, hiveMetaStoreJarPath);
                 logger.info("hive-metastore jar file: " + hiveMetaStoreJarPath);
             } catch (ClassNotFoundException e) {
@@ -256,8 +232,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         } else {
             logger.info("No Kafka dependency jar set in the environment, will find them from classpath:");
             try {
-                String kafkaClientJarPath = ClassUtil
-                        .findContainingJar(Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer"));
+                String kafkaClientJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer"));
                 StringUtil.appendWithSeparator(kylinDependency, kafkaClientJarPath);
                 logger.info("kafka jar file: " + kafkaClientJarPath);
 
@@ -328,7 +303,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
                     logger.warn("The directory of kylin dependency '" + fileName + "' does not exist, skip");
                     continue;
                 }
-
+                
                 if (fs.getFileStatus(p).isDirectory()) {
                     appendTmpDir(job, fs, p, jarList, fileList);
                     continue;
@@ -515,8 +490,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         return dumpList;
     }
 
-    protected void dumpKylinPropsAndMetadata(Set<String> dumpList, KylinConfig kylinConfig, Configuration conf)
-            throws IOException {
+    protected void dumpKylinPropsAndMetadata(Set<String> dumpList, KylinConfig kylinConfig, Configuration conf) throws IOException {
         File tmp = File.createTempFile("kylin_job_meta", "");
         FileUtils.forceDelete(tmp); // we need a directory, so delete the file first
 
@@ -585,8 +559,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         HadoopUtil.deletePath(conf, path);
     }
 
-    public static double getTotalMapInputMB(Job job)
-            throws ClassNotFoundException, IOException, InterruptedException, JobException {
+    public static double getTotalMapInputMB(Job job) throws ClassNotFoundException, IOException, InterruptedException, JobException {
         if (job == null) {
             throw new JobException("Job is null");
         }
@@ -603,13 +576,11 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         return totalMapInputMB;
     }
 
-    protected double getTotalMapInputMB()
-            throws ClassNotFoundException, IOException, InterruptedException, JobException {
+    protected double getTotalMapInputMB() throws ClassNotFoundException, IOException, InterruptedException, JobException {
         return getTotalMapInputMB(job);
     }
 
-    protected int getMapInputSplitCount()
-            throws ClassNotFoundException, JobException, IOException, InterruptedException {
+    protected int getMapInputSplitCount() throws ClassNotFoundException, JobException, IOException, InterruptedException {
         if (job == null) {
             throw new JobException("Job is null");
         }
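
For readers unfamiliar with the commons-cli pattern used throughout the AbstractHadoopJob hunks above: each job argument is declared once as a named Option via OptionBuilder and read back by name after parsing. A minimal round-trip sketch in the commons-cli 1.x style -- the class is invented, the literal option name stands in for the BatchConstants argument names, and this is not the job's actual option-parsing plumbing:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;

    public class CliOptionSketch {

        @SuppressWarnings("static-access")
        private static final Option OPTION_OUTPUT = OptionBuilder.withArgName("output").hasArg()
                .isRequired(true).withDescription("Output path").create("output");

        public static void main(String[] args) throws Exception {
            Options options = new Options();
            options.addOption(OPTION_OUTPUT);

            // e.g. args = { "-output", "/tmp/kylin/cuboid" }
            CommandLine cmd = new GnuParser().parse(options, args);
            System.out.println("output = " + cmd.getOptionValue("output"));
        }
    }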

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BaseCuboidBuilder.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BaseCuboidBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BaseCuboidBuilder.java
index cb478c7..07b636b 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BaseCuboidBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BaseCuboidBuilder.java
@@ -18,11 +18,7 @@
 
 package org.apache.kylin.engine.mr.common;
 
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.collect.Sets;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.cube.CubeSegment;
@@ -39,7 +35,10 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Sets;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 /**
  */
@@ -60,9 +59,8 @@ public class BaseCuboidBuilder implements java.io.Serializable {
 
     protected KylinConfig kylinConfig;
 
-    public BaseCuboidBuilder(KylinConfig kylinConfig, CubeDesc cubeDesc, CubeSegment cubeSegment,
-            CubeJoinedFlatTableEnrich intermediateTableDesc, AbstractRowKeyEncoder rowKeyEncoder,
-            MeasureIngester<?>[] aggrIngesters, Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public BaseCuboidBuilder(KylinConfig kylinConfig, CubeDesc cubeDesc, CubeSegment cubeSegment, CubeJoinedFlatTableEnrich intermediateTableDesc,
+                             AbstractRowKeyEncoder rowKeyEncoder, MeasureIngester<?>[] aggrIngesters, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         this.kylinConfig = kylinConfig;
         this.cubeDesc = cubeDesc;
         this.cubeSegment = cubeSegment;
@@ -75,8 +73,7 @@ public class BaseCuboidBuilder implements java.io.Serializable {
         measureCodec = new BufferedMeasureCodec(cubeDesc.getMeasures());
     }
 
-    public BaseCuboidBuilder(KylinConfig kylinConfig, CubeDesc cubeDesc, CubeSegment cubeSegment,
-            CubeJoinedFlatTableEnrich intermediateTableDesc) {
+    public BaseCuboidBuilder(KylinConfig kylinConfig, CubeDesc cubeDesc, CubeSegment cubeSegment, CubeJoinedFlatTableEnrich intermediateTableDesc) {
         this.kylinConfig = kylinConfig;
         this.cubeDesc = cubeDesc;
         this.cubeSegment = cubeSegment;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
index 001e76d..602b4bb 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
@@ -62,6 +62,7 @@ public interface BatchConstants {
     String CFG_OUTPUT_STATISTICS = "statistics";
     String CFG_OUTPUT_PARTITION = "partition";
 
+
     /**
      * command line ARGuments
      */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
index f8631ec..a372c5b 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
@@ -161,8 +161,7 @@ public class CubeStatsReader {
         return mapperOverlapRatioOfFirstBuild;
     }
 
-    public static Map<Long, Long> getCuboidRowCountMapFromSampling(Map<Long, HLLCounter> hllcMap,
-            int samplingPercentage) {
+    public static Map<Long, Long> getCuboidRowCountMapFromSampling(Map<Long, HLLCounter> hllcMap, int samplingPercentage) {
         Map<Long, Long> cuboidRowCountMap = Maps.newHashMap();
         for (Map.Entry<Long, HLLCounter> entry : hllcMap.entrySet()) {
             // No need to adjust according sampling percentage. Assumption is that data set is far
@@ -186,8 +185,7 @@ public class CubeStatsReader {
 
         Map<Long, Double> sizeMap = Maps.newHashMap();
         for (Map.Entry<Long, Long> entry : rowCountMap.entrySet()) {
-            sizeMap.put(entry.getKey(), estimateCuboidStorageSize(cubeSegment, entry.getKey(), entry.getValue(),
-                    baseCuboidId, rowkeyColumnSize));
+            sizeMap.put(entry.getKey(), estimateCuboidStorageSize(cubeSegment, entry.getKey(), entry.getValue(), baseCuboidId, rowkeyColumnSize));
         }
         return sizeMap;
     }
@@ -197,14 +195,13 @@ public class CubeStatsReader {
      *
      * @return the cuboid size in M bytes
      */
-    private static double estimateCuboidStorageSize(CubeSegment cubeSegment, long cuboidId, long rowCount,
-            long baseCuboidId, List<Integer> rowKeyColumnLength) {
+    private static double estimateCuboidStorageSize(CubeSegment cubeSegment, long cuboidId, long rowCount, long baseCuboidId, List<Integer> rowKeyColumnLength) {
 
         int rowkeyLength = cubeSegment.getRowKeyPreambleSize();
         KylinConfig kylinConf = cubeSegment.getConfig();
 
         long mask = Long.highestOneBit(baseCuboidId);
-        long parentCuboidIdActualLength = (long) Long.SIZE - Long.numberOfLeadingZeros(baseCuboidId);
+        long parentCuboidIdActualLength = (long)Long.SIZE - Long.numberOfLeadingZeros(baseCuboidId);
         for (int i = 0; i < parentCuboidIdActualLength; i++) {
             if ((mask & cuboidId) > 0) {
                 rowkeyLength += rowKeyColumnLength.get(i); //colIO.getColumnLength(columnList.get(i));
@@ -226,8 +223,7 @@ public class CubeStatsReader {
 
         double cuboidSizeRatio = kylinConf.getJobCuboidSizeRatio();
         double cuboidSizeMemHungryRatio = kylinConf.getJobCuboidSizeCountDistinctRatio();
-        double ret = (1.0 * normalSpace * rowCount * cuboidSizeRatio
-                + 1.0 * countDistinctSpace * rowCount * cuboidSizeMemHungryRatio) / (1024L * 1024L);
+        double ret = (1.0 * normalSpace * rowCount * cuboidSizeRatio + 1.0 * countDistinctSpace * rowCount * cuboidSizeMemHungryRatio) / (1024L * 1024L);
         return ret;
     }
 
@@ -240,8 +236,7 @@ public class CubeStatsReader {
         out.println("============================================================================");
         out.println("Statistics of " + seg);
         out.println();
-        out.println(
-                "Cube statistics hll precision: " + cuboidRowEstimatesHLL.values().iterator().next().getPrecision());
+        out.println("Cube statistics hll precision: " + cuboidRowEstimatesHLL.values().iterator().next().getPrecision());
         out.println("Total cuboids: " + cuboidRows.size());
         out.println("Total estimated rows: " + SumHelper.sumLong(cuboidRows.values()));
         out.println("Total estimated size(MB): " + SumHelper.sumDouble(cuboidSizes.values()));
@@ -262,8 +257,7 @@ public class CubeStatsReader {
             ret += cuboidSizeMap.get(cuboidId);
         }
 
-        logger.info("Estimating size for layer {}, all cuboids are {}, total size is {}", level,
-                StringUtils.join(layeredCuboids.get(level), ","), ret);
+        logger.info("Estimating size for layer {}, all cuboids are {}, total size is {}", level, StringUtils.join(layeredCuboids.get(level), ","), ret);
         return ret;
     }
 
@@ -286,8 +280,7 @@ public class CubeStatsReader {
         }
     }
 
-    private static void printCuboidInfoTree(long parent, long cuboidID, final CuboidScheduler scheduler,
-            Map<Long, Long> cuboidRows, Map<Long, Double> cuboidSizes, int dimensionCount, int depth, PrintWriter out) {
+    private static void printCuboidInfoTree(long parent, long cuboidID, final CuboidScheduler scheduler, Map<Long, Long> cuboidRows, Map<Long, Double> cuboidSizes, int dimensionCount, int depth, PrintWriter out) {
         printOneCuboidInfo(parent, cuboidID, cuboidRows, cuboidSizes, dimensionCount, depth, out);
 
         List<Long> children = scheduler.getSpanningCuboid(cuboidID);
@@ -298,8 +291,7 @@ public class CubeStatsReader {
         }
     }
 
-    private static void printOneCuboidInfo(long parent, long cuboidID, Map<Long, Long> cuboidRows,
-            Map<Long, Double> cuboidSizes, int dimensionCount, int depth, PrintWriter out) {
+    private static void printOneCuboidInfo(long parent, long cuboidID, Map<Long, Long> cuboidRows, Map<Long, Double> cuboidSizes, int dimensionCount, int depth, PrintWriter out) {
         StringBuffer sb = new StringBuffer();
         for (int i = 0; i < depth; i++) {
             sb.append("    ");
@@ -312,8 +304,7 @@ public class CubeStatsReader {
         sb.append(", est row: ").append(rowCount).append(", est MB: ").append(formatDouble(size));
 
         if (parent != -1) {
-            sb.append(", shrink: ").append(formatDouble(100.0 * cuboidRows.get(cuboidID) / cuboidRows.get(parent)))
-                    .append("%");
+            sb.append(", shrink: ").append(formatDouble(100.0 * cuboidRows.get(cuboidID) / cuboidRows.get(parent))).append("%");
         }
 
         out.println(sb.toString());

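An aside for readers of the estimateCuboidStorageSize hunk above: the returned value is simply normal-column space plus count-distinct space per row, each scaled by its configured ratio and converted to MB. Below is a minimal standalone sketch of that arithmetic; the row count, byte widths and ratios are illustrative placeholders, not Kylin defaults.

    // Illustrative only: mirrors the final step of estimateCuboidStorageSize with assumed numbers.
    public class CuboidSizeEstimateSketch {
        public static void main(String[] args) {
            long rowCount = 1_000_000L;       // estimated rows in the cuboid (assumed)
            int normalSpace = 40;             // bytes per row for rowkey + normal measures (assumed)
            int countDistinctSpace = 16;      // bytes per row for count-distinct measures (assumed)
            double cuboidSizeRatio = 0.25;    // stand-in for kylinConf.getJobCuboidSizeRatio()
            double memHungryRatio = 0.05;     // stand-in for getJobCuboidSizeCountDistinctRatio()
            double sizeMB = (1.0 * normalSpace * rowCount * cuboidSizeRatio
                    + 1.0 * countDistinctSpace * rowCount * memHungryRatio) / (1024L * 1024L);
            System.out.println("estimated cuboid size (MB): " + sizeMB);
        }
    }

With these made-up inputs the estimate works out to roughly 10.3 MB.
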
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsWriter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsWriter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsWriter.java
index 0b288f3..8f400c3 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsWriter.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsWriter.java
@@ -43,8 +43,7 @@ public class CubeStatsWriter {
     }
 
     public static void writeCuboidStatistics(Configuration conf, Path outputPath, //
-            Map<Long, HLLCounter> cuboidHLLMap, int samplingPercentage, int mapperNumber, double mapperOverlapRatio)
-            throws IOException {
+            Map<Long, HLLCounter> cuboidHLLMap, int samplingPercentage, int mapperNumber, double mapperOverlapRatio) throws IOException {
         Path seqFilePath = new Path(outputPath, BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);
 
         List<Long> allCuboids = new ArrayList<Long>();
@@ -52,12 +51,11 @@ public class CubeStatsWriter {
         Collections.sort(allCuboids);
 
         ByteBuffer valueBuf = ByteBuffer.allocate(BufferedMeasureCodec.DEFAULT_BUFFER_SIZE);
-        SequenceFile.Writer writer = SequenceFile.createWriter(conf, SequenceFile.Writer.file(seqFilePath),
-                SequenceFile.Writer.keyClass(LongWritable.class), SequenceFile.Writer.valueClass(BytesWritable.class));
+        SequenceFile.Writer writer = SequenceFile.createWriter(conf, SequenceFile.Writer.file(seqFilePath), SequenceFile.Writer.keyClass(LongWritable.class), SequenceFile.Writer.valueClass(BytesWritable.class));
         try {
             // mapper overlap ratio at key -1
             writer.append(new LongWritable(-1), new BytesWritable(Bytes.toBytes(mapperOverlapRatio)));
-
+            
             // mapper number at key -2
             writer.append(new LongWritable(-2), new BytesWritable(Bytes.toBytes(mapperNumber)));
 

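For context on the file written above: cuboid statistics go into a Hadoop SequenceFile keyed by LongWritable with BytesWritable payloads, and negative keys carry metadata (mapper overlap ratio at -1, mapper number at -2). The following is a minimal sketch of the same SequenceFile pattern, assuming only the Hadoop client libraries; the path and payload bytes are placeholders, and ByteBuffer stands in for the HBase Bytes helper used in the hunk.

    import java.nio.ByteBuffer;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.SequenceFile;

    public class CuboidStatsSeqFileSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Path seqFilePath = new Path("/tmp/cuboid_stats_sketch.seq"); // placeholder path
            SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                    SequenceFile.Writer.file(seqFilePath),
                    SequenceFile.Writer.keyClass(LongWritable.class),
                    SequenceFile.Writer.valueClass(BytesWritable.class));
            try {
                // metadata rows at negative keys, mirroring the convention above
                writer.append(new LongWritable(-1), new BytesWritable(ByteBuffer.allocate(8).putDouble(0.5).array()));
                writer.append(new LongWritable(-2), new BytesWritable(ByteBuffer.allocate(4).putInt(10).array()));
                // one (cuboidId, payload) pair as a stand-in for a serialized HLL counter
                writer.append(new LongWritable(255L), new BytesWritable(new byte[] { 1, 2, 3 }));
            } finally {
                writer.close();
            }
        }
    }
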
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CuboidShardUtil.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CuboidShardUtil.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CuboidShardUtil.java
index 6b8bc9c..b6dbd5d 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CuboidShardUtil.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CuboidShardUtil.java
@@ -32,8 +32,7 @@ import com.google.common.collect.Maps;
 public class CuboidShardUtil {
     protected static final Logger logger = LoggerFactory.getLogger(CuboidShardUtil.class);
 
-    public static void saveCuboidShards(CubeSegment segment, Map<Long, Short> cuboidShards, int totalShards)
-            throws IOException {
+    public static void saveCuboidShards(CubeSegment segment, Map<Long, Short> cuboidShards, int totalShards) throws IOException {
         CubeManager cubeManager = CubeManager.getInstance(segment.getConfig());
 
         Map<Long, Short> filtered = Maps.newHashMap();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/DefaultSslProtocolSocketFactory.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/DefaultSslProtocolSocketFactory.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/DefaultSslProtocolSocketFactory.java
index 167d58c..d66e4eb 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/DefaultSslProtocolSocketFactory.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/DefaultSslProtocolSocketFactory.java
@@ -53,8 +53,7 @@ public class DefaultSslProtocolSocketFactory implements SecureProtocolSocketFact
     /**
      * @see SecureProtocolSocketFactory#createSocket(java.lang.String,int,java.net.InetAddress,int)
      */
-    public Socket createSocket(String host, int port, InetAddress clientHost, int clientPort)
-            throws IOException, UnknownHostException {
+    public Socket createSocket(String host, int port, InetAddress clientHost, int clientPort) throws IOException, UnknownHostException {
         return getSSLContext().getSocketFactory().createSocket(host, port, clientHost, clientPort);
     }
 
@@ -92,8 +91,7 @@ public class DefaultSslProtocolSocketFactory implements SecureProtocolSocketFact
      * @throws IllegalArgumentException
      *             DOCUMENT ME!
      */
-    public Socket createSocket(final String host, final int port, final InetAddress localAddress, final int localPort,
-            final HttpConnectionParams params) throws IOException, UnknownHostException, ConnectTimeoutException {
+    public Socket createSocket(final String host, final int port, final InetAddress localAddress, final int localPort, final HttpConnectionParams params) throws IOException, UnknownHostException, ConnectTimeoutException {
         if (params == null) {
             throw new IllegalArgumentException("Parameters may not be null");
         }
@@ -118,8 +116,7 @@ public class DefaultSslProtocolSocketFactory implements SecureProtocolSocketFact
     /**
      * @see SecureProtocolSocketFactory#createSocket(java.net.Socket,java.lang.String,int,boolean)
      */
-    public Socket createSocket(Socket socket, String host, int port, boolean autoClose)
-            throws IOException, UnknownHostException {
+    public Socket createSocket(Socket socket, String host, int port, boolean autoClose) throws IOException, UnknownHostException {
         return getSSLContext().getSocketFactory().createSocket(socket, host, port, autoClose);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopCmdOutput.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopCmdOutput.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopCmdOutput.java
index fce4353..5da1947 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopCmdOutput.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopCmdOutput.java
@@ -94,7 +94,7 @@ public class HadoopCmdOutput {
             }
             this.output.append(counters.toString()).append("\n");
             logger.debug(counters.toString());
-
+            
             String bytsWrittenCounterName = "HDFS_BYTES_WRITTEN";
             String fsScheme = FileSystem.get(job.getConfiguration()).getScheme();
             if (("wasb").equalsIgnoreCase(fsScheme)) {
@@ -103,8 +103,7 @@ public class HadoopCmdOutput {
             }
 
             mapInputRecords = String.valueOf(counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
-            hdfsBytesWritten = String
-                    .valueOf(counters.findCounter("FileSystemCounters", bytsWrittenCounterName).getValue());
+            hdfsBytesWritten = String.valueOf(counters.findCounter("FileSystemCounters", bytsWrittenCounterName).getValue());
             rawInputBytesRead = String.valueOf(counters.findCounter(RawDataCounter.BYTES).getValue());
         } catch (Exception e) {
             logger.error(e.getLocalizedMessage(), e);



http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
index 2747ca0..f749fb4 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
@@ -93,10 +93,10 @@ public class AggregationCacheMemSizeTest {
     }
 
     enum Settings {
-        WITHOUT_MEM_HUNGRY, // only test basic aggrs
-        WITH_HLLC, // basic aggrs + hllc
-        WITH_LOW_CARD_BITMAP, // basic aggrs + bitmap
-        WITH_HIGH_CARD_BITMAP // basic aggrs + bitmap
+        WITHOUT_MEM_HUNGRY,     // only test basic aggrs
+        WITH_HLLC,              // basic aggrs + hllc
+        WITH_LOW_CARD_BITMAP,   // basic aggrs + bitmap
+        WITH_HIGH_CARD_BITMAP   // basic aggrs + bitmap
     }
 
     private MeasureAggregator<?>[] createNoMemHungryAggrs() {
@@ -132,19 +132,19 @@ public class AggregationCacheMemSizeTest {
         aggregators.addAll(Arrays.asList(createNoMemHungryAggrs()));
 
         switch (settings) {
-        case WITHOUT_MEM_HUNGRY:
-            break;
-        case WITH_HLLC:
-            aggregators.add(createHLLCAggr());
-            break;
-        case WITH_LOW_CARD_BITMAP:
-            aggregators.add(createBitmapAggr(true));
-            break;
-        case WITH_HIGH_CARD_BITMAP:
-            aggregators.add(createBitmapAggr(false));
-            break;
-        default:
-            break;
+            case WITHOUT_MEM_HUNGRY:
+                break;
+            case WITH_HLLC:
+                aggregators.add(createHLLCAggr());
+                break;
+            case WITH_LOW_CARD_BITMAP:
+                aggregators.add(createBitmapAggr(true));
+                break;
+            case WITH_HIGH_CARD_BITMAP:
+                aggregators.add(createBitmapAggr(false));
+                break;
+            default:
+                break;
         }
 
         return aggregators.toArray(new MeasureAggregator[aggregators.size()]);
@@ -160,8 +160,8 @@ public class AggregationCacheMemSizeTest {
 
         System.out.printf("%-15s %-10s %-10s\n", "cardinality", "estimate", "actual");
         for (BitmapAggregator aggr : bitmapAggrs) {
-            System.out.printf("%-15d %-10d %-10d\n", aggr.getState().getCount(), aggr.getMemBytesEstimate(),
-                    meter.measureDeep(aggr));
+            System.out.printf("%-15d %-10d %-10d\n",
+                    aggr.getState().getCount(), aggr.getMemBytesEstimate(), meter.measureDeep(aggr));
         }
     }
 
@@ -190,8 +190,8 @@ public class AggregationCacheMemSizeTest {
         long actualMillis = 0;
 
         System.out.println("Settings: " + settings);
-        System.out.printf("%15s %15s %15s %15s %15s\n", "Size", "Estimate(bytes)", "Actual(bytes)", "Estimate(ms)",
-                "Actual(ms)");
+        System.out.printf("%15s %15s %15s %15s %15s\n",
+                "Size", "Estimate(bytes)", "Actual(bytes)", "Estimate(ms)", "Actual(ms)");
 
         for (int i = 0; i < inputCount; i++) {
             byte[] key = new byte[10];
@@ -199,7 +199,7 @@ public class AggregationCacheMemSizeTest {
             MeasureAggregator[] values = createAggrs(settings);
             map.put(key, values);
 
-            if ((i + 1) % reportInterval == 0) {
+            if ((i+1) % reportInterval == 0) {
                 stopwatch.start();
                 long estimateBytes = GTAggregateScanner.estimateSizeOfAggrCache(key, values, map.size());
                 estimateMillis += stopwatch.elapsedMillis();
@@ -210,8 +210,8 @@ public class AggregationCacheMemSizeTest {
                 actualMillis += stopwatch.elapsedMillis();
                 stopwatch.reset();
 
-                System.out.printf("%,15d %,15d %,15d %,15d %,15d\n", map.size(), estimateBytes, actualBytes,
-                        estimateMillis, actualMillis);
+                System.out.printf("%,15d %,15d %,15d %,15d %,15d\n",
+                        map.size(), estimateBytes, actualBytes, estimateMillis, actualMillis);
             }
         }
         System.out.println("---------------------------------------\n");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
index d9241e5..8b2243c 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
@@ -77,11 +77,7 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
             }
         };
 
-        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null)
-                .setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(0, 3))
-                .setAggrMetrics(new ImmutableBitSet(3, 6))
-                .setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null)
-                .setAggCacheMemThreshold(0.5).createGTScanRequest();
+        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null).setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(0, 3)).setAggrMetrics(new ImmutableBitSet(3, 6)).setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null).setAggCacheMemThreshold(0.5).createGTScanRequest();
 
         GTAggregateScanner scanner = new GTAggregateScanner(inputScanner, scanRequest);
 
@@ -118,11 +114,7 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
         };
 
         // all-in-mem testcase
-        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null)
-                .setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(1, 3))
-                .setAggrMetrics(new ImmutableBitSet(3, 6))
-                .setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null)
-                .setAggCacheMemThreshold(0.5).createGTScanRequest();
+        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null).setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(1, 3)).setAggrMetrics(new ImmutableBitSet(3, 6)).setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null).setAggCacheMemThreshold(0.5).createGTScanRequest();
 
         GTAggregateScanner scanner = new GTAggregateScanner(inputScanner, scanRequest);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/gridtable/DimEncodingPreserveOrderTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/DimEncodingPreserveOrderTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/DimEncodingPreserveOrderTest.java
index 770f6ce..1866079 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/DimEncodingPreserveOrderTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/DimEncodingPreserveOrderTest.java
@@ -45,13 +45,9 @@ public class DimEncodingPreserveOrderTest {
         successValue.add(new long[] { -32767, -127, 0, 127, 32767 });
         successValue.add(new long[] { -8388607, -32767, -127, 0, 127, 32767, 8388607 });
         successValue.add(new long[] { -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L });
-        successValue.add(new long[] { -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607,
-                2147483647L, 549755813887L });
-        successValue.add(new long[] { -140737488355327L, -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127,
-                32767, 8388607, 2147483647L, 549755813887L, 140737488355327L });
-        successValue.add(new long[] { -36028797018963967L, -140737488355327L, -549755813887L, -2147483647L, -8388607,
-                -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L, 140737488355327L,
-                36028797018963967L });
+        successValue.add(new long[] { -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L });
+        successValue.add(new long[] { -140737488355327L, -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L, 140737488355327L });
+        successValue.add(new long[] { -36028797018963967L, -140737488355327L, -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L, 140737488355327L, 36028797018963967L });
         successValue.add(new long[] { //
                 -9223372036854775807L, //
                 -36028797018963967L, //

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
index aadcf45..1ae229a 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/GTScanReqSerDerTest.java
@@ -88,8 +88,7 @@ public class GTScanReqSerDerTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testGTInfo() {
-        CubeInstance cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .getCube("test_kylin_cube_with_slr_ready");
+        CubeInstance cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube("test_kylin_cube_with_slr_ready");
         CubeSegment segment = cube.getFirstSegment();
 
         Cuboid baseCuboid = Cuboid.getBaseCuboid(cube.getDescriptor());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
index 58bb76c..14a25c5 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
@@ -85,8 +85,7 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
     }
 
     private IGTScanner scan(GridTable table) throws IOException {
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null)
-                .setFilterPushDown(null).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest();
         IGTScanner scanner = table.scan(req);
         for (GTRecord r : scanner) {
             Object[] v = r.getValues();
@@ -101,9 +100,7 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
     }
 
     private IGTScanner scanAndAggregate(GridTable table) throws IOException {
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null)
-                .setAggrGroupBy(setOf(0, 2)).setAggrMetrics(setOf(3, 4))
-                .setAggrMetricsFuncs(new String[] { "count", "sum" }).setFilterPushDown(null).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0, 2)).setAggrMetrics(setOf(3, 4)).setAggrMetricsFuncs(new String[] { "count", "sum" }).setFilterPushDown(null).createGTScanRequest();
         IGTScanner scanner = table.scan(req);
         int i = 0;
         for (GTRecord r : scanner) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/metadata/measure/MeasureCodecTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/metadata/measure/MeasureCodecTest.java b/core-cube/src/test/java/org/apache/kylin/metadata/measure/MeasureCodecTest.java
index a11b84a..7129a5e 100644
--- a/core-cube/src/test/java/org/apache/kylin/metadata/measure/MeasureCodecTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/metadata/measure/MeasureCodecTest.java
@@ -50,8 +50,7 @@ public class MeasureCodecTest extends LocalFileMetadataTestCase {
 
     @Test
     public void basicTest() {
-        MeasureDesc[] descs = new MeasureDesc[] { measure("double"), measure("long"), measure("decimal"),
-                measure("HLLC16"), measure("bitmap") };
+        MeasureDesc[] descs = new MeasureDesc[] { measure("double"), measure("long"), measure("decimal"), measure("HLLC16"), measure("bitmap") };
         BufferedMeasureCodec codec = new BufferedMeasureCodec(descs);
 
         Double d = new Double(1.0);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/metadata/measure/TopNMeasureTypeTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/metadata/measure/TopNMeasureTypeTest.java b/core-cube/src/test/java/org/apache/kylin/metadata/measure/TopNMeasureTypeTest.java
index bda2bf0..f04c742 100644
--- a/core-cube/src/test/java/org/apache/kylin/metadata/measure/TopNMeasureTypeTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/metadata/measure/TopNMeasureTypeTest.java
@@ -51,8 +51,7 @@ public class TopNMeasureTypeTest extends LocalFileMetadataTestCase {
     @Test
     public void test() {
 
-        CubeDesc desc = CubeDescManager.getInstance(getTestConfig())
-                .getCubeDesc("test_kylin_cube_without_slr_left_join_desc");
+        CubeDesc desc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_without_slr_left_join_desc");
 
         MeasureDesc topSellerMeasure = null;
 
@@ -62,8 +61,7 @@ public class TopNMeasureTypeTest extends LocalFileMetadataTestCase {
                 break;
             }
         }
-        TopNMeasureType measureType = (TopNMeasureType) MeasureTypeFactory.create(
-                topSellerMeasure.getFunction().getExpression(), topSellerMeasure.getFunction().getReturnDataType());
+        TopNMeasureType measureType = (TopNMeasureType) MeasureTypeFactory.create(topSellerMeasure.getFunction().getExpression(), topSellerMeasure.getFunction().getReturnDataType());
 
         topSellerMeasure.getFunction().getConfiguration().clear();
         List<TblColRef> colsNeedDict = measureType.getColumnsNeedDictionary(topSellerMeasure.getFunction());
@@ -71,8 +69,7 @@ public class TopNMeasureTypeTest extends LocalFileMetadataTestCase {
         assertTrue(colsNeedDict != null && colsNeedDict.size() == 1);
 
         TblColRef sellerColRef = topSellerMeasure.getFunction().getParameter().getColRefs().get(1);
-        topSellerMeasure.getFunction().getConfiguration()
-                .put(TopNMeasureType.CONFIG_ENCODING_PREFIX + sellerColRef.getIdentity(), "int:6");
+        topSellerMeasure.getFunction().getConfiguration().put(TopNMeasureType.CONFIG_ENCODING_PREFIX + sellerColRef.getIdentity(), "int:6");
         colsNeedDict = measureType.getColumnsNeedDictionary(topSellerMeasure.getFunction());
 
         assertTrue(colsNeedDict.size() == 0);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
index 9df0e26..b9f0d2b 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
@@ -17,16 +17,11 @@
 */
 package org.apache.kylin.dict;
 
-import static com.google.common.base.Preconditions.checkState;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.Arrays;
-import java.util.Objects;
-import java.util.concurrent.ExecutionException;
-
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import com.google.common.cache.RemovalListener;
+import com.google.common.cache.RemovalNotification;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Dictionary;
@@ -37,12 +32,14 @@ import org.apache.kylin.dict.global.GlobalDictMetadata;
 import org.apache.kylin.dict.global.GlobalDictStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.cache.RemovalListener;
-import com.google.common.cache.RemovalNotification;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.Objects;
+import java.util.concurrent.ExecutionException;
+import static com.google.common.base.Preconditions.checkState;
 
 /**
  * A dictionary based on Trie data structure that maps enumerations of byte[] to
@@ -63,8 +60,7 @@ import com.google.common.cache.RemovalNotification;
  */
 @SuppressWarnings({ "rawtypes", "unchecked", "serial" })
 public class AppendTrieDictionary<T> extends CacheDictionary<T> {
-    public static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65,
-            0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
+    public static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
     public static final int HEAD_SIZE_I = HEAD_MAGIC.length;
     private static final Logger logger = LoggerFactory.getLogger(AppendTrieDictionary.class);
 
@@ -81,23 +77,19 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
         final Path latestVersionPath = globalDictStore.getVersionDir(latestVersion);
         this.metadata = globalDictStore.getMetadata(latestVersion);
         this.bytesConvert = metadata.bytesConverter;
-        this.dictCache = CacheBuilder.newBuilder().softValues()
-                .removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
-                    @Override
-                    public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
-                        logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}",
-                                notification.getKey(), notification.getValue(), notification.getCause(),
-                                dictCache.size(), metadata.sliceFileMap.size());
-                    }
-                }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
-                    @Override
-                    public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
-                        AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(),
-                                metadata.sliceFileMap.get(key));
-                        logger.info("Load slice with key {} and value {}", key, slice);
-                        return slice;
-                    }
-                });
+        this.dictCache = CacheBuilder.newBuilder().softValues().removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
+            @Override
+            public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
+                logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}", notification.getKey(), notification.getValue(), notification.getCause(), dictCache.size(), metadata.sliceFileMap.size());
+            }
+        }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
+            @Override
+            public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
+                AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(), metadata.sliceFileMap.get(key));
+                logger.info("Load slice with key {} and value {}", key, slice);
+                return slice;
+            }
+        });
     }
 
     @Override
@@ -191,4 +183,4 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
     public boolean contains(Dictionary other) {
         return false;
     }
-}
+}
\ No newline at end of file

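The cache wiring reverted above is easier to read stripped of Kylin specifics: a Guava LoadingCache with soft values, a removal listener that logs evictions, and a loader that reads a slice on first access. Here is a toy sketch of the same pattern, assuming only Guava on the classpath and using String keys/values in place of AppendDictSliceKey and AppendDictSlice.

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import com.google.common.cache.RemovalListener;
    import com.google.common.cache.RemovalNotification;

    public class SoftValueCacheSketch {
        public static void main(String[] args) throws Exception {
            LoadingCache<String, String> cache = CacheBuilder.newBuilder()
                    .softValues() // values become reclaimable under memory pressure
                    .removalListener(new RemovalListener<String, String>() {
                        @Override
                        public void onRemoval(RemovalNotification<String, String> n) {
                            System.out.println("Evicted " + n.getKey() + " caused by " + n.getCause());
                        }
                    })
                    .build(new CacheLoader<String, String>() {
                        @Override
                        public String load(String key) {
                            return "slice-for-" + key; // stand-in for GlobalDictStore.readSlice(...)
                        }
                    });
            System.out.println(cache.get("key-1")); // first access triggers load()
            System.out.println(cache.get("key-1")); // second access is served from the cache
        }
    }
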
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/BuiltInFunctionTransformer.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/BuiltInFunctionTransformer.java b/core-dictionary/src/main/java/org/apache/kylin/dict/BuiltInFunctionTransformer.java
index 9eafc43..7a48a1c 100755
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/BuiltInFunctionTransformer.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/BuiltInFunctionTransformer.java
@@ -70,8 +70,7 @@ public class BuiltInFunctionTransformer implements ITupleFilterTransformer {
             }
         } else if (tupleFilter instanceof LogicalTupleFilter) {
             @SuppressWarnings("unchecked")
-            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren()
-                    .listIterator();
+            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren().listIterator();
             while (childIterator.hasNext()) {
                 TupleFilter transformed = transform(childIterator.next());
                 if (transformed != null) {
@@ -93,8 +92,7 @@ public class BuiltInFunctionTransformer implements ITupleFilterTransformer {
         if (dict == null)
             return null;
 
-        CompareTupleFilter translated = new CompareTupleFilter(
-                builtInFunctionTupleFilter.isReversed() ? FilterOperatorEnum.NOTIN : FilterOperatorEnum.IN);
+        CompareTupleFilter translated = new CompareTupleFilter(builtInFunctionTupleFilter.isReversed() ? FilterOperatorEnum.NOTIN : FilterOperatorEnum.IN);
         translated.addChild(new ColumnTupleFilter(columnRef));
 
         try {
@@ -120,12 +118,10 @@ public class BuiltInFunctionTransformer implements ITupleFilterTransformer {
 
     @SuppressWarnings({ "unchecked", "rawtypes" })
     private TupleFilter translateCompareTupleFilter(CompareTupleFilter compTupleFilter) {
-        if (compTupleFilter.getFunction() == null
-                || (!(compTupleFilter.getFunction() instanceof BuiltInFunctionTupleFilter)))
+        if (compTupleFilter.getFunction() == null || (!(compTupleFilter.getFunction() instanceof BuiltInFunctionTupleFilter)))
             return null;
 
-        BuiltInFunctionTupleFilter builtInFunctionTupleFilter = (BuiltInFunctionTupleFilter) compTupleFilter
-                .getFunction();
+        BuiltInFunctionTupleFilter builtInFunctionTupleFilter = (BuiltInFunctionTupleFilter) compTupleFilter.getFunction();
 
         if (!builtInFunctionTupleFilter.isValid())
             return null;
@@ -135,8 +131,7 @@ public class BuiltInFunctionTransformer implements ITupleFilterTransformer {
         if (dict == null)
             return null;
 
-        CompareTupleFilter translated = new CompareTupleFilter(
-                builtInFunctionTupleFilter.isReversed() ? FilterOperatorEnum.NOTIN : FilterOperatorEnum.IN);
+        CompareTupleFilter translated = new CompareTupleFilter(builtInFunctionTupleFilter.isReversed() ? FilterOperatorEnum.NOTIN : FilterOperatorEnum.IN);
         translated.addChild(new ColumnTupleFilter(columnRef));
 
         try {
@@ -147,8 +142,7 @@ public class BuiltInFunctionTransformer implements ITupleFilterTransformer {
                 Class clazz = Primitives.wrap(computedVal.getClass());
                 Object targetVal = compTupleFilter.getFirstValue();
                 if (Primitives.isWrapperType(clazz))
-                    targetVal = clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null,
-                            compTupleFilter.getFirstValue()));
+                    targetVal = clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, compTupleFilter.getFirstValue()));
 
                 int comp = ((Comparable) computedVal).compareTo(targetVal);
                 boolean compResult = false;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/ByteComparator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/ByteComparator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/ByteComparator.java
index 798c9c9..74d5ec5 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/ByteComparator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/ByteComparator.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.dict;
 
-import java.util.Comparator;
-
 import org.apache.kylin.common.util.ByteArray;
 
+import java.util.Comparator;
+
 /**
  * Created by xiefan on 16-10-28.
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/CacheDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/CacheDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/CacheDictionary.java
index 15e28a6..156971d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/CacheDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/CacheDictionary.java
@@ -18,12 +18,12 @@
 
 package org.apache.kylin.dict;
 
+import org.apache.kylin.common.util.Dictionary;
+
 import java.lang.ref.SoftReference;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.kylin.common.util.Dictionary;
-
 /**
  */
 public abstract class CacheDictionary<T> extends Dictionary<T> {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/DateStrDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DateStrDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DateStrDictionary.java
index f772caf..56b4994 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DateStrDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DateStrDictionary.java
@@ -83,13 +83,13 @@ public class DateStrDictionary extends Dictionary<String> {
         return pattern.length();
     }
 
+
     @Override
     final protected int getIdFromValueImpl(String value, int roundFlag) {
         Date date = stringToDate(value, pattern);
         int id = calcIdFromSeqNo((int) DateDimEnc.getNumOfDaysSince0000FromMillis(date.getTime()));
         if (id < baseId || id > maxId)
-            throw new IllegalArgumentException(
-                    "'" + value + "' encodes to '" + id + "' which is out of range [" + baseId + "," + maxId + "]");
+            throw new IllegalArgumentException("'" + value + "' encodes to '" + id + "' which is out of range [" + baseId + "," + maxId + "]");
 
         return id;
     }
@@ -123,6 +123,7 @@ public class DateStrDictionary extends Dictionary<String> {
         init(pattern, baseId);
     }
 
+
     @Override
     public int hashCode() {
         return 31 * baseId + pattern.hashCode();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
index 14faff4..61a0664 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
@@ -60,13 +60,11 @@ public class DictionaryGenerator {
         return builder;
     }
 
-    public static Dictionary<String> buildDictionary(DataType dataType, IDictionaryValueEnumerator valueEnumerator)
-            throws IOException {
+    public static Dictionary<String> buildDictionary(DataType dataType, IDictionaryValueEnumerator valueEnumerator) throws IOException {
         return buildDictionary(newDictionaryBuilder(dataType), null, valueEnumerator);
     }
 
-    static Dictionary<String> buildDictionary(IDictionaryBuilder builder, DictionaryInfo dictInfo,
-            IDictionaryValueEnumerator valueEnumerator) throws IOException {
+    static Dictionary<String> buildDictionary(IDictionaryBuilder builder, DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator) throws IOException {
         int baseId = 0; // always 0 for now
         int nSamples = 5;
         ArrayList<String> samples = new ArrayList<String>(nSamples);
@@ -121,7 +119,7 @@ public class DictionaryGenerator {
         public boolean addValue(String value) {
             if (StringUtils.isBlank(value)) // empty string is treated as null
                 return false;
-
+            
             // detect date pattern on the first value
             if (datePattern == null) {
                 for (String p : DATE_PATTERNS) {
@@ -136,7 +134,7 @@ public class DictionaryGenerator {
                 if (datePattern == null)
                     throw new IllegalArgumentException("Unknown date pattern for input value: " + value);
             }
-
+            
             // check the date format
             DateFormat.stringToDate(value, datePattern);
             return true;
@@ -176,28 +174,28 @@ public class DictionaryGenerator {
     private static class StringTrieDictBuilder implements IDictionaryBuilder {
         int baseId;
         TrieDictionaryBuilder builder;
-
+        
         @Override
         public void init(DictionaryInfo info, int baseId) throws IOException {
             this.baseId = baseId;
             this.builder = new TrieDictionaryBuilder(new StringBytesConverter());
         }
-
+        
         @Override
         public boolean addValue(String value) {
             if (value == null)
                 return false;
-
+            
             builder.addValue(value);
             return true;
         }
-
+        
         @Override
         public Dictionary<String> build() throws IOException {
             return builder.build(baseId);
         }
     }
-
+    
     private static class StringTrieDictForestBuilder implements IDictionaryBuilder {
         TrieDictionaryForestBuilder builder;
 
@@ -225,28 +223,28 @@ public class DictionaryGenerator {
     private static class NumberTrieDictBuilder implements IDictionaryBuilder {
         int baseId;
         NumberDictionaryBuilder builder;
-
+        
         @Override
         public void init(DictionaryInfo info, int baseId) throws IOException {
             this.baseId = baseId;
             this.builder = new NumberDictionaryBuilder();
         }
-
+        
         @Override
         public boolean addValue(String value) {
             if (StringUtils.isBlank(value)) // empty string is treated as null
                 return false;
-
+            
             builder.addValue(value);
             return true;
         }
-
+        
         @Override
         public Dictionary<String> build() throws IOException {
             return builder.build(baseId);
         }
     }
-
+    
     private static class NumberTrieDictForestBuilder implements IDictionaryBuilder {
         NumberDictionaryForestBuilder builder;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
index 6d27d98..ae5c0f1 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryInfo.java
@@ -55,9 +55,8 @@ public class DictionaryInfo extends RootPersistentEntity {
     public DictionaryInfo(ColumnDesc col, String dataType, TableSignature input) {
         this(col.getTable().getIdentity(), col.getName(), col.getZeroBasedIndex(), dataType, input);
     }
-
-    public DictionaryInfo(String sourceTable, String sourceColumn, int sourceColumnIndex, String dataType,
-            TableSignature input) {
+    
+    public DictionaryInfo(String sourceTable, String sourceColumn, int sourceColumnIndex, String dataType, TableSignature input) {
 
         this.updateRandomUuid();
 
@@ -94,10 +93,7 @@ public class DictionaryInfo extends RootPersistentEntity {
     // to decide if two dictionaries are built on the same table/column,
     // regardless of their signature
     public boolean isDictOnSameColumn(DictionaryInfo other) {
-        return this.sourceTable.equalsIgnoreCase(other.sourceTable)
-                && this.sourceColumn.equalsIgnoreCase(other.sourceColumn)
-                && this.sourceColumnIndex == other.sourceColumnIndex && this.dataType.equalsIgnoreCase(other.dataType)
-                && this.dictionaryClass.equalsIgnoreCase(other.dictionaryClass);
+        return this.sourceTable.equalsIgnoreCase(other.sourceTable) && this.sourceColumn.equalsIgnoreCase(other.sourceColumn) && this.sourceColumnIndex == other.sourceColumnIndex && this.dataType.equalsIgnoreCase(other.dataType) && this.dictionaryClass.equalsIgnoreCase(other.dictionaryClass);
     }
 
     public String getSourceTable() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
index aa84f3a..1628f4e 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
@@ -95,8 +95,7 @@ public class DictionaryManager {
                 .removalListener(new RemovalListener<String, DictionaryInfo>() {
                     @Override
                     public void onRemoval(RemovalNotification<String, DictionaryInfo> notification) {
-                        DictionaryManager.logger.info("Dict with resource path " + notification.getKey()
-                                + " is removed due to " + notification.getCause());
+                        DictionaryManager.logger.info("Dict with resource path " + notification.getKey() + " is removed due to " + notification.getCause());
                     }
                 })//
                 .maximumSize(config.getCachedDictMaxEntrySize())//
@@ -156,8 +155,7 @@ public class DictionaryManager {
                 largestDictInfo = getDictionaryInfo(largestDictInfo.getResourcePath());
                 Dictionary<String> largestDictObject = largestDictInfo.getDictionaryObject();
                 if (largestDictObject.contains(newDict)) {
-                    logger.info("dictionary content " + newDict + ", is contained by  dictionary at "
-                            + largestDictInfo.getResourcePath());
+                    logger.info("dictionary content " + newDict + ", is contained by  dictionary at " + largestDictInfo.getResourcePath());
                     return largestDictInfo;
                 } else if (newDict.contains(largestDictObject)) {
                     logger.info("dictionary content " + newDict + " is by far the largest, save it");
@@ -233,8 +231,7 @@ public class DictionaryManager {
             } else {
                 if (!firstDictInfo.isDictOnSameColumn(info)) {
                     // don't throw exception, just output warning as legacy cube segment may build dict on PK
-                    logger.warn("Merging dictionaries are not structurally equal : " + firstDictInfo.getResourcePath()
-                            + " and " + info.getResourcePath());
+                    logger.warn("Merging dictionaries are not structurally equal : " + firstDictInfo.getResourcePath() + " and " + info.getResourcePath());
                 }
             }
             totalSize += info.getInput().getSize();
@@ -270,19 +267,16 @@ public class DictionaryManager {
             logger.info("Use one of the merging dictionaries directly");
             return dicts.get(0);
         } else {
-            Dictionary<String> newDict = DictionaryGenerator
-                    .mergeDictionaries(DataType.getType(newDictInfo.getDataType()), dicts);
+            Dictionary<String> newDict = DictionaryGenerator.mergeDictionaries(DataType.getType(newDictInfo.getDataType()), dicts);
             return trySaveNewDict(newDict, newDictInfo);
         }
     }
 
-    public DictionaryInfo buildDictionary(DataModelDesc model, TblColRef col, IReadableTable inpTable)
-            throws IOException {
+    public DictionaryInfo buildDictionary(DataModelDesc model, TblColRef col, IReadableTable inpTable) throws IOException {
         return buildDictionary(model, col, inpTable, null);
     }
 
-    public DictionaryInfo buildDictionary(DataModelDesc model, TblColRef col, IReadableTable inpTable,
-            String builderClass) throws IOException {
+    public DictionaryInfo buildDictionary(DataModelDesc model, TblColRef col, IReadableTable inpTable, String builderClass) throws IOException {
         if (inpTable.exists() == false)
             return null;
 
@@ -291,8 +285,7 @@ public class DictionaryManager {
         DictionaryInfo dictInfo = createDictionaryInfo(model, col, inpTable);
         String dupInfo = checkDupByInfo(dictInfo);
         if (dupInfo != null) {
-            logger.info(
-                    "Identical dictionary input " + dictInfo.getInput() + ", reuse existing dictionary at " + dupInfo);
+            logger.info("Identical dictionary input " + dictInfo.getInput() + ", reuse existing dictionary at " + dupInfo);
             return getDictionaryInfo(dupInfo);
         }
 
@@ -303,16 +296,13 @@ public class DictionaryManager {
         return trySaveNewDict(dictionary, dictInfo);
     }
 
-    private Dictionary<String> buildDictFromReadableTable(IReadableTable inpTable, DictionaryInfo dictInfo,
-            String builderClass, TblColRef col) throws IOException {
+    private Dictionary<String> buildDictFromReadableTable(IReadableTable inpTable, DictionaryInfo dictInfo, String builderClass, TblColRef col) throws IOException {
         Dictionary<String> dictionary;
         IDictionaryValueEnumerator columnValueEnumerator = null;
         try {
-            columnValueEnumerator = new TableColumnValueEnumerator(inpTable.getReader(),
-                    dictInfo.getSourceColumnIndex());
+            columnValueEnumerator = new TableColumnValueEnumerator(inpTable.getReader(), dictInfo.getSourceColumnIndex());
             if (builderClass == null) {
-                dictionary = DictionaryGenerator.buildDictionary(DataType.getType(dictInfo.getDataType()),
-                        columnValueEnumerator);
+                dictionary = DictionaryGenerator.buildDictionary(DataType.getType(dictInfo.getDataType()), columnValueEnumerator);
             } else {
                 IDictionaryBuilder builder = (IDictionaryBuilder) ClassUtil.newInstance(builderClass);
                 dictionary = DictionaryGenerator.buildDictionary(builder, dictInfo, columnValueEnumerator);
@@ -326,21 +316,18 @@ public class DictionaryManager {
         return dictionary;
     }
 
-    public DictionaryInfo saveDictionary(DataModelDesc model, TblColRef col, IReadableTable inpTable,
-            Dictionary<String> dictionary) throws IOException {
+    public DictionaryInfo saveDictionary(DataModelDesc model, TblColRef col, IReadableTable inpTable, Dictionary<String> dictionary) throws IOException {
         DictionaryInfo dictInfo = createDictionaryInfo(model, col, inpTable);
         String dupInfo = checkDupByInfo(dictInfo);
         if (dupInfo != null) {
-            logger.info(
-                    "Identical dictionary input " + dictInfo.getInput() + ", reuse existing dictionary at " + dupInfo);
+            logger.info("Identical dictionary input " + dictInfo.getInput() + ", reuse existing dictionary at " + dupInfo);
             return getDictionaryInfo(dupInfo);
         }
 
         return trySaveNewDict(dictionary, dictInfo);
     }
 
-    private DictionaryInfo createDictionaryInfo(DataModelDesc model, TblColRef col, IReadableTable inpTable)
-            throws IOException {
+    private DictionaryInfo createDictionaryInfo(DataModelDesc model, TblColRef col, IReadableTable inpTable) throws IOException {
         TblColRef srcCol = decideSourceData(model, col);
         TableSignature inputSig = inpTable.getSignature();
         if (inputSig == null) // table does not exists
@@ -387,8 +374,7 @@ public class DictionaryManager {
 
     private String checkDupByInfo(DictionaryInfo dictInfo) throws IOException {
         final ResourceStore store = MetadataManager.getInstance(config).getStore();
-        final List<DictionaryInfo> allResources = store.getAllResources(dictInfo.getResourceDir(), DictionaryInfo.class,
-                DictionaryInfoSerializer.INFO_SERIALIZER);
+        final List<DictionaryInfo> allResources = store.getAllResources(dictInfo.getResourceDir(), DictionaryInfo.class, DictionaryInfoSerializer.INFO_SERIALIZER);
 
         TableSignature input = dictInfo.getInput();
 
@@ -402,8 +388,7 @@ public class DictionaryManager {
 
     private DictionaryInfo findLargestDictInfo(DictionaryInfo dictInfo) throws IOException {
         final ResourceStore store = MetadataManager.getInstance(config).getStore();
-        final List<DictionaryInfo> allResources = store.getAllResources(dictInfo.getResourceDir(), DictionaryInfo.class,
-                DictionaryInfoSerializer.INFO_SERIALIZER);
+        final List<DictionaryInfo> allResources = store.getAllResources(dictInfo.getResourceDir(), DictionaryInfo.class, DictionaryInfoSerializer.INFO_SERIALIZER);
 
         DictionaryInfo largestDict = null;
         for (DictionaryInfo dictionaryInfo : allResources) {
@@ -459,10 +444,8 @@ public class DictionaryManager {
     DictionaryInfo load(String resourcePath, boolean loadDictObj) throws IOException {
         ResourceStore store = MetadataManager.getInstance(config).getStore();
 
-        logger.info("DictionaryManager(" + System.identityHashCode(this) + ") loading DictionaryInfo(loadDictObj:"
-                + loadDictObj + ") at " + resourcePath);
-        DictionaryInfo info = store.getResource(resourcePath, DictionaryInfo.class,
-                loadDictObj ? DictionaryInfoSerializer.FULL_SERIALIZER : DictionaryInfoSerializer.INFO_SERIALIZER);
+        logger.info("DictionaryManager(" + System.identityHashCode(this) + ") loading DictionaryInfo(loadDictObj:" + loadDictObj + ") at " + resourcePath);
+        DictionaryInfo info = store.getResource(resourcePath, DictionaryInfo.class, loadDictObj ? DictionaryInfoSerializer.FULL_SERIALIZER : DictionaryInfoSerializer.INFO_SERIALIZER);
         return info;
     }
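
For context, the DictionaryManager flow shown above is: createDictionaryInfo derives a signature from the input table, checkDupByInfo looks for an existing dictionary with the same input, and only on a miss is a new dictionary built and saved via trySaveNewDict. A minimal calling sketch, assuming the usual DictionaryManager.getInstance entry point, already-resolved model/column/table arguments, and DictionaryInfo#getDictionaryObject as the accessor (illustration only, not code from this commit):

    // Sketch: build (or reuse) a dictionary for one column.
    // kylinConfig, model, col and readableTable are assumed to be resolved elsewhere.
    DictionaryManager dictMgr = DictionaryManager.getInstance(kylinConfig);
    DictionaryInfo dictInfo = dictMgr.buildDictionary(model, col, readableTable); // builderClass variant omitted
    if (dictInfo != null) {                        // null means the input table does not exist
        Dictionary<String> dict = dictInfo.getDictionaryObject();
    }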
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
index 002a89a..a593371 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
@@ -63,8 +63,7 @@ public class GlobalDictionaryBuilder implements IDictionaryBuilder {
             if (lock.lock(getLockPath(sourceColumn))) {
                 logger.info("processed {} values for {}", counter, sourceColumn);
             } else {
-                throw new RuntimeException(
-                        "Failed to create global dictionary on " + sourceColumn + " This client doesn't keep the lock");
+                throw new RuntimeException("Failed to create global dictionary on " + sourceColumn + " This client doesn't keep the lock");
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryBuilder.java
index 72491a3..0934a7d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryBuilder.java
@@ -29,10 +29,10 @@ public interface IDictionaryBuilder {
 
     /** Sets the dictionary info for the dictionary being built. Mainly for GlobalDictionaryBuilder. */
     void init(DictionaryInfo info, int baseId) throws IOException;
-
+    
     /** Add a new value into dictionary, returns it is accepted (not null) or not. */
     boolean addValue(String value);
-
+    
     /** Build the dictionary */
     Dictionary<String> build() throws IOException;
 }
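
The IDictionaryBuilder contract above implies a three-step lifecycle: init once with the DictionaryInfo and base id, add values one by one, then build. A minimal sketch of that lifecycle, assuming a builder class resolved via ClassUtil and an IDictionaryValueEnumerator as the value source (both taken from the DictionaryManager diff above, used here only for illustration):

    // Sketch: the IDictionaryBuilder lifecycle — init, addValue loop, build.
    IDictionaryBuilder builder = (IDictionaryBuilder) ClassUtil.newInstance(builderClass);
    builder.init(dictInfo, 0);                            // baseId 0, called once before any addValue
    while (columnValueEnumerator.moveNext()) {
        builder.addValue(columnValueEnumerator.current()); // returns false when the value is rejected (null)
    }
    Dictionary<String> dict = builder.build();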

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryValueEnumerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryValueEnumerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryValueEnumerator.java
index cbd9108..f193d4f 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryValueEnumerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/IDictionaryValueEnumerator.java
@@ -24,7 +24,7 @@ import java.io.IOException;
  * Created by dongli on 10/28/15.
  */
 public interface IDictionaryValueEnumerator {
-
+    
     String current() throws IOException;
 
     boolean moveNext() throws IOException;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/IterableDictionaryValueEnumerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/IterableDictionaryValueEnumerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/IterableDictionaryValueEnumerator.java
index 027b3dc..5c80a26 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/IterableDictionaryValueEnumerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/IterableDictionaryValueEnumerator.java
@@ -31,7 +31,7 @@ public class IterableDictionaryValueEnumerator implements IDictionaryValueEnumer
     public IterableDictionaryValueEnumerator(String... strs) {
         this(Arrays.asList(strs));
     }
-
+    
     public IterableDictionaryValueEnumerator(Iterable<String> list) {
         iterator = list.iterator();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/MultipleDictionaryValueEnumerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/MultipleDictionaryValueEnumerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/MultipleDictionaryValueEnumerator.java
index 33ed607..c1bc5d5 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/MultipleDictionaryValueEnumerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/MultipleDictionaryValueEnumerator.java
@@ -57,7 +57,7 @@ public class MultipleDictionaryValueEnumerator implements IDictionaryValueEnumer
         while (curDictIndex < dictionaryList.size()) {
             if (curKey <= curDict.getMaxId()) {
                 curValue = curDict.getValueFromId(curKey);
-                curKey++;
+                curKey ++;
 
                 return true;
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java b/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
index 4df0c84..397ca9f 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
@@ -46,7 +46,7 @@ public class Number2BytesConverter implements BytesConverter<String>, Serializab
         return codec;
     }
 
-    public Number2BytesConverter() {
+    public Number2BytesConverter(){
         this.maxDigitsBeforeDecimalPoint = MAX_DIGITS_BEFORE_DECIMAL_POINT;
     }
 
@@ -74,11 +74,11 @@ public class Number2BytesConverter implements BytesConverter<String>, Serializab
         if (badBegin) {
             v = new BigDecimal(v).toPlainString();
         }
-
+        
         while (v.contains(".") && (v.endsWith("0") || v.endsWith("."))) {
             v = v.substring(0, v.length() - 1);
         }
-
+        
         return v;
     }
 
@@ -117,9 +117,9 @@ public class Number2BytesConverter implements BytesConverter<String>, Serializab
                 return;
             }
 
+
             if (len > buf.length) {
-                throw new IllegalArgumentException("Too many digits for NumberDictionary: "
-                        + Bytes.toString(value, offset, len) + ". Internal buffer is only " + buf.length + " bytes");
+                throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Internal buffer is only " + buf.length + " bytes");
             }
 
             boolean negative = value[offset] == '-';
@@ -149,9 +149,7 @@ public class Number2BytesConverter implements BytesConverter<String>, Serializab
             // prepend '0'
             int nZeroPadding = maxDigitsBeforeDecimalPoint - (decimalPoint - start);
             if (nZeroPadding < 0 || nZeroPadding + 1 > start)
-                throw new IllegalArgumentException(
-                        "Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Expect "
-                                + maxDigitsBeforeDecimalPoint + " digits before decimal point at max.");
+                throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Expect " + maxDigitsBeforeDecimalPoint + " digits before decimal point at max.");
             for (int i = 0; i < nZeroPadding; i++) {
                 buf[--start] = '0';
             }
@@ -243,4 +241,4 @@ public class Number2BytesConverter implements BytesConverter<String>, Serializab
             return out - offset;
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
index 89b4701..1377e8e 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.dict;
 
+
 import org.apache.kylin.common.util.ClassUtil;
 
 /**
@@ -28,6 +29,7 @@ import org.apache.kylin.common.util.ClassUtil;
 @Deprecated
 public class NumberDictionary<T> extends TrieDictionary<T> {
 
+
     // ============================================================================
 
     public NumberDictionary() { // default constructor for Writable interface
@@ -48,8 +50,8 @@ public class NumberDictionary<T> extends TrieDictionary<T> {
     protected void setConverterByName(String converterName) throws Exception {
         converterName = "org.apache.kylin.dict.Number2BytesConverter";
         this.bytesConvert = ClassUtil.forName(converterName, BytesConverter.class).newInstance();
-        ((Number2BytesConverter) this.bytesConvert)
-                .setMaxDigitsBeforeDecimalPoint(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT_LEGACY);
+        ((Number2BytesConverter)this.bytesConvert).setMaxDigitsBeforeDecimalPoint(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT_LEGACY);
     }
 
-}
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
index f7edf88..26e4f89 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.dict;
 
+
 /**
  * Use <code>NumberDictionaryForestBuilder</code> instead.
  * 
@@ -26,10 +27,12 @@ package org.apache.kylin.dict;
 @Deprecated
 public class NumberDictionaryBuilder extends TrieDictionaryBuilder<String> {
 
+
     public NumberDictionaryBuilder() {
         super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT));
     }
 
+
     public NumberDictionary build(int baseId) {
         byte[] trieBytes = buildTrieBytes(baseId);
         NumberDictionary2 r = new NumberDictionary2(trieBytes);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
index afd013b..380cd1d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
@@ -41,12 +41,10 @@ public class NumberDictionaryForestBuilder extends TrieDictionaryForestBuilder<S
     }
 
     public NumberDictionaryForestBuilder(int baseId) {
-        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT),
-                0);
+        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0);
     }
 
     public NumberDictionaryForestBuilder(int baseId, int maxTrieSizeMB) {
-        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0,
-                maxTrieSizeMB);
+        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0, maxTrieSizeMB);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueEnumerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueEnumerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueEnumerator.java
index 960681a..7caf686 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueEnumerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueEnumerator.java
@@ -47,8 +47,7 @@ public class TableColumnValueEnumerator implements IDictionaryValueEnumerator {
             } else {
                 // normal case
                 if (split.length <= colIndex) {
-                    throw new ArrayIndexOutOfBoundsException(
-                            "Column no. " + colIndex + " not found, line split is " + Arrays.asList(split));
+                    throw new ArrayIndexOutOfBoundsException("Column no. " + colIndex + " not found, line split is " + Arrays.asList(split));
                 }
                 colStrValue = split[colIndex];
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueSortedEnumerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueSortedEnumerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueSortedEnumerator.java
index ff4cc64..8f9f74f 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueSortedEnumerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TableColumnValueSortedEnumerator.java
@@ -41,8 +41,7 @@ public class TableColumnValueSortedEnumerator implements IDictionaryValueEnumera
 
     private PriorityQueue<ReaderBuffer> pq;
 
-    public TableColumnValueSortedEnumerator(Collection<IReadableTable.TableReader> readers, int colIndex,
-            final Comparator<String> comparator) {
+    public TableColumnValueSortedEnumerator(Collection<IReadableTable.TableReader> readers, int colIndex, final Comparator<String> comparator) {
         this.readers = readers;
         this.colIndex = colIndex;
         this.comparator = comparator;
@@ -90,6 +89,7 @@ public class TableColumnValueSortedEnumerator implements IDictionaryValueEnumera
         return false;
     }
 
+
     @Override
     public void close() throws IOException {
         for (IReadableTable.TableReader reader : readers) {
@@ -136,8 +136,7 @@ public class TableColumnValueSortedEnumerator implements IDictionaryValueEnumera
                 } else {
                     // normal case
                     if (split.length <= colIndex) {
-                        throw new ArrayIndexOutOfBoundsException(
-                                "Column no. " + colIndex + " not found, line split is " + Arrays.asList(split));
+                        throw new ArrayIndexOutOfBoundsException("Column no. " + colIndex + " not found, line split is " + Arrays.asList(split));
                     }
                     this.cache = split[colIndex];
                 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionary.java
index 160bfc5..8849015 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionary.java
@@ -29,6 +29,7 @@ import java.io.ObjectOutputStream;
 import java.io.PrintStream;
 import java.util.Arrays;
 
+
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ClassUtil;
@@ -51,11 +52,11 @@ import com.google.common.base.Preconditions;
  *
  * @author yangli9
  */
-@SuppressWarnings({ "rawtypes", "unchecked" })
+@SuppressWarnings({"rawtypes", "unchecked"})
 public class TrieDictionary<T> extends CacheDictionary<T> {
     private static final long serialVersionUID = 1L;
 
-    public static final byte[] MAGIC = new byte[] { 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74 }; // "TrieDict"
+    public static final byte[] MAGIC = new byte[]{0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74}; // "TrieDict"
     public static final int MAGIC_SIZE_I = MAGIC.length;
 
     public static final int BIT_IS_LAST_CHILD = 0x80;
@@ -78,6 +79,7 @@ public class TrieDictionary<T> extends CacheDictionary<T> {
     transient private long childOffsetMask;
     transient private int firstByteOffset;
 
+
     public TrieDictionary() { // default constructor for Writable interface
     }
 
@@ -141,6 +143,7 @@ public class TrieDictionary<T> extends CacheDictionary<T> {
         return maxValueLength;
     }
 
+
     @Override
     protected int getIdFromValueBytesWithoutCache(byte[] value, int offset, int len, int roundingFlag) {
         int seq = lookupSeqNoFromValue(headSize, value, offset, offset + len, roundingFlag);
@@ -230,6 +233,7 @@ public class TrieDictionary<T> extends CacheDictionary<T> {
             return k;
     }
 
+
     @Override
     protected byte[] getValueBytesFromIdWithoutCache(int id) {
         byte[] buf = new byte[maxValueLength];
@@ -409,8 +413,7 @@ public class TrieDictionary<T> extends CacheDictionary<T> {
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         new ObjectOutputStream(baos).writeObject(dict);
 
-        TrieDictionary<String> dict2 = (TrieDictionary<String>) new ObjectInputStream(
-                new ByteArrayInputStream(baos.toByteArray())).readObject();
+        TrieDictionary<String> dict2 = (TrieDictionary<String>) new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray())).readObject();
         Preconditions.checkArgument(dict.contains(dict2));
         Preconditions.checkArgument(dict2.contains(dict));
         Preconditions.checkArgument(dict.equals(dict2));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryBuilder.java
index 12ef117..1750ac1 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryBuilder.java
@@ -216,10 +216,7 @@ public class TrieDictionaryBuilder<T> {
 
             // flatten trie footprint calculation, case of One-Byte-Per-Node
             out.println("----------------------------------------------------------------------------");
-            out.println("OBPN node size:  "
-                    + (obpn_sizeValue + obpn_sizeNoValuesBeneath + obpn_sizeChildCount + obpn_sizeChildOffset) + " = "
-                    + obpn_sizeValue + " + " + obpn_sizeNoValuesBeneath + " + " + obpn_sizeChildCount + " + "
-                    + obpn_sizeChildOffset);
+            out.println("OBPN node size:  " + (obpn_sizeValue + obpn_sizeNoValuesBeneath + obpn_sizeChildCount + obpn_sizeChildOffset) + " = " + obpn_sizeValue + " + " + obpn_sizeNoValuesBeneath + " + " + obpn_sizeChildCount + " + " + obpn_sizeChildOffset);
             out.println("OBPN no. nodes:  " + obpn_nNodes);
             out.println("OBPN trie depth: " + maxValueLength);
             out.println("OBPN footprint:  " + obpn_footprint + " in bytes");
@@ -231,9 +228,7 @@ public class TrieDictionaryBuilder<T> {
             out.println("MBPN total fan out:     " + mbpn_nTotalFanOut);
             out.println("MBPN average fan out:   " + (double) mbpn_nTotalFanOut / mbpn_nChildLookups);
             out.println("MBPN values size total: " + mbpn_sizeValueTotal);
-            out.println("MBPN node size:         "
-                    + (mbpn_sizeNoValueBytes + mbpn_sizeNoValueBeneath + mbpn_sizeChildOffset) + " = "
-                    + mbpn_sizeNoValueBytes + " + " + mbpn_sizeNoValueBeneath + " + " + mbpn_sizeChildOffset);
+            out.println("MBPN node size:         " + (mbpn_sizeNoValueBytes + mbpn_sizeNoValueBeneath + mbpn_sizeChildOffset) + " = " + mbpn_sizeNoValueBytes + " + " + mbpn_sizeNoValueBeneath + " + " + mbpn_sizeChildOffset);
             out.println("MBPN no. nodes:         " + mbpn_nNodes);
             out.println("MBPN trie depth:        " + mbpn_trieDepth);
             out.println("MBPN footprint:         " + mbpn_footprint + " in bytes");
@@ -297,11 +292,9 @@ public class TrieDictionaryBuilder<T> {
         s.obpn_sizeChildCount = 1;
         s.obpn_sizeChildOffset = 5; // MSB used as isEndOfValue flag
         s.obpn_nNodes = s.nValueBytesCompressed; // no. nodes is the total number of compressed bytes in OBPN
-        s.obpn_footprint = s.obpn_nNodes * (long) (s.obpn_sizeValue + s.obpn_sizeNoValuesBeneath + s.obpn_sizeChildCount
-                + s.obpn_sizeChildOffset);
+        s.obpn_footprint = s.obpn_nNodes * (long) (s.obpn_sizeValue + s.obpn_sizeNoValuesBeneath + s.obpn_sizeChildCount + s.obpn_sizeChildOffset);
         while (true) { // minimize the offset size to match the footprint
-            long t = s.obpn_nNodes * (long) (s.obpn_sizeValue + s.obpn_sizeNoValuesBeneath + s.obpn_sizeChildCount
-                    + s.obpn_sizeChildOffset - 1);
+            long t = s.obpn_nNodes * (long) (s.obpn_sizeValue + s.obpn_sizeNoValuesBeneath + s.obpn_sizeChildCount + s.obpn_sizeChildOffset - 1);
             if (BytesUtil.sizeForValue(t * 2) <= s.obpn_sizeChildOffset - 1) { // *2 because MSB of offset is used for isEndOfValue flag
                 s.obpn_sizeChildOffset--;
                 s.obpn_footprint = t;
@@ -314,11 +307,9 @@ public class TrieDictionaryBuilder<T> {
         s.mbpn_sizeNoValueBytes = 1;
         s.mbpn_sizeNoValueBeneath = BytesUtil.sizeForValue(s.nValues);
         s.mbpn_sizeChildOffset = 5;
-        s.mbpn_footprint = s.mbpn_sizeValueTotal
-                + s.mbpn_nNodes * (long) (s.mbpn_sizeNoValueBytes + s.mbpn_sizeNoValueBeneath + s.mbpn_sizeChildOffset);
+        s.mbpn_footprint = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (long) (s.mbpn_sizeNoValueBytes + s.mbpn_sizeNoValueBeneath + s.mbpn_sizeChildOffset);
         while (true) { // minimize the offset size to match the footprint
-            long t = s.mbpn_sizeValueTotal + s.mbpn_nNodes
-                    * (long) (s.mbpn_sizeNoValueBytes + s.mbpn_sizeNoValueBeneath + s.mbpn_sizeChildOffset - 1);
+            long t = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (long) (s.mbpn_sizeNoValueBytes + s.mbpn_sizeNoValueBeneath + s.mbpn_sizeChildOffset - 1);
             if (BytesUtil.sizeForValue(t * 4) <= s.mbpn_sizeChildOffset - 1) { // *4 because 2 MSB of offset is used for isEndOfValue & isEndChild flag
                 s.mbpn_sizeChildOffset--;
                 s.mbpn_footprint = t;
@@ -493,14 +484,12 @@ public class TrieDictionaryBuilder<T> {
     }
 
     private void build_overwriteChildOffset(int parentOffset, int childOffset, int sizeChildOffset, byte[] trieBytes) {
-        int flags = (int) trieBytes[parentOffset]
-                & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
+        int flags = (int) trieBytes[parentOffset] & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
         BytesUtil.writeUnsigned(childOffset, trieBytes, parentOffset, sizeChildOffset);
         trieBytes[parentOffset] |= flags;
     }
 
-    private int build_writeNode(Node n, int offset, boolean isLastChild, int sizeNoValuesBeneath, int sizeChildOffset,
-            byte[] trieBytes) {
+    private int build_writeNode(Node n, int offset, boolean isLastChild, int sizeNoValuesBeneath, int sizeChildOffset, byte[] trieBytes) {
         int o = offset;
         if (o > _2GB)
             throw new IllegalStateException();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForest.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForest.java
index 6ef767b..09d5bc2 100755
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForest.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForest.java
@@ -63,12 +63,11 @@ public class TrieDictionaryForest<T> extends CacheDictionary<T> {
     }
 
     public TrieDictionaryForest(ArrayList<TrieDictionary<T>> trees, ArrayList<ByteArray> valueDivide, //
-            ArrayList<Integer> accuOffset, BytesConverter<T> bytesConverter, int baseId) {
+                                ArrayList<Integer> accuOffset, BytesConverter<T> bytesConverter, int baseId) {
         init(trees, valueDivide, accuOffset, bytesConverter, baseId);
     }
 
-    private void init(ArrayList<TrieDictionary<T>> trees, ArrayList<ByteArray> valueDivide,
-            ArrayList<Integer> accuOffset, BytesConverter<T> bytesConverter, int baseId) {
+    private void init(ArrayList<TrieDictionary<T>> trees, ArrayList<ByteArray> valueDivide, ArrayList<Integer> accuOffset, BytesConverter<T> bytesConverter, int baseId) {
         this.trees = trees;
         this.valueDivide = valueDivide;
         this.accuOffset = accuOffset;
@@ -99,8 +98,7 @@ public class TrieDictionaryForest<T> extends CacheDictionary<T> {
     }
 
     @Override
-    protected int getIdFromValueBytesWithoutCache(byte[] value, int offset, int len, int roundingFlag)
-            throws IllegalArgumentException {
+    protected int getIdFromValueBytesWithoutCache(byte[] value, int offset, int len, int roundingFlag) throws IllegalArgumentException {
         int index;
         if (trees.size() == 1) {
             index = 0;
@@ -111,8 +109,7 @@ public class TrieDictionaryForest<T> extends CacheDictionary<T> {
                 if (roundingFlag > 0) {
                     return getMinId(); //searching value smaller than the smallest value in dict
                 } else {
-                    throw new IllegalArgumentException("Value '" + Bytes.toString(value, offset, len) + "' ("
-                            + Bytes.toStringBinary(value, offset, len) + ") not exists!");
+                    throw new IllegalArgumentException("Value '" + Bytes.toString(value, offset, len) + "' (" + Bytes.toStringBinary(value, offset, len) + ") not exists!");
                 }
             }
 
@@ -121,15 +118,13 @@ public class TrieDictionaryForest<T> extends CacheDictionary<T> {
                 if (search.compareTo(maxValueOfTree) > 0)
                     index++;
                 if (index >= trees.size())
-                    throw new IllegalArgumentException("Value '" + Bytes.toString(value, offset, len) + "' ("
-                            + Bytes.toStringBinary(value, offset, len) + ") not exists!");
+                    throw new IllegalArgumentException("Value '" + Bytes.toString(value, offset, len) + "' (" + Bytes.toStringBinary(value, offset, len) + ") not exists!");
             }
         }
         TrieDictionary<T> tree = trees.get(index);
         int id = tree.getIdFromValueBytesWithoutCache(value, offset, len, roundingFlag);
         if (id == -1)
-            throw new IllegalArgumentException("Value '" + Bytes.toString(value, offset, len) + "' ("
-                    + Bytes.toStringBinary(value, offset, len) + ") not exists!");
+            throw new IllegalArgumentException("Value '" + Bytes.toString(value, offset, len) + "' (" + Bytes.toStringBinary(value, offset, len) + ") not exists!");
         id = id + accuOffset.get(index);
         id += baseId;
         return id;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForestBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForestBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForestBuilder.java
index 482c12a..0e5e63e 100755
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForestBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/TrieDictionaryForestBuilder.java
@@ -17,13 +17,13 @@
 */
 package org.apache.kylin.dict;
 
-import java.util.ArrayList;
-
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ByteArray;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
+
 /**
  * Build a trie dictionary forest if the input values is ordered, or the forest falls back to a single trie.
  */
@@ -86,12 +86,10 @@ public class TrieDictionaryForestBuilder<T> {
                 return; //duplicate value
             }
             if (comp > 0) {
-                logger.info("values not in ascending order, previous '{}', current '{}'", previousValue,
-                        valueByteArray);
+                logger.info("values not in ascending order, previous '{}', current '{}'", previousValue, valueByteArray);
                 isOrdered = false;
                 if (trees.size() > 0) {
-                    throw new IllegalStateException(
-                            "Invalid input data. Unordered data cannot be split into multi trees");
+                    throw new IllegalStateException("Invalid input data. Unordered data cannot be split into multi trees");
                 }
             }
         }
@@ -112,8 +110,7 @@ public class TrieDictionaryForestBuilder<T> {
             addTree(tree);
             reset();
         }
-        TrieDictionaryForest<T> forest = new TrieDictionaryForest<T>(this.trees, this.valueDivide, this.accuOffset,
-                this.bytesConverter, baseId);
+        TrieDictionaryForest<T> forest = new TrieDictionaryForest<T>(this.trees, this.valueDivide, this.accuOffset, this.bytesConverter, baseId);
         // if input values are not in ascending order and tree num>1,TrieDictionaryForest can not work correctly.
         if (forest.getTrees().size() > 1 && !isOrdered) {
             throw new IllegalStateException("Invalid input data. Unordered data can not be split into multi trees");
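
The javadoc and the ordering checks above spell out the forest builder's contract: values must arrive in ascending order for the input to be split across multiple tries; unordered input either falls back to a single trie or, once a split has already happened, fails with IllegalStateException. A minimal usage sketch, assuming NumberDictionaryForestBuilder's addValue/build signatures (the sample values are illustrative only):

    // Sketch: ordered input builds a forest; unordered input would hit the
    // IllegalStateException path shown in the diff above.
    NumberDictionaryForestBuilder builder = new NumberDictionaryForestBuilder();
    builder.addValue("1");
    builder.addValue("2");
    builder.addValue("10");   // still ascending numerically under Number2BytesConverter
    TrieDictionaryForest<String> forest = builder.build();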

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
index 31e938a..ee3a2c2 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
@@ -18,6 +18,11 @@
 
 package org.apache.kylin.dict.global;
 
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.BytesUtil;
+import org.apache.kylin.dict.AppendTrieDictionary;
+import org.apache.kylin.dict.TrieDictionary;
+
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -27,11 +32,6 @@ import java.util.ArrayList;
 import java.util.IdentityHashMap;
 import java.util.LinkedList;
 
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.BytesUtil;
-import org.apache.kylin.dict.AppendTrieDictionary;
-import org.apache.kylin.dict.TrieDictionary;
-
 public class AppendDictNode {
     public byte[] part;
     public int id = -1;
@@ -195,14 +195,12 @@ public class AppendDictNode {
     }
 
     private void build_overwriteChildOffset(int parentOffset, int childOffset, int sizeChildOffset, byte[] trieBytes) {
-        int flags = (int) trieBytes[parentOffset]
-                & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
+        int flags = (int) trieBytes[parentOffset] & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
         BytesUtil.writeUnsigned(childOffset, trieBytes, parentOffset, sizeChildOffset);
         trieBytes[parentOffset] |= flags;
     }
 
-    private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, int sizeChildOffset, int sizeId,
-            byte[] trieBytes) {
+    private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, int sizeChildOffset, int sizeId, byte[] trieBytes) {
         int o = offset;
 
         // childOffset
@@ -214,8 +212,7 @@ public class AppendDictNode {
 
         // nValueBytes
         if (n.part.length > 255)
-            throw new RuntimeException(
-                    "Value length is " + n.part.length + " and larger than 255: " + Bytes.toStringBinary(n.part));
+            throw new RuntimeException("Value length is " + n.part.length + " and larger than 255: " + Bytes.toStringBinary(n.part));
         BytesUtil.writeUnsigned(n.part.length, trieBytes, o, 1);
         o++;
 
@@ -241,8 +238,7 @@ public class AppendDictNode {
 
     @Override
     public String toString() {
-        return String.format("DictNode[root=%s, nodes=%d, firstValue=%s]", Bytes.toStringBinary(part), childrenCount,
-                Bytes.toStringBinary(firstValue()));
+        return String.format("DictNode[root=%s, nodes=%d, firstValue=%s]", Bytes.toStringBinary(part), childrenCount, Bytes.toStringBinary(firstValue()));
     }
 
     static class Stats {
@@ -336,8 +332,7 @@ public class AppendDictNode {
             s.mbpn_sizeValueTotal = s.nValueBytesCompressed + s.nValues * s.mbpn_sizeId;
             s.mbpn_sizeNoValueBytes = 1;
             s.mbpn_sizeChildOffset = 5;
-            s.mbpn_footprint = s.mbpn_sizeValueTotal
-                    + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset);
+            s.mbpn_footprint = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset);
             while (true) { // minimize the offset size to match the footprint
                 int t = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset - 1);
                 // *4 because 2 MSB of offset is used for isEndOfValue & isEndChild flag
@@ -380,4 +375,4 @@ public class AppendDictNode {
             }, 0);
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
index a21832d..4e820e0 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
@@ -18,6 +18,9 @@
 
 package org.apache.kylin.dict.global;
 
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.BytesUtil;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -26,12 +29,8 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
 
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.BytesUtil;
-
 public class AppendDictSlice {
-    static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44,
-            0x69, 0x63, 0x74 }; // "AppendTrieDict"
+    static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
     static final int HEAD_SIZE_I = HEAD_MAGIC.length;
     static final int BIT_IS_LAST_CHILD = 0x80;
     static final int BIT_IS_END_OF_VALUE = 0x40;
@@ -59,8 +58,7 @@ public class AppendDictSlice {
             throw new IllegalArgumentException("Wrong file type (magic does not match)");
 
         try {
-            DataInputStream headIn = new DataInputStream(
-                    new ByteArrayInputStream(trieBytes, HEAD_SIZE_I, trieBytes.length - HEAD_SIZE_I));
+            DataInputStream headIn = new DataInputStream(new ByteArrayInputStream(trieBytes, HEAD_SIZE_I, trieBytes.length - HEAD_SIZE_I));
             this.headSize = headIn.readShort();
             this.bodyLen = headIn.readInt();
             this.nValues = headIn.readInt();
@@ -106,8 +104,7 @@ public class AppendDictSlice {
             if (checkFlag(nodeOffset, BIT_IS_END_OF_VALUE)) {
                 break;
             }
-            nodeOffset = headSize
-                    + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
+            nodeOffset = headSize + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
             if (nodeOffset == headSize) {
                 break;
             }
@@ -158,8 +155,7 @@ public class AppendDictSlice {
                 } else if (comp < 0) { // try next child
                     if (checkFlag(c, BIT_IS_LAST_CHILD))
                         return -1;
-                    c = p + BytesUtil.readUnsigned(trieBytes, p - 1, 1)
-                            + (checkFlag(c, BIT_IS_END_OF_VALUE) ? sizeOfId : 0);
+                    c = p + BytesUtil.readUnsigned(trieBytes, p - 1, 1) + (checkFlag(c, BIT_IS_END_OF_VALUE) ? sizeOfId : 0);
                 } else { // children are ordered by their first value byte
                     return -1;
                 }
@@ -265,8 +261,7 @@ public class AppendDictSlice {
 
     @Override
     public String toString() {
-        return String.format("DictSlice[firstValue=%s, values=%d, bytes=%d]", Bytes.toStringBinary(getFirstValue()),
-                nValues, bodyLen);
+        return String.format("DictSlice[firstValue=%s, values=%d, bytes=%d]", Bytes.toStringBinary(getFirstValue()), nValues, bodyLen);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSliceKey.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSliceKey.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSliceKey.java
index d61258a..323fe6b 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSliceKey.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSliceKey.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.dict.global;
 
+import org.apache.kylin.common.util.Bytes;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.kylin.common.util.Bytes;
-
 public class AppendDictSliceKey implements Comparable<AppendDictSliceKey> {
     static final AppendDictSliceKey START_KEY = AppendDictSliceKey.wrap(new byte[0]);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryBuilder.java
index 2911798..90d65b6 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryBuilder.java
@@ -18,18 +18,18 @@
 
 package org.apache.kylin.dict.global;
 
-import static com.google.common.base.Preconditions.checkState;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.TreeMap;
-
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.dict.AppendTrieDictionary;
 import org.apache.kylin.dict.BytesConverter;
 import org.apache.kylin.dict.StringBytesConverter;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.TreeMap;
+
+import static com.google.common.base.Preconditions.checkState;
+
 public class AppendTrieDictionaryBuilder {
 
     private final String baseDir;
@@ -47,8 +47,7 @@ public class AppendTrieDictionaryBuilder {
     private AppendDictNode curNode;
 
     public AppendTrieDictionaryBuilder(String resourceDir, int maxEntriesPerSlice) throws IOException {
-        this.baseDir = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "resources/GlobalDict" + resourceDir
-                + "/";
+        this.baseDir = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "resources/GlobalDict" + resourceDir + "/";
         this.workingDir = this.baseDir + "/working";
         this.maxEntriesPerSlice = maxEntriesPerSlice;
         init();
@@ -84,8 +83,7 @@ public class AppendTrieDictionaryBuilder {
             curNode = new AppendDictNode(new byte[0], false);
             sliceFileMap.put(AppendDictSliceKey.START_KEY, null);
         }
-        checkState(sliceFileMap.firstKey().equals(AppendDictSliceKey.START_KEY),
-                "first key should be \"\", but got \"%s\"", sliceFileMap.firstKey());
+        checkState(sliceFileMap.firstKey().equals(AppendDictSliceKey.START_KEY), "first key should be \"\", but got \"%s\"", sliceFileMap.firstKey());
 
         AppendDictSliceKey nextKey = sliceFileMap.floorKey(AppendDictSliceKey.wrap(valueBytes));
 
@@ -123,8 +121,7 @@ public class AppendTrieDictionaryBuilder {
             flushCurrentNode();
         }
 
-        GlobalDictMetadata metadata = new GlobalDictMetadata(baseId, this.maxId, this.maxValueLength, this.nValues,
-                this.bytesConverter, sliceFileMap);
+        GlobalDictMetadata metadata = new GlobalDictMetadata(baseId, this.maxId, this.maxValueLength, this.nValues, this.bytesConverter, sliceFileMap);
         store.commit(workingDir, metadata);
 
         AppendTrieDictionary dict = new AppendTrieDictionary();
@@ -161,8 +158,7 @@ public class AppendTrieDictionaryBuilder {
                 }
             } else {
                 // otherwise, split the current node into two
-                AppendDictNode c = new AppendDictNode(BytesUtil.subarray(node.part, i, n), node.isEndOfValue,
-                        node.children);
+                AppendDictNode c = new AppendDictNode(BytesUtil.subarray(node.part, i, n), node.isEndOfValue, node.children);
                 c.id = node.id;
                 node.reset(BytesUtil.subarray(node.part, 0, i), true);
                 node.addChild(c);
@@ -174,8 +170,7 @@ public class AppendTrieDictionaryBuilder {
         // if partially matched the current, split the current node, add the new
         // value, make a 3-way
         if (i < n) {
-            AppendDictNode c1 = new AppendDictNode(BytesUtil.subarray(node.part, i, n), node.isEndOfValue,
-                    node.children);
+            AppendDictNode c1 = new AppendDictNode(BytesUtil.subarray(node.part, i, n), node.isEndOfValue, node.children);
             c1.id = node.id;
             AppendDictNode c2 = addNodeMaybeOverflow(value, j, nn);
             node.reset(BytesUtil.subarray(node.part, 0, i), false);


[33/67] [abbrv] kylin git commit: #732 catch and cache ResourceLimitExceededException

Posted by li...@apache.org.
#732 catch and cache ResourceLimitExceededException


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6bfc6d28
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6bfc6d28
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6bfc6d28

Branch: refs/heads/master
Commit: 6bfc6d286de0d8d2fcec43cf655fee7030b18d84
Parents: 87d5d8d
Author: Roger Shi <ro...@hotmail.com>
Authored: Sat May 27 11:06:29 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Sat May 27 16:41:52 2017 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/rest/service/QueryService.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6bfc6d28/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 33fb614..61ddbb0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -48,6 +48,7 @@ import javax.sql.DataSource;
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -437,7 +438,7 @@ public class QueryService extends BasicService {
                 sqlResponse.setTotalScanCount(queryContext.getScannedRows());
                 sqlResponse.setTotalScanBytes(queryContext.getScannedBytes());
 
-                if (queryCacheEnabled && e.getCause() != null && e.getCause() instanceof ResourceLimitExceededException) {
+                if (queryCacheEnabled && e.getCause() != null && ExceptionUtils.getRootCause(e) instanceof ResourceLimitExceededException) {
                     Cache exceptionCache = cacheManager.getCache(EXCEPTION_QUERY_CACHE);
                     exceptionCache.put(new Element(sqlRequest, sqlResponse));
                 }
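
The one-line change above matters because the failing query is often wrapped several levels deep before reaching this handler: getCause() inspects only the immediate cause, while ExceptionUtils.getRootCause (commons-lang3, imported above) walks the whole chain. A small sketch of the difference; the exception nesting and message constructors below are illustrative assumptions, not taken from the commit:

    // Sketch: immediate cause vs. root cause of a wrapped ResourceLimitExceededException.
    Exception e = new SQLException("query failed",
            new RuntimeException(new ResourceLimitExceededException("scan limit exceeded")));

    boolean immediateHit = e.getCause() instanceof ResourceLimitExceededException;               // false: cause is RuntimeException
    boolean rootHit = ExceptionUtils.getRootCause(e) instanceof ResourceLimitExceededException;  // true: root of the chain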


[21/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryChecker.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryChecker.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryChecker.java
index 8af633e..94b6e9d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryChecker.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendTrieDictionaryChecker.java
@@ -17,8 +17,6 @@
 */
 package org.apache.kylin.dict.global;
 
-import static org.apache.kylin.dict.global.GlobalDictHDFSStore.BUFFER_SIZE;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -30,6 +28,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HadoopUtil;
 
+import static org.apache.kylin.dict.global.GlobalDictHDFSStore.BUFFER_SIZE;
+
 /**
  * Created by sunyerui on 16/11/15.
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
index a5ba46c..b30d5b9 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
@@ -18,15 +18,6 @@
 
 package org.apache.kylin.dict.global;
 
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -42,6 +33,15 @@ import org.apache.kylin.dict.BytesConverter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
 public class GlobalDictHDFSStore extends GlobalDictStore {
 
     static final Logger logger = LoggerFactory.getLogger(GlobalDictHDFSStore.class);
@@ -241,12 +241,9 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
 
     @Override
     public String copyToAnotherMeta(KylinConfig srcConfig, KylinConfig dstConfig) throws IOException {
-        checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()),
-                "Please check why current directory {} doesn't belong to source working directory {}", baseDir,
-                srcConfig.getHdfsWorkingDirectory());
+        checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()), "Please check why current directory {} doesn't belong to source working directory {}", baseDir, srcConfig.getHdfsWorkingDirectory());
 
-        final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(),
-                dstConfig.getHdfsWorkingDirectory());
+        final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(), dstConfig.getHdfsWorkingDirectory());
 
         Long[] versions = listAllVersions();
         if (versions.length == 0) { // empty dict, nothing to copy
@@ -254,8 +251,7 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
         }
 
         Path srcVersionDir = getVersionDir(versions[versions.length - 1]);
-        Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(),
-                dstConfig.getHdfsWorkingDirectory()));
+        Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(), dstConfig.getHdfsWorkingDirectory()));
         FileSystem dstFS = dstVersionDir.getFileSystem(conf);
         if (dstFS.exists(dstVersionDir)) {
             dstFS.delete(dstVersionDir, true);
@@ -405,8 +401,7 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
         public void sanityCheck(Path dir, GlobalDictMetadata metadata) throws IOException {
             for (Map.Entry<AppendDictSliceKey, String> entry : metadata.sliceFileMap.entrySet()) {
                 if (!fs.exists(new Path(dir, entry.getValue()))) {
-                    throw new RuntimeException("The slice file " + entry.getValue() + " for the key: " + entry.getKey()
-                            + " must be existed!");
+                    throw new RuntimeException("The slice file " + entry.getValue() + " for the key: " + entry.getKey() + " must be existed!");
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictMetadata.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictMetadata.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictMetadata.java
index d35380c..7c89ea2 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictMetadata.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictMetadata.java
@@ -18,12 +18,11 @@
 
 package org.apache.kylin.dict.global;
 
-import java.util.NavigableMap;
-import java.util.TreeMap;
-
+import com.google.common.base.Preconditions;
 import org.apache.kylin.dict.BytesConverter;
 
-import com.google.common.base.Preconditions;
+import java.util.NavigableMap;
+import java.util.TreeMap;
 
 /**
  * Encapsulates the metadata for a particular version of the global dictionary.
@@ -37,8 +36,7 @@ public class GlobalDictMetadata {
     public final BytesConverter bytesConverter;
     public final TreeMap<AppendDictSliceKey, String> sliceFileMap; // slice key -> slice file name
 
-    public GlobalDictMetadata(int baseId, int maxId, int maxValueLength, int nValues, BytesConverter bytesConverter,
-            NavigableMap<AppendDictSliceKey, String> sliceFileMap) {
+    public GlobalDictMetadata(int baseId, int maxId, int maxValueLength, int nValues, BytesConverter bytesConverter, NavigableMap<AppendDictSliceKey, String> sliceFileMap) {
 
         Preconditions.checkNotNull(bytesConverter, "bytesConverter");
         Preconditions.checkNotNull(sliceFileMap, "sliceFileMap");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictStore.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictStore.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictStore.java
index c370afc..eaf0729 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictStore.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictStore.java
@@ -18,12 +18,11 @@
 
 package org.apache.kylin.dict.global;
 
-import java.io.IOException;
-
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 
-import com.google.common.base.Preconditions;
+import java.io.IOException;
 
 public abstract class GlobalDictStore {
 
@@ -74,8 +73,7 @@ public abstract class GlobalDictStore {
      * @return file name of the new written slice
      * @throws IOException on I/O error
      */
-    public abstract String writeSlice(String workingDir, AppendDictSliceKey key, AppendDictNode slice)
-            throws IOException;
+    public abstract String writeSlice(String workingDir, AppendDictSliceKey key, AppendDictNode slice) throws IOException;
 
     /**
      * Delete a slice with the specified file name.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/LookupTable.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/LookupTable.java b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/LookupTable.java
index a22878f..a99ef29 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/LookupTable.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/LookupTable.java
@@ -83,8 +83,7 @@ abstract public class LookupTable<T> {
         Array<T> key = new Array<T>(keyCols);
 
         if (data.containsKey(key))
-            throw new IllegalStateException("The table: " + tableDesc.getName() + " Dup key found, key="
-                    + toString(keyCols) + ", value1=" + toString(data.get(key)) + ", value2=" + toString(value));
+            throw new IllegalStateException("The table: " + tableDesc.getName() + " Dup key found, key=" + toString(keyCols) + ", value1=" + toString(data.get(key)) + ", value2=" + toString(value));
 
         data.put(key, value);
     }
@@ -148,8 +147,7 @@ abstract public class LookupTable<T> {
     }
 
     private boolean between(T beginValue, T v, T endValue, Comparator<T> comp) {
-        return (beginValue == null || comp.compare(beginValue, v) <= 0)
-                && (endValue == null || comp.compare(v, endValue) <= 0);
+        return (beginValue == null || comp.compare(beginValue, v) <= 0) && (endValue == null || comp.compare(v, endValue) <= 0);
     }
 
     abstract protected Comparator<T> getComparator(int colIdx);
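
The between() helper in the hunk above treats a null bound as open-ended. A standalone sketch of the same idiom, with hypothetical names outside the Kylin code base:

    import java.util.Comparator;

    public class RangeCheckSketch {
        // null beginValue/endValue means the range is unbounded on that side
        static <T> boolean between(T beginValue, T v, T endValue, Comparator<T> comp) {
            return (beginValue == null || comp.compare(beginValue, v) <= 0)
                    && (endValue == null || comp.compare(v, endValue) <= 0);
        }

        public static void main(String[] args) {
            Comparator<Integer> cmp = Comparator.naturalOrder();
            System.out.println(between(null, 5, 10, cmp)); // true: open lower bound
            System.out.println(between(3, 5, null, cmp));  // true: open upper bound
            System.out.println(between(6, 5, 10, cmp));    // false: 5 is below the lower bound
        }
    }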

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotCLI.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotCLI.java b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotCLI.java
index 07659a8..45fd56b 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotCLI.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotCLI.java
@@ -41,8 +41,7 @@ public class SnapshotCLI {
         if (tableDesc == null)
             throw new IllegalArgumentException("Not table found by " + table);
 
-        SnapshotTable snapshot = snapshotMgr.rebuildSnapshot(SourceFactory.createReadableTable(tableDesc), tableDesc,
-                overwriteUUID);
+        SnapshotTable snapshot = snapshotMgr.rebuildSnapshot(SourceFactory.createReadableTable(tableDesc), tableDesc, overwriteUUID);
         System.out.println("resource path updated: " + snapshot.getResourcePath());
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
index d7c1b68..b997a88 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotManager.java
@@ -86,8 +86,7 @@ public class SnapshotManager {
         this.snapshotCache = CacheBuilder.newBuilder().removalListener(new RemovalListener<String, SnapshotTable>() {
             @Override
             public void onRemoval(RemovalNotification<String, SnapshotTable> notification) {
-                SnapshotManager.logger.info("Snapshot with resource path " + notification.getKey()
-                        + " is removed due to " + notification.getCause());
+                SnapshotManager.logger.info("Snapshot with resource path " + notification.getKey() + " is removed due to " + notification.getCause());
             }
         }).maximumSize(config.getCachedSnapshotMaxEntrySize())//
                 .expireAfterWrite(1, TimeUnit.DAYS).build(new CacheLoader<String, SnapshotTable>() {
@@ -142,8 +141,7 @@ public class SnapshotManager {
         return trySaveNewSnapshot(snapshot);
     }
 
-    public SnapshotTable rebuildSnapshot(IReadableTable table, TableDesc tableDesc, String overwriteUUID)
-            throws IOException {
+    public SnapshotTable rebuildSnapshot(IReadableTable table, TableDesc tableDesc, String overwriteUUID) throws IOException {
         SnapshotTable snapshot = new SnapshotTable(table, tableDesc.getIdentity());
         snapshot.setUuid(overwriteUUID);
 
@@ -216,8 +214,7 @@ public class SnapshotManager {
         logger.info("Loading snapshotTable from " + resourcePath + ", with loadData: " + loadData);
         ResourceStore store = MetadataManager.getInstance(this.config).getStore();
 
-        SnapshotTable table = store.getResource(resourcePath, SnapshotTable.class,
-                loadData ? SnapshotTableSerializer.FULL_SERIALIZER : SnapshotTableSerializer.INFO_SERIALIZER);
+        SnapshotTable table = store.getResource(resourcePath, SnapshotTable.class, loadData ? SnapshotTableSerializer.FULL_SERIALIZER : SnapshotTableSerializer.INFO_SERIALIZER);
 
         if (loadData)
             logger.debug("Loaded snapshot at " + resourcePath);
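
The snapshotCache built above is a Guava LoadingCache with a removal listener, a size cap, and one-day expiry. A minimal sketch of that wiring, using stand-in String values rather than Kylin's SnapshotTable:

    import java.util.concurrent.TimeUnit;

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import com.google.common.cache.RemovalListener;

    public class SnapshotCacheSketch {
        public static void main(String[] args) throws Exception {
            LoadingCache<String, String> cache = CacheBuilder.newBuilder()
                    .removalListener((RemovalListener<String, String>) n ->
                            System.out.println("Snapshot " + n.getKey() + " removed due to " + n.getCause()))
                    .maximumSize(500)                   // stand-in for config.getCachedSnapshotMaxEntrySize()
                    .expireAfterWrite(1, TimeUnit.DAYS) // drop entries a day after they are loaded
                    .build(new CacheLoader<String, String>() {
                        @Override
                        public String load(String resourcePath) {
                            return "snapshot@" + resourcePath; // stand-in for loading from the ResourceStore
                        }
                    });
            System.out.println(cache.get("/table_snapshot/TEST.TABLE/snap.snapshot"));
        }
    }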

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotTable.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotTable.java b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotTable.java
index ed0cb7a..1d7e474 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotTable.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/lookup/SnapshotTable.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import com.google.common.base.Strings;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -42,7 +43,6 @@ import org.apache.kylin.source.IReadableTable;
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Strings;
 
 /**
  * @author yangli9
@@ -83,8 +83,7 @@ public class SnapshotTable extends RootPersistentEntity implements IReadableTabl
             while (reader.next()) {
                 String[] row = reader.getRow();
                 if (row.length <= maxIndex) {
-                    throw new IllegalStateException("Bad hive table row, " + tableDesc + " expect " + (maxIndex + 1)
-                            + " columns, but got " + Arrays.toString(row));
+                    throw new IllegalStateException("Bad hive table row, " + tableDesc + " expect " + (maxIndex + 1) + " columns, but got " + Arrays.toString(row));
                 }
                 for (ColumnDesc column : tableDesc.getColumns()) {
                     String cell = row[column.getZeroBasedIndex()];
@@ -169,7 +168,7 @@ public class SnapshotTable extends RootPersistentEntity implements IReadableTabl
     public boolean exists() throws IOException {
         return true;
     }
-
+    
     /**
      * a naive implementation
      *

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryManagerTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryManagerTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryManagerTest.java
index cd86936..4820318 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryManagerTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryManagerTest.java
@@ -61,14 +61,14 @@ public class DictionaryManagerTest extends LocalFileMetadataTestCase {
             assertEquals(lookupDate, dictMgr.decideSourceData(innerModel, lookupDate));
             assertEquals(formatName, dictMgr.decideSourceData(innerModel, formatName));
         }
-
+        
         {
             DataModelDesc outerModel = metaMgr.getDataModelDesc("test_kylin_left_join_model_desc");
             TblColRef factDate = outerModel.findColumn("TEST_KYLIN_FACT.CAL_DT");
             assertEquals(factDate, dictMgr.decideSourceData(outerModel, factDate));
         }
     }
-
+    
     @Test
     public void testBuildSaveDictionary() throws IOException {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
@@ -80,32 +80,25 @@ public class DictionaryManagerTest extends LocalFileMetadataTestCase {
         // non-exist input returns null;
         DictionaryInfo nullInfo = dictMgr.buildDictionary(model, col, MockupReadableTable.newNonExistTable("/a/path"));
         assertEquals(null, nullInfo);
-
-        DictionaryInfo info1 = dictMgr.buildDictionary(model, col,
-                MockupReadableTable.newSingleColumnTable("/a/path", "1", "2", "3"));
+        
+        DictionaryInfo info1 = dictMgr.buildDictionary(model, col, MockupReadableTable.newSingleColumnTable("/a/path", "1", "2", "3"));
         assertEquals(3, info1.getDictionaryObject().getSize());
 
         // same input returns same dict
-        DictionaryInfo info2 = dictMgr.buildDictionary(model, col,
-                MockupReadableTable.newSingleColumnTable("/a/path", "1", "2", "3"));
+        DictionaryInfo info2 = dictMgr.buildDictionary(model, col, MockupReadableTable.newSingleColumnTable("/a/path", "1", "2", "3"));
         assertTrue(info1 == info2);
-
+        
         // same input values (different path) returns same dict
-        DictionaryInfo info3 = dictMgr.buildDictionary(model, col,
-                MockupReadableTable.newSingleColumnTable("/a/different/path", "1", "2", "3"));
+        DictionaryInfo info3 = dictMgr.buildDictionary(model, col, MockupReadableTable.newSingleColumnTable("/a/different/path", "1", "2", "3"));
         assertTrue(info1 == info3);
-
+        
         // save dictionary works in spite of non-exist table
-        Dictionary<String> dict = DictionaryGenerator.buildDictionary(col.getType(),
-                new IterableDictionaryValueEnumerator("1", "2", "3"));
-        DictionaryInfo info4 = dictMgr.saveDictionary(model, col, MockupReadableTable.newNonExistTable("/a/path"),
-                dict);
+        Dictionary<String> dict = DictionaryGenerator.buildDictionary(col.getType(), new IterableDictionaryValueEnumerator("1", "2", "3"));
+        DictionaryInfo info4 = dictMgr.saveDictionary(model, col, MockupReadableTable.newNonExistTable("/a/path"), dict);
         assertTrue(info1 == info4);
-
-        Dictionary<String> dict2 = DictionaryGenerator.buildDictionary(col.getType(),
-                new IterableDictionaryValueEnumerator("1", "2", "3", "4"));
-        DictionaryInfo info5 = dictMgr.saveDictionary(model, col, MockupReadableTable.newNonExistTable("/a/path"),
-                dict2);
+        
+        Dictionary<String> dict2 = DictionaryGenerator.buildDictionary(col.getType(), new IterableDictionaryValueEnumerator("1", "2", "3", "4"));
+        DictionaryInfo info5 = dictMgr.saveDictionary(model, col, MockupReadableTable.newNonExistTable("/a/path"), dict2);
         assertTrue(info1 != info5);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryProviderTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryProviderTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryProviderTest.java
index 794f334..4b386a7 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryProviderTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/DictionaryProviderTest.java
@@ -37,7 +37,7 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-public class DictionaryProviderTest extends LocalFileMetadataTestCase {
+public class DictionaryProviderTest extends LocalFileMetadataTestCase{
 
     @Before
     public void setUp() throws Exception {
@@ -52,28 +52,23 @@ public class DictionaryProviderTest extends LocalFileMetadataTestCase {
     @Test
     public void testReadWrite() throws Exception {
         //string dict
-        Dictionary<String> dict = getDict(DataType.getType("string"),
-                Arrays.asList(new String[] { "a", "b" }).iterator());
+        Dictionary<String> dict = getDict(DataType.getType("string"), Arrays.asList(new String[] { "a", "b" }).iterator());
         readWriteTest(dict);
         //number dict
-        Dictionary<String> dict2 = getDict(DataType.getType("long"),
-                Arrays.asList(new String[] { "1", "2" }).iterator());
+        Dictionary<String> dict2 = getDict(DataType.getType("long"), Arrays.asList(new String[] { "1", "2" }).iterator());
         readWriteTest(dict2);
 
         //date dict
-        Dictionary<String> dict3 = getDict(DataType.getType("datetime"),
-                Arrays.asList(new String[] { "20161122", "20161123" }).iterator());
+        Dictionary<String> dict3 = getDict(DataType.getType("datetime"), Arrays.asList(new String[] { "20161122", "20161123" }).iterator());
         readWriteTest(dict3);
 
         //date dict
-        Dictionary<String> dict4 = getDict(DataType.getType("datetime"),
-                Arrays.asList(new String[] { "2016-11-22", "2016-11-23" }).iterator());
+        Dictionary<String> dict4 = getDict(DataType.getType("datetime"), Arrays.asList(new String[] { "2016-11-22", "2016-11-23" }).iterator());
         readWriteTest(dict4);
 
         //date dict
         try {
-            Dictionary<String> dict5 = getDict(DataType.getType("date"),
-                    Arrays.asList(new String[] { "2016-11-22", "20161122" }).iterator());
+            Dictionary<String> dict5 = getDict(DataType.getType("date"), Arrays.asList(new String[] { "2016-11-22", "20161122" }).iterator());
             readWriteTest(dict5);
             fail("Date format not correct.Should throw exception");
         } catch (IllegalArgumentException e) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/MockupReadableTable.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/MockupReadableTable.java b/core-dictionary/src/test/java/org/apache/kylin/dict/MockupReadableTable.java
index a9d2398..53c9476 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/MockupReadableTable.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/MockupReadableTable.java
@@ -34,7 +34,7 @@ public class MockupReadableTable implements IReadableTable {
         }
         return new MockupReadableTable(content, sig, true);
     }
-
+    
     public static IReadableTable newNonExistTable(String path) {
         TableSignature sig = new TableSignature(path, -1, 0);
         return new MockupReadableTable(null, sig, false);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
index d25f2f4..2e90bcf 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
@@ -56,52 +56,52 @@ public class MultipleDictionaryValueEnumeratorTest {
     @Test
     public void testNormalDicts() throws IOException {
         List<DictionaryInfo> dictionaryInfoList = new ArrayList<>(2);
-        dictionaryInfoList.add(createDictInfo(new int[] { 0, 1, 2 }));
-        dictionaryInfoList.add(createDictInfo(new int[] { 4, 5, 6 }));
+        dictionaryInfoList.add(createDictInfo(new int[]{0, 1, 2}));
+        dictionaryInfoList.add(createDictInfo(new int[]{4, 5, 6}));
 
         Integer[] values = enumerateDictInfoList(dictionaryInfoList);
         assertEquals(6, values.length);
-        assertArrayEquals(new Integer[] { 0, 1, 2, 4, 5, 6 }, values);
+        assertArrayEquals(new Integer[]{0, 1, 2, 4, 5, 6}, values);
     }
 
     @Test
     public void testFirstEmptyDicts() throws IOException {
         List<DictionaryInfo> dictionaryInfoList = new ArrayList<>(2);
-        dictionaryInfoList.add(createDictInfo(new int[] {}));
-        dictionaryInfoList.add(createDictInfo(new int[] { 4, 5, 6 }));
+        dictionaryInfoList.add(createDictInfo(new int[]{}));
+        dictionaryInfoList.add(createDictInfo(new int[]{4, 5, 6}));
 
         Integer[] values = enumerateDictInfoList(dictionaryInfoList);
         assertEquals(3, values.length);
-        assertArrayEquals(new Integer[] { 4, 5, 6 }, values);
+        assertArrayEquals(new Integer[]{4, 5, 6}, values);
     }
 
     @Test
     public void testMiddleEmptyDicts() throws IOException {
         List<DictionaryInfo> dictionaryInfoList = new ArrayList<>(3);
-        dictionaryInfoList.add(createDictInfo(new int[] { 0, 1, 2 }));
-        dictionaryInfoList.add(createDictInfo(new int[] {}));
-        dictionaryInfoList.add(createDictInfo(new int[] { 7, 8, 9 }));
+        dictionaryInfoList.add(createDictInfo(new int[]{0, 1, 2}));
+        dictionaryInfoList.add(createDictInfo(new int[]{}));
+        dictionaryInfoList.add(createDictInfo(new int[]{7, 8, 9}));
 
         Integer[] values = enumerateDictInfoList(dictionaryInfoList);
         assertEquals(6, values.length);
-        assertArrayEquals(new Integer[] { 0, 1, 2, 7, 8, 9 }, values);
+        assertArrayEquals(new Integer[]{0, 1, 2, 7, 8, 9}, values);
     }
 
     @Test
     public void testLastEmptyDicts() throws IOException {
         List<DictionaryInfo> dictionaryInfoList = new ArrayList<>(3);
-        dictionaryInfoList.add(createDictInfo(new int[] { 0, 1, 2 }));
-        dictionaryInfoList.add(createDictInfo(new int[] { 6, 7, 8 }));
-        dictionaryInfoList.add(createDictInfo(new int[] {}));
+        dictionaryInfoList.add(createDictInfo(new int[]{0, 1, 2}));
+        dictionaryInfoList.add(createDictInfo(new int[]{6, 7, 8}));
+        dictionaryInfoList.add(createDictInfo(new int[]{}));
 
         Integer[] values = enumerateDictInfoList(dictionaryInfoList);
         assertEquals(6, values.length);
-        assertArrayEquals(new Integer[] { 0, 1, 2, 6, 7, 8 }, values);
+        assertArrayEquals(new Integer[]{0, 1, 2, 6, 7, 8}, values);
     }
 
     public static class MockDictionary extends Dictionary<String> {
         private static final long serialVersionUID = 1L;
-
+        
         public int[] values;
 
         @Override
@@ -111,7 +111,7 @@ public class MultipleDictionaryValueEnumeratorTest {
 
         @Override
         public int getMaxId() {
-            return values.length - 1;
+            return values.length-1;
         }
 
         @Override
@@ -134,17 +134,16 @@ public class MultipleDictionaryValueEnumeratorTest {
             return "" + values[id];
         }
 
+
         @Override
-        public void dump(PrintStream out) {
-        }
+        public void dump(PrintStream out) {}
 
         @Override
-        public void write(DataOutput out) throws IOException {
-        }
+        public void write(DataOutput out) throws IOException {}
 
         @Override
-        public void readFields(DataInput in) throws IOException {
-        }
+        public void readFields(DataInput in) throws IOException {}
+
 
         @Override
         public boolean contains(Dictionary another) {
@@ -152,4 +151,4 @@ public class MultipleDictionaryValueEnumeratorTest {
         }
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
index 9bdb174..8da7208 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
@@ -45,8 +45,7 @@ import com.google.common.collect.Sets;
  */
 public class NumberDictionaryTest extends LocalFileMetadataTestCase {
 
-    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(
-            MAX_DIGITS_BEFORE_DECIMAL_POINT);
+    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT);
     Random rand = new Random();
 
     @Before
@@ -79,8 +78,7 @@ public class NumberDictionaryTest extends LocalFileMetadataTestCase {
         String[] ints = new String[] { "", "0", "5", "100", "13" };
 
         // check "" is treated as NULL, not a code of dictionary
-        Dictionary<?> dict = DictionaryGenerator.buildDictionary(DataType.getType("integer"),
-                new IterableDictionaryValueEnumerator(ints));
+        Dictionary<?> dict = DictionaryGenerator.buildDictionary(DataType.getType("integer"), new IterableDictionaryValueEnumerator(ints));
         assertEquals(4, dict.getSize());
 
         final int id = ((NumberDictionary<String>) dict).getIdFromValue("");
@@ -97,8 +95,7 @@ public class NumberDictionaryTest extends LocalFileMetadataTestCase {
         //test resolved jira-1800
         checkCodec("-0.0045454354354354359999999999877218", "-9999999999999999999.9954545645645645640000000000122781;");
         checkCodec("-0.009999999999877218", "-9999999999999999999.990000000000122781;");
-        checkCodec("12343434372493274.438403840384023840253554345345345345",
-                "00012343434372493274.438403840384023840253554345345345345");
+        checkCodec("12343434372493274.438403840384023840253554345345345345", "00012343434372493274.438403840384023840253554345345345345");
         assertEquals("00000000000000000052.57", encodeNumber("52.5700"));
         assertEquals("00000000000000000000", encodeNumber("0.00"));
         assertEquals("00000000000000000000", encodeNumber("0.0"));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestBenchmark.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestBenchmark.java b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestBenchmark.java
index e1eb119..adc1074 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestBenchmark.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestBenchmark.java
@@ -18,16 +18,16 @@
 
 package org.apache.kylin.dict;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Random;
-import java.util.UUID;
-
 import org.apache.kylin.common.util.Dictionary;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Random;
+import java.util.UUID;
+
 /**
  * Created by xiefan on 16-12-28.
  */
@@ -50,8 +50,7 @@ public class TrieDictionaryForestBenchmark {
     public void before() {
         int dataSize = 100 * 10000;
         TrieDictionaryBuilder<String> b1 = new TrieDictionaryBuilder<>(new StringBytesConverter());
-        TrieDictionaryForestBuilder<String> b2 = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(), 0,
-                5);
+        TrieDictionaryForestBuilder<String> b2 = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(), 0, 5);
         this.rawData = genStringDataSet(dataSize);
         for (String str : this.rawData) {
             b1.addValue(str);
@@ -153,8 +152,7 @@ public class TrieDictionaryForestBenchmark {
         return System.currentTimeMillis() - startTime;
     }
 
-    private long runQueryIdByValueBytes(ArrayList<String> rawData, Dictionary<String> dict, int cardnality,
-            int testTimes) {
+    private long runQueryIdByValueBytes(ArrayList<String> rawData, Dictionary<String> dict, int cardnality, int testTimes) {
         long startTime = System.currentTimeMillis();
         int step = 1;
         for (int i = 0; i < testTimes; i++) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
index a8c9a67..82380b3 100755
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
@@ -186,14 +186,8 @@ public class TrieDictionaryForestTest {
         str.add("party");
         str.add("parties");
         str.add("paint");
-        String longStr = "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk";
+        String longStr = "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk";
         System.out.println("The length of the long string is " + longStr.length());
         str.add(longStr);
 
@@ -745,8 +739,7 @@ public class TrieDictionaryForestTest {
             System.out.println("times:" + i);
         }
 
-        System.out.println("compare build time.  Old trie : " + oldDictTotalBuildTime / 1000.0 + "s.New trie : "
-                + newDictTotalBuildTime / 1000.0 + "s");
+        System.out.println("compare build time.  Old trie : " + oldDictTotalBuildTime / 1000.0 + "s.New trie : " + newDictTotalBuildTime / 1000.0 + "s");
     }
 
     private void evaluateDataSize(ArrayList<String> list) {
@@ -812,8 +805,7 @@ public class TrieDictionaryForestTest {
         benchmark("Benchmark", dict, set, map, strArray, array);
     }
 
-    private static int benchmark(String msg, TrieDictionaryForest<String> dict, TreeSet<String> set,
-            HashMap<String, Integer> map, String[] strArray, byte[][] array) {
+    private static int benchmark(String msg, TrieDictionaryForest<String> dict, TreeSet<String> set, HashMap<String, Integer> map, String[] strArray, byte[][] array) {
         int n = set.size();
         int times = Math.max(10 * 1000 * 1000 / n, 1); // run 10 million lookups
         int keep = 0; // make sure JIT don't OPT OUT function calls under test
@@ -937,16 +929,14 @@ public class TrieDictionaryForestTest {
     }
 
     public static TrieDictionaryForestBuilder<String> newDictBuilder(Iterable<String> strs, int baseId) {
-        TrieDictionaryForestBuilder<String> b = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(),
-                baseId);
+        TrieDictionaryForestBuilder<String> b = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(), baseId);
         for (String s : strs)
             b.addValue(s);
         return b;
     }
 
     public static TrieDictionaryForestBuilder<String> newDictBuilder(Iterable<String> strs, int baseId, int treeSize) {
-        TrieDictionaryForestBuilder<String> b = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(),
-                baseId);
+        TrieDictionaryForestBuilder<String> b = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(), baseId);
         b.setMaxTrieTreeSize(treeSize);
         for (String s : strs) {
             b.addValue(s);
@@ -955,8 +945,7 @@ public class TrieDictionaryForestTest {
     }
 
     public static TrieDictionaryForestBuilder<String> newDictBuilder(Iterator<String> strs, int baseId, int treeSize) {
-        TrieDictionaryForestBuilder<String> b = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(),
-                baseId);
+        TrieDictionaryForestBuilder<String> b = new TrieDictionaryForestBuilder<String>(new StringBytesConverter(), baseId);
         b.setMaxTrieTreeSize(treeSize);
         while (strs.hasNext())
             b.addValue(strs.next());
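
The benchmarks in the hunks above size the loop as Math.max(10 * 1000 * 1000 / n, 1) and accumulate results into a "keep" variable so the JIT cannot eliminate the lookups under test. A stripped-down sketch of that pattern; lookup() is a stand-in for the dictionary call being measured:

    public class BenchmarkKeepSketch {
        public static void main(String[] args) {
            int n = 1000;
            int times = Math.max(10 * 1000 * 1000 / n, 1); // aim for roughly 10 million lookups in total
            int keep = 0;                                  // keep results live so the JIT can't drop the calls
            long start = System.currentTimeMillis();
            for (int t = 0; t < times; t++) {
                for (int i = 0; i < n; i++) {
                    keep |= lookup(i);
                }
            }
            long elapsed = System.currentTimeMillis() - start;
            System.out.println("elapsed=" + elapsed + " ms, keep=" + keep);
        }

        private static int lookup(int i) {
            return i * 31; // stand-in for a dictionary lookup
        }
    }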

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryTest.java
index af4c9cd..13c83ac 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryTest.java
@@ -111,14 +111,8 @@ public class TrieDictionaryTest {
         str.add("party");
         str.add("parties");
         str.add("paint");
-        String longStr = "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk";
+        String longStr = "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk";
         System.out.println("The length of the long string is " + longStr.length());
         str.add(longStr);
 
@@ -275,8 +269,7 @@ public class TrieDictionaryTest {
         benchmark("Benchmark", dict, set, map, strArray, array);
     }
 
-    private static int benchmark(String msg, TrieDictionary<String> dict, TreeSet<String> set,
-            HashMap<String, Integer> map, String[] strArray, byte[][] array) {
+    private static int benchmark(String msg, TrieDictionary<String> dict, TreeSet<String> set, HashMap<String, Integer> map, String[] strArray, byte[][] array) {
         int n = set.size();
         int times = Math.max(10 * 1000 * 1000 / n, 1); // run 10 million lookups
         int keep = 0; // make sure JIT don't OPT OUT function calls under test
@@ -412,12 +405,10 @@ public class TrieDictionaryTest {
 
     @Test
     public void testSuperLongStringValue() {
-        String longPrefix = "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789"
-                + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789";
+        String longPrefix = "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789" + "0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789";
 
         TrieDictionaryBuilder<String> b = new TrieDictionaryBuilder<String>(new StringBytesConverter());
-        String v1 = longPrefix
-                + "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz";
+        String v1 = longPrefix + "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz";
         String v2 = longPrefix + "xyz";
 
         b.addValue(v1);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/global/AppendTrieDictionaryTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/global/AppendTrieDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/global/AppendTrieDictionaryTest.java
index 1671748..47011fe 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/global/AppendTrieDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/global/AppendTrieDictionaryTest.java
@@ -66,8 +66,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
     public void beforeTest() {
         staticCreateTestMetadata();
         KylinConfig.getInstanceFromEnv().setProperty("kylin.dictionary.append-entry-size", "50000");
-        BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR
-                + "/";
+        BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR + "/";
         LOCAL_BASE_DIR = getLocalWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR + "/";
     }
 
@@ -85,19 +84,11 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         }
     }
 
-    private static final String[] words = new String[] { "paint", "par", "part", "parts", "partition", "partitions",
-            "party", "partie", "parties", "patient", "taste", "tar", "trie", "try", "tries", "字典", "字典树", "字母", // non-ascii characters
+    private static final String[] words = new String[] { "paint", "par", "part", "parts", "partition", "partitions", "party", "partie", "parties", "patient", "taste", "tar", "trie", "try", "tries", "字典", "字典树", "字母", // non-ascii characters
             "", // empty
-            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
-            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiipaiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
-            "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk",
+            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii", "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiipaiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
+            "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk",
             "paint", "tar", "try", // some dup
     };
 
@@ -177,8 +168,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         dict.dump(System.out);
     }
 
-    private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notfound, boolean shuffleList)
-            throws IOException {
+    private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notfound, boolean shuffleList) throws IOException {
         Random rnd = new Random(System.currentTimeMillis());
         ArrayList<String> strList = new ArrayList<String>();
         strList.addAll(list);
@@ -205,8 +195,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
             assertNotEquals(String.format("Value %s not exist", str), -1, id);
-            assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)),
-                    checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+            assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
             checkMap.put(id, str);
         }
 
@@ -230,8 +219,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
                 assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
             } else {
                 // check second append str, should be new id
-                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)),
-                        checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
                 checkMap.put(id, str);
             }
         }
@@ -256,8 +244,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
                 assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
             } else {
                 // check third append str, should be new id
-                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)),
-                        checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
                 checkMap.put(id, str);
             }
         }
@@ -278,8 +265,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         }
     }
 
-    private static AppendTrieDictionary<String> testSerialize(AppendTrieDictionary<String> dict,
-            BytesConverter converter) {
+    private static AppendTrieDictionary<String> testSerialize(AppendTrieDictionary<String> dict, BytesConverter converter) {
         try {
             ByteArrayOutputStream bout = new ByteArrayOutputStream();
             DataOutputStream dataout = new DataOutputStream(bout);
@@ -526,8 +512,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         Path v2IndexFile = new Path(versionPath, V2_INDEX_NAME);
 
         fs.delete(v2IndexFile, true);
-        GlobalDictHDFSStore.IndexFormat indexFormatV1 = new GlobalDictHDFSStore.IndexFormatV1(fs,
-                HadoopUtil.getCurrentConfiguration());
+        GlobalDictHDFSStore.IndexFormat indexFormatV1 = new GlobalDictHDFSStore.IndexFormatV1(fs, HadoopUtil.getCurrentConfiguration());
         indexFormatV1.writeIndexFile(versionPath, metadata);
 
         //convert v2 fileName format to v1 fileName format

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-dictionary/src/test/java/org/apache/kylin/dict/lookup/LookupTableTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/lookup/LookupTableTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/lookup/LookupTableTest.java
index b798b86..5a5c988 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/lookup/LookupTableTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/lookup/LookupTableTest.java
@@ -72,8 +72,7 @@ public class LookupTableTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testMapRange() throws Exception {
-        Pair<String, String> results = lookupTable.mapRange("CAL_DT", millis("2012-01-24"), millis("2012-12-30"),
-                "QTR_BEG_DT");
+        Pair<String, String> results = lookupTable.mapRange("CAL_DT", millis("2012-01-24"), millis("2012-12-30"), "QTR_BEG_DT");
 
         Assert.assertTrue(results != null);
         System.out.println("The first qtr_beg_dt is " + results.getFirst());
@@ -85,8 +84,7 @@ public class LookupTableTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testMapRange2() throws Exception {
-        Pair<String, String> results = lookupTable.mapRange("WEEK_BEG_DT", millis("2013-05-01"), millis("2013-08-01"),
-                "CAL_DT");
+        Pair<String, String> results = lookupTable.mapRange("WEEK_BEG_DT", millis("2013-05-01"), millis("2013-08-01"), "CAL_DT");
 
         System.out.println(DateFormat.formatToDateStr(Long.parseLong(results.getFirst())));
         System.out.println(DateFormat.formatToDateStr(Long.parseLong(results.getSecond())));
@@ -111,7 +109,7 @@ public class LookupTableTest extends LocalFileMetadataTestCase {
     }
 
     @Test
-    public void testGetClassName() {
+    public void testGetClassName(){
         String name = TrieDictionaryForest.class.getName();
         System.out.println(name);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/engine/EngineFactory.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/engine/EngineFactory.java b/core-job/src/main/java/org/apache/kylin/engine/EngineFactory.java
index 302af14..acaa7da 100644
--- a/core-job/src/main/java/org/apache/kylin/engine/EngineFactory.java
+++ b/core-job/src/main/java/org/apache/kylin/engine/EngineFactory.java
@@ -45,11 +45,11 @@ public class EngineFactory {
     public static IJoinedFlatTableDesc getJoinedFlatTableDesc(CubeDesc cubeDesc) {
         return batchEngine(cubeDesc).getJoinedFlatTableDesc(cubeDesc);
     }
-
+    
     public static IJoinedFlatTableDesc getJoinedFlatTableDesc(CubeSegment newSegment) {
         return batchEngine(newSegment).getJoinedFlatTableDesc(newSegment);
     }
-
+    
     /** Build a new cube segment, typically its time range appends to the end of current cube. */
     public static DefaultChainedExecutable createBatchCubingJob(CubeSegment newSegment, String submitter) {
         return batchEngine(newSegment).createBatchCubingJob(newSegment, submitter);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/engine/IBatchCubingEngine.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/engine/IBatchCubingEngine.java b/core-job/src/main/java/org/apache/kylin/engine/IBatchCubingEngine.java
index 3abdbc2..754dbde 100644
--- a/core-job/src/main/java/org/apache/kylin/engine/IBatchCubingEngine.java
+++ b/core-job/src/main/java/org/apache/kylin/engine/IBatchCubingEngine.java
@@ -24,11 +24,11 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
 
 public interface IBatchCubingEngine {
-
+    
     /** Mark deprecated to indicate for test purpose only */
     @Deprecated
     public IJoinedFlatTableDesc getJoinedFlatTableDesc(CubeDesc cubeDesc);
-
+    
     public IJoinedFlatTableDesc getJoinedFlatTableDesc(CubeSegment newSegment);
 
     /** Build a new cube segment, typically its time range appends to the end of current cube. */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/JobInstance.java b/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
index b45e3e0..8dcdff6 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JobInstance.java
@@ -70,8 +70,7 @@ public class JobInstance extends RootPersistentEntity implements Comparable<JobI
 
     public JobStep getRunningStep() {
         for (JobStep step : this.getSteps()) {
-            if (step.getStatus().equals(JobStepStatusEnum.RUNNING)
-                    || step.getStatus().equals(JobStepStatusEnum.WAITING)) {
+            if (step.getStatus().equals(JobStepStatusEnum.RUNNING) || step.getStatus().equals(JobStepStatusEnum.WAITING)) {
                 return step;
             }
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index 3f82f16..4665465 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -87,21 +87,18 @@ public class JoinedFlatTable {
         if (kylinConfig.isAdvancedFlatTableUsed()) {
             try {
                 Class advancedFlatTable = Class.forName(kylinConfig.getAdvancedFlatTableClass());
-                Method method = advancedFlatTable.getMethod("generateInsertDataStatement", IJoinedFlatTableDesc.class,
-                        JobEngineConfig.class);
+                Method method = advancedFlatTable.getMethod("generateInsertDataStatement", IJoinedFlatTableDesc.class, JobEngineConfig.class);
                 return (String) method.invoke(null, flatDesc);
             } catch (Exception e) {
                 throw new RuntimeException(e);
             }
         }
 
-        return "INSERT OVERWRITE TABLE " + flatDesc.getTableName() + " " + generateSelectDataStatement(flatDesc)
-                + ";\n";
+        return "INSERT OVERWRITE TABLE " + flatDesc.getTableName() + " " + generateSelectDataStatement(flatDesc) + ";\n";
     }
 
     public static String generateInsertPartialDataStatement(IJoinedFlatTableDesc flatDesc, String statement) {
-        return "INSERT OVERWRITE TABLE " + flatDesc.getTableName() + " " + generateSelectDataStatement(flatDesc)
-                + statement + ";\n";
+        return "INSERT OVERWRITE TABLE " + flatDesc.getTableName() + " " + generateSelectDataStatement(flatDesc) + statement + ";\n";
     }
 
     public static String generateSelectDataStatement(IJoinedFlatTableDesc flatDesc) {
@@ -123,8 +120,7 @@ public class JoinedFlatTable {
         final StringBuilder sql = new StringBuilder();
         final TableRef rootTbl = flatDesc.getDataModel().getRootFactTable();
         sql.append("dfs -mkdir -p " + outputDir + ";\n");
-        sql.append("INSERT OVERWRITE DIRECTORY '" + outputDir + "' SELECT count(*) FROM " + rootTbl.getTableIdentity()
-                + " " + rootTbl.getAlias() + "\n");
+        sql.append("INSERT OVERWRITE DIRECTORY '" + outputDir + "' SELECT count(*) FROM " + rootTbl.getTableIdentity() + " " + rootTbl.getAlias() + "\n");
         appendWhereStatement(flatDesc, sql);
         return sql.toString();
     }
@@ -195,8 +191,7 @@ public class JoinedFlatTable {
 
                 if (!(dateStart == 0 && dateEnd == Long.MAX_VALUE)) {
                     whereBuilder.append(hasCondition ? " AND (" : " (");
-                    whereBuilder.append(partDesc.getPartitionConditionBuilder().buildDateRangeCondition(partDesc,
-                            dateStart, dateEnd));
+                    whereBuilder.append(partDesc.getPartitionConditionBuilder().buildDateRangeCondition(partDesc, dateStart, dateEnd));
                     whereBuilder.append(")\n");
                     hasCondition = true;
                 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/common/PatternedLogger.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/common/PatternedLogger.java b/core-job/src/main/java/org/apache/kylin/job/common/PatternedLogger.java
index 3b007f6..8be5d02 100644
--- a/core-job/src/main/java/org/apache/kylin/job/common/PatternedLogger.java
+++ b/core-job/src/main/java/org/apache/kylin/job/common/PatternedLogger.java
@@ -37,23 +37,21 @@ public class PatternedLogger extends BufferedLogger {
     private static final Pattern PATTERN_APP_ID = Pattern.compile("Submitted application (.*?) to ResourceManager");
     private static final Pattern PATTERN_APP_URL = Pattern.compile("The url to track the job: (.*)");
     private static final Pattern PATTERN_JOB_ID = Pattern.compile("Running job: (.*)");
-    private static final Pattern PATTERN_HDFS_BYTES_WRITTEN = Pattern
-            .compile("(?:HD|MAPR)FS: Number of bytes written=(\\d+)");
+    private static final Pattern PATTERN_HDFS_BYTES_WRITTEN = Pattern.compile("(?:HD|MAPR)FS: Number of bytes written=(\\d+)");
     private static final Pattern PATTERN_SOURCE_RECORDS_COUNT = Pattern.compile("Map input records=(\\d+)");
     private static final Pattern PATTERN_SOURCE_RECORDS_SIZE = Pattern.compile("(?:HD|MAPR)FS Read: (\\d+) HDFS Write");
 
     // hive
     private static final Pattern PATTERN_HIVE_APP_ID_URL = Pattern.compile("Starting Job = (.*?), Tracking URL = (.*)");
-    private static final Pattern PATTERN_HIVE_BYTES_WRITTEN = Pattern
-            .compile("(?:HD|MAPR)FS Read: (\\d+) HDFS Write: (\\d+) SUCCESS");
+    private static final Pattern PATTERN_HIVE_BYTES_WRITTEN = Pattern.compile("(?:HD|MAPR)FS Read: (\\d+) HDFS Write: (\\d+) SUCCESS");
 
-    private static final Pattern PATTERN_HIVE_APP_ID_URL_2 = Pattern
-            .compile("Executing on YARN cluster with App id  (.*?)");
+    private static final Pattern PATTERN_HIVE_APP_ID_URL_2 = Pattern.compile("Executing on YARN cluster with App id  (.*?)");
 
     // spark
     private static final Pattern PATTERN_SPARK_APP_ID = Pattern.compile("Submitted application (.*?)");
     private static final Pattern PATTERN_SPARK_APP_URL = Pattern.compile("tracking URL: (.*)");
 
+
     public PatternedLogger(Logger wrappedLogger) {
         super(wrappedLogger);
     }

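The one-line Pattern constants restored above are the regexes PatternedLogger uses to scrape job ids, tracking URLs and byte counts out of MapReduce, Hive and Spark console output (ShellExecutable later collects the result via getInfo()). A minimal, self-contained sketch of how one such pattern is typically applied; the sample log line and the printed label are illustrative assumptions, not taken from the Kylin sources.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Minimal sketch: applying one of the patterns above to a sample log line.
    // The sample line and the "yarn app id" label are assumptions for illustration.
    public class PatternSketch {
        private static final Pattern PATTERN_APP_ID =
                Pattern.compile("Submitted application (.*?) to ResourceManager");

        public static void main(String[] args) {
            String line = "Submitted application application_1496634560000_0001 to ResourceManager";
            Matcher m = PATTERN_APP_ID.matcher(line);
            if (m.find()) {
                // PatternedLogger would stash the captured group under a job-info key
                System.out.println("yarn app id: " + m.group(1));
            }
        }
    }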
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/common/ShellExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/common/ShellExecutable.java b/core-job/src/main/java/org/apache/kylin/job/common/ShellExecutable.java
index 2b69d8f..9f431b0 100644
--- a/core-job/src/main/java/org/apache/kylin/job/common/ShellExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/common/ShellExecutable.java
@@ -19,7 +19,6 @@
 package org.apache.kylin.job.common;
 
 import java.io.IOException;
-
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
@@ -44,11 +43,9 @@ public class ShellExecutable extends AbstractExecutable {
         try {
             logger.info("executing:" + getCmd());
             final PatternedLogger patternedLogger = new PatternedLogger(logger);
-            final Pair<Integer, String> result = context.getConfig().getCliCommandExecutor().execute(getCmd(),
-                    patternedLogger);
+            final Pair<Integer, String> result = context.getConfig().getCliCommandExecutor().execute(getCmd(), patternedLogger);
             getManager().addJobInfo(getId(), patternedLogger.getInfo());
-            return new ExecuteResult(result.getFirst() == 0 ? ExecuteResult.State.SUCCEED : ExecuteResult.State.FAILED,
-                    result.getSecond());
+            return new ExecuteResult(result.getFirst() == 0 ? ExecuteResult.State.SUCCEED : ExecuteResult.State.FAILED, result.getSecond());
         } catch (IOException e) {
             logger.error("job:" + getId() + " execute finished with exception", e);
             return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
index 6f30d06..d7f6292 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
@@ -57,11 +57,5 @@ public final class ExecutableConstants {
     public static final String STEP_NAME_GARBAGE_COLLECTION_HBASE = "Garbage Collection on HBase";
     public static final String STEP_NAME_GARBAGE_COLLECTION_HDFS = "Garbage Collection on HDFS";
     public static final String STEP_NAME_REDISTRIBUTE_FLAT_HIVE_TABLE = "Redistribute Flat Hive Table";
-    public static final String NOTIFY_EMAIL_TEMPLATE = "<div><b>Build Result of Job ${job_name}</b><pre><ul>"
-            + "<li>Build Result: <b>${result}</b></li>" + "<li>Job Engine: ${job_engine}</li>"
-            + "<li>Env: ${env_name}</li>" + "<li>Project: ${project_name}</li>" + "<li>Cube Name: ${cube_name}</li>"
-            + "<li>Source Records Count: ${source_records_count}</li>" + "<li>Start Time: ${start_time}</li>"
-            + "<li>Duration: ${duration}</li>" + "<li>MR Waiting: ${mr_waiting}</li>"
-            + "<li>Last Update Time: ${last_update_time}</li>" + "<li>Submitter: ${submitter}</li>"
-            + "<li>Error Log: ${error_log}</li>" + "</ul></pre><div/>";
+    public static final String NOTIFY_EMAIL_TEMPLATE = "<div><b>Build Result of Job ${job_name}</b><pre><ul>" + "<li>Build Result: <b>${result}</b></li>" + "<li>Job Engine: ${job_engine}</li>" + "<li>Env: ${env_name}</li>" + "<li>Project: ${project_name}</li>" + "<li>Cube Name: ${cube_name}</li>" + "<li>Source Records Count: ${source_records_count}</li>" + "<li>Start Time: ${start_time}</li>" + "<li>Duration: ${duration}</li>" + "<li>MR Waiting: ${mr_waiting}</li>" + "<li>Last Update Time: ${last_update_time}</li>" + "<li>Submitter: ${submitter}</li>" + "<li>Error Log: ${error_log}</li>" + "</ul></pre><div/>";
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/constant/JobStatusEnum.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/JobStatusEnum.java b/core-job/src/main/java/org/apache/kylin/job/constant/JobStatusEnum.java
index 8a9693d..4c6ac97 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/JobStatusEnum.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/JobStatusEnum.java
@@ -43,8 +43,7 @@ public enum JobStatusEnum {
     }
 
     public boolean isComplete() {
-        return code == JobStatusEnum.FINISHED.getCode() || code == JobStatusEnum.ERROR.getCode()
-                || code == JobStatusEnum.DISCARDED.getCode();
+        return code == JobStatusEnum.FINISHED.getCode() || code == JobStatusEnum.ERROR.getCode() || code == JobStatusEnum.DISCARDED.getCode();
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/constant/JobStepStatusEnum.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/JobStepStatusEnum.java b/core-job/src/main/java/org/apache/kylin/job/constant/JobStepStatusEnum.java
index d83b322..08cd138 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/JobStepStatusEnum.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/JobStepStatusEnum.java
@@ -42,8 +42,7 @@ public enum JobStepStatusEnum {
     }
 
     public boolean isComplete() {
-        return code == JobStepStatusEnum.FINISHED.getCode() || code == JobStepStatusEnum.ERROR.getCode()
-                || code == JobStepStatusEnum.DISCARDED.getCode();
+        return code == JobStepStatusEnum.FINISHED.getCode() || code == JobStepStatusEnum.ERROR.getCode() || code == JobStepStatusEnum.DISCARDED.getCode();
     }
 
     public boolean isRunable() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
index b395ff8..70799d8 100644
--- a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
+++ b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
@@ -42,8 +42,7 @@ import com.google.common.collect.Lists;
 public class ExecutableDao {
 
     private static final Serializer<ExecutablePO> JOB_SERIALIZER = new JsonSerializer<ExecutablePO>(ExecutablePO.class);
-    private static final Serializer<ExecutableOutputPO> JOB_OUTPUT_SERIALIZER = new JsonSerializer<ExecutableOutputPO>(
-            ExecutableOutputPO.class);
+    private static final Serializer<ExecutableOutputPO> JOB_OUTPUT_SERIALIZER = new JsonSerializer<ExecutableOutputPO>(ExecutableOutputPO.class);
     private static final Logger logger = LoggerFactory.getLogger(ExecutableDao.class);
     private static final ConcurrentMap<KylinConfig, ExecutableDao> CACHE = new ConcurrentHashMap<KylinConfig, ExecutableDao>();
 
@@ -107,8 +106,7 @@ public class ExecutableDao {
 
     public List<ExecutableOutputPO> getJobOutputs() throws PersistentException {
         try {
-            return store.getAllResources(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT, ExecutableOutputPO.class,
-                    JOB_OUTPUT_SERIALIZER);
+            return store.getAllResources(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT, ExecutableOutputPO.class, JOB_OUTPUT_SERIALIZER);
         } catch (IOException e) {
             logger.error("error get all Jobs:", e);
             throw new PersistentException(e);
@@ -117,8 +115,7 @@ public class ExecutableDao {
 
     public List<ExecutableOutputPO> getJobOutputs(long timeStart, long timeEndExclusive) throws PersistentException {
         try {
-            return store.getAllResources(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT, timeStart, timeEndExclusive,
-                    ExecutableOutputPO.class, JOB_OUTPUT_SERIALIZER);
+            return store.getAllResources(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT, timeStart, timeEndExclusive, ExecutableOutputPO.class, JOB_OUTPUT_SERIALIZER);
         } catch (IOException e) {
             logger.error("error get all Jobs:", e);
             throw new PersistentException(e);
@@ -136,8 +133,7 @@ public class ExecutableDao {
 
     public List<ExecutablePO> getJobs(long timeStart, long timeEndExclusive) throws PersistentException {
         try {
-            return store.getAllResources(ResourceStore.EXECUTE_RESOURCE_ROOT, timeStart, timeEndExclusive,
-                    ExecutablePO.class, JOB_SERIALIZER);
+            return store.getAllResources(ResourceStore.EXECUTE_RESOURCE_ROOT, timeStart, timeEndExclusive, ExecutablePO.class, JOB_SERIALIZER);
         } catch (IOException e) {
             logger.error("error get all Jobs:", e);
             throw new PersistentException(e);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java b/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
index 43789d6..c9ac583 100644
--- a/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
+++ b/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
@@ -59,8 +59,7 @@ public class JobEngineConfig {
 
         File jobConfig = getJobConfig(hadoopJobConfFile);
         if (jobConfig == null || !jobConfig.exists()) {
-            logger.warn(
-                    "fail to locate " + hadoopJobConfFile + ", trying to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
+            logger.warn("fail to locate " + hadoopJobConfFile + ", trying to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");
             jobConfig = getJobConfig(HADOOP_JOB_CONF_FILENAME + ".xml");
             if (jobConfig == null || !jobConfig.exists()) {
                 logger.error("fail to locate " + HADOOP_JOB_CONF_FILENAME + ".xml");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/exception/IllegalStateTranferException.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/exception/IllegalStateTranferException.java b/core-job/src/main/java/org/apache/kylin/job/exception/IllegalStateTranferException.java
index 67ff03f..f19b0ca 100644
--- a/core-job/src/main/java/org/apache/kylin/job/exception/IllegalStateTranferException.java
+++ b/core-job/src/main/java/org/apache/kylin/job/exception/IllegalStateTranferException.java
@@ -39,8 +39,7 @@ public class IllegalStateTranferException extends RuntimeException {
         super(cause);
     }
 
-    public IllegalStateTranferException(String message, Throwable cause, boolean enableSuppression,
-            boolean writableStackTrace) {
+    public IllegalStateTranferException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
         super(message, cause, enableSuppression, writableStackTrace);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index 63a36b5..1f1be41 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -60,16 +60,16 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     public AbstractExecutable() {
         setId(UUID.randomUUID().toString());
     }
-
+    
     protected void initConfig(KylinConfig config) {
         Preconditions.checkState(this.config == null || this.config == config);
         this.config = config;
     }
-
+    
     protected KylinConfig getConfig() {
         return config;
     }
-
+    
     protected ExecutableManager getManager() {
         return ExecutableManager.getInstance(config);
     }
@@ -391,7 +391,6 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
 
     @Override
     public String toString() {
-        return Objects.toStringHelper(this).add("id", getId()).add("name", getName()).add("state", getStatus())
-                .toString();
+        return Objects.toStringHelper(this).add("id", getId()).add("name", getName()).add("state", getStatus()).toString();
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
index 817565b..8bcaaad 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
@@ -43,7 +43,7 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
             sub.initConfig(config);
         }
     }
-
+    
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         List<? extends Executable> executables = getTasks();
@@ -58,8 +58,7 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
                 // the job is paused
                 break;
             } else if (state == ExecutableState.ERROR) {
-                throw new IllegalStateException(
-                        "invalid subtask state, subtask:" + subTask.getName() + ", state:" + subTask.getStatus());
+                throw new IllegalStateException("invalid subtask state, subtask:" + subTask.getName() + ", state:" + subTask.getStatus());
             }
             if (subTask.isRunnable()) {
                 return subTask.execute(context);
@@ -89,7 +88,7 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
     @Override
     protected void onExecuteFinished(ExecuteResult result, ExecutableContext executableContext) {
         ExecutableManager mgr = getManager();
-
+        
         if (isDiscarded()) {
             setEndTime(System.currentTimeMillis());
             notifyUserStatusChange(executableContext, ExecutableState.DISCARDED);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
index 4dc22b7..2272582 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
@@ -18,10 +18,6 @@
 
 package org.apache.kylin.job.execution;
 
-import static org.apache.kylin.job.constant.ExecutableConstants.MR_JOB_ID;
-import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_ID;
-import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_URL;
-
 import java.lang.reflect.Constructor;
 import java.util.HashMap;
 import java.util.IllegalFormatException;
@@ -46,6 +42,10 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import static org.apache.kylin.job.constant.ExecutableConstants.MR_JOB_ID;
+import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_ID;
+import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_URL;
+
 /**
  */
 public class ExecutableManager {
@@ -235,8 +235,7 @@ public class ExecutableManager {
      * @param expectedClass
      * @return
      */
-    public List<AbstractExecutable> getAllAbstractExecutables(long timeStartInMillis, long timeEndInMillis,
-            Class<? extends AbstractExecutable> expectedClass) {
+    public List<AbstractExecutable> getAllAbstractExecutables(long timeStartInMillis, long timeEndInMillis, Class<? extends AbstractExecutable> expectedClass) {
         try {
             List<AbstractExecutable> ret = Lists.newArrayList();
             for (ExecutablePO po : executableDao.getJobs(timeStartInMillis, timeEndInMillis)) {
@@ -383,8 +382,7 @@ public class ExecutableManager {
             ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus());
             if (newStatus != null && oldStatus != newStatus) {
                 if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) {
-                    throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:"
-                            + newStatus + ", job id: " + jobId);
+                    throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus + ", job id: " + jobId);
                 }
                 jobOutput.setStatus(newStatus.toString());
             }
@@ -484,8 +482,7 @@ public class ExecutableManager {
         }
     }
 
-    private AbstractExecutable parseToAbstract(ExecutablePO executablePO,
-            Class<? extends AbstractExecutable> expectedClass) {
+    private AbstractExecutable parseToAbstract(ExecutablePO executablePO, Class<? extends AbstractExecutable> expectedClass) {
         if (executablePO == null) {
             logger.warn("executablePO is null");
             return null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableState.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableState.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableState.java
index 0d3258e..910bd7e 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableState.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableState.java
@@ -36,14 +36,12 @@ public enum ExecutableState {
     private static Multimap<ExecutableState, ExecutableState> VALID_STATE_TRANSFER;
 
     static {
-        VALID_STATE_TRANSFER = Multimaps.newSetMultimap(
-                Maps.<ExecutableState, Collection<ExecutableState>> newEnumMap(ExecutableState.class),
-                new Supplier<Set<ExecutableState>>() {
-                    @Override
-                    public Set<ExecutableState> get() {
-                        return new CopyOnWriteArraySet<ExecutableState>();
-                    }
-                });
+        VALID_STATE_TRANSFER = Multimaps.newSetMultimap(Maps.<ExecutableState, Collection<ExecutableState>> newEnumMap(ExecutableState.class), new Supplier<Set<ExecutableState>>() {
+            @Override
+            public Set<ExecutableState> get() {
+                return new CopyOnWriteArraySet<ExecutableState>();
+            }
+        });
 
         //scheduler
         VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.RUNNING);
@@ -66,9 +64,11 @@ public enum ExecutableState {
         VALID_STATE_TRANSFER.put(ExecutableState.ERROR, ExecutableState.DISCARDED);
         VALID_STATE_TRANSFER.put(ExecutableState.ERROR, ExecutableState.READY);
 
+
         VALID_STATE_TRANSFER.put(ExecutableState.READY, ExecutableState.STOPPED);
         VALID_STATE_TRANSFER.put(ExecutableState.RUNNING, ExecutableState.STOPPED);
 
+
         //rollback
         VALID_STATE_TRANSFER.put(ExecutableState.SUCCEED, ExecutableState.READY);
 

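The static block restored above builds a Guava SetMultimap keyed by source state, one entry per legal transition. A minimal sketch of the same structure, assuming the standard Guava API; the three-state enum and the containsEntry lookup are illustrative stand-ins for Kylin's own isValidStateTransfer, which is not shown in this hunk.

    import java.util.Collection;
    import java.util.EnumMap;
    import java.util.HashSet;
    import java.util.Set;

    import com.google.common.base.Supplier;
    import com.google.common.collect.Multimaps;
    import com.google.common.collect.SetMultimap;

    // Minimal sketch of a state-transition table backed by a Guava SetMultimap,
    // mirroring the structure in ExecutableState. Only READY/RUNNING/SUCCEED are shown.
    public class StateTransferSketch {
        enum State { READY, RUNNING, SUCCEED }

        private static final SetMultimap<State, State> VALID = Multimaps.newSetMultimap(
                new EnumMap<State, Collection<State>>(State.class),
                new Supplier<Set<State>>() {
                    @Override
                    public Set<State> get() {
                        return new HashSet<State>();
                    }
                });

        static {
            VALID.put(State.READY, State.RUNNING);
            VALID.put(State.RUNNING, State.SUCCEED);
        }

        static boolean isValidTransfer(State from, State to) {
            return VALID.containsEntry(from, to);
        }

        public static void main(String[] args) {
            System.out.println(isValidTransfer(State.READY, State.RUNNING));   // true
            System.out.println(isValidTransfer(State.SUCCEED, State.RUNNING)); // false
        }
    }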
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
index 128fa35..8b6b5aa 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
@@ -121,9 +121,7 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
                         logger.warn(jobDesc + " fail to schedule", ex);
                     }
                 }
-                logger.info("Job Fetcher: " + nRunning + " should running, " + runningJobs.size() + " actual running, "
-                        + nStopped + " stopped, " + nReady + " ready, " + nSUCCEED + " already succeed, " + nError
-                        + " error, " + nDiscarded + " discarded, " + nOthers + " others");
+                logger.info("Job Fetcher: " + nRunning + " should running, " + runningJobs.size() + " actual running, " + nStopped + " stopped, " + nReady + " ready, " + nSUCCEED + " already succeed, " + nError + " error, " + nDiscarded + " discarded, " + nOthers + " others");
             } catch (Exception e) {
                 logger.warn("Job Fetcher caught a exception " + e);
             }
@@ -191,7 +189,7 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
     @Override
     public synchronized void init(JobEngineConfig jobEngineConfig, JobLock lock) throws SchedulerException {
         jobLock = lock;
-
+        
         String serverMode = jobEngineConfig.getConfig().getServerMode();
         if (!("job".equals(serverMode.toLowerCase()) || "all".equals(serverMode.toLowerCase()))) {
             logger.info("server mode: " + serverMode + ", no need to run job scheduler");
@@ -215,15 +213,13 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
         //load all executable, set them to a consistent status
         fetcherPool = Executors.newScheduledThreadPool(1);
         int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit();
-        jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
-                new SynchronousQueue<Runnable>());
+        jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue<Runnable>());
         context = new DefaultContext(Maps.<String, Executable> newConcurrentMap(), jobEngineConfig.getConfig());
 
         executableManager.resumeAllRunningJobs();
 
         fetcher = new FetcherRunner();
-        fetcherPool.scheduleAtFixedRate(fetcher, 10, ExecutableConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS,
-                TimeUnit.SECONDS);
+        fetcherPool.scheduleAtFixedRate(fetcher, 10, ExecutableConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS);
         hasStarted = true;
     }
 

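The scheduler wiring restored above pairs a single-threaded fetcher, polled on a fixed interval, with a worker pool whose queue is a SynchronousQueue, so a hand-off only succeeds when a worker thread is free. A minimal sketch of that wiring; the pool size, interval, task body and the rejection handling are assumptions for illustration, not code from this diff.

    import java.util.concurrent.Executors;
    import java.util.concurrent.RejectedExecutionException;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.SynchronousQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    // Minimal sketch: a scheduled single-thread fetcher feeding a fixed-size pool
    // backed by a SynchronousQueue, so submissions beyond the core size are
    // rejected rather than queued.
    public class SchedulerSketch {
        public static void main(String[] args) throws InterruptedException {
            int corePoolSize = 2; // illustrative; Kylin reads this from JobEngineConfig
            final ThreadPoolExecutor jobPool = new ThreadPoolExecutor(
                    corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
                    new SynchronousQueue<Runnable>());

            ScheduledExecutorService fetcherPool = Executors.newScheduledThreadPool(1);
            fetcherPool.scheduleAtFixedRate(new Runnable() {
                @Override
                public void run() {
                    try {
                        jobPool.execute(new Runnable() {
                            @Override
                            public void run() {
                                // a job step would run here
                            }
                        });
                    } catch (RejectedExecutionException full) {
                        // all workers busy; try again on the next fetch cycle
                    }
                }
            }, 0, 1, TimeUnit.SECONDS);

            Thread.sleep(3000);
            fetcherPool.shutdownNow();
            jobPool.shutdownNow();
        }
    }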

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopShellExecutable.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopShellExecutable.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopShellExecutable.java
index c6b8f56..ce19500 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopShellExecutable.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopShellExecutable.java
@@ -52,8 +52,7 @@ public class HadoopShellExecutable extends AbstractExecutable {
         Preconditions.checkNotNull(mapReduceJobClass);
         Preconditions.checkNotNull(params);
         try {
-            final Constructor<? extends AbstractHadoopJob> constructor = ClassUtil
-                    .forName(mapReduceJobClass, AbstractHadoopJob.class).getConstructor();
+            final Constructor<? extends AbstractHadoopJob> constructor = ClassUtil.forName(mapReduceJobClass, AbstractHadoopJob.class).getConstructor();
             final AbstractHadoopJob job = constructor.newInstance();
             String[] args = params.trim().split("\\s+");
             logger.info("parameters of the HadoopShellExecutable: {}", params);
@@ -69,8 +68,7 @@ public class HadoopShellExecutable extends AbstractExecutable {
                 result = 2;
             }
             log.append("result code:").append(result);
-            return result == 0 ? new ExecuteResult(ExecuteResult.State.SUCCEED, log.toString())
-                    : new ExecuteResult(ExecuteResult.State.FAILED, log.toString());
+            return result == 0 ? new ExecuteResult(ExecuteResult.State.SUCCEED, log.toString()) : new ExecuteResult(ExecuteResult.State.FAILED, log.toString());
         } catch (ReflectiveOperationException e) {
             logger.error("error getMapReduceJobClass, class name:" + getParam(KEY_MR_JOB), e);
             return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusChecker.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusChecker.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusChecker.java
index 8e2d634..d32928f 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusChecker.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusChecker.java
@@ -55,11 +55,9 @@ public class HadoopStatusChecker {
         }
         JobStepStatusEnum status = null;
         try {
-            final Pair<RMAppState, FinalApplicationStatus> result = new HadoopStatusGetter(yarnUrl, mrJobID)
-                    .get(useKerberosAuth);
+            final Pair<RMAppState, FinalApplicationStatus> result = new HadoopStatusGetter(yarnUrl, mrJobID).get(useKerberosAuth);
             logger.debug("State of Hadoop job: " + mrJobID + ":" + result.getLeft() + "-" + result.getRight());
-            output.append(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S").format(new Date()) + " - State of Hadoop job: "
-                    + mrJobID + ":" + result.getLeft() + " - " + result.getRight() + "\n");
+            output.append(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S").format(new Date()) + " - State of Hadoop job: " + mrJobID + ":" + result.getLeft() + " - " + result.getRight() + "\n");
 
             switch (result.getRight()) {
             case SUCCEEDED:

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/JobInfoConverter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/JobInfoConverter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/JobInfoConverter.java
index f20e0a1..189e019 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/JobInfoConverter.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/JobInfoConverter.java
@@ -80,9 +80,7 @@ public class JobInfoConverter {
         }
         if (task instanceof MapReduceExecutable) {
             result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
-            result.setExecWaitTime(
-                    AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L)
-                            / 1000);
+            result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
         }
         if (task instanceof HadoopShellExecutable) {
             result.setExecCmd(((HadoopShellExecutable) task).getJobParams());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/MapReduceExecutable.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/MapReduceExecutable.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/MapReduceExecutable.java
index 02c8f45..07efb34 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/MapReduceExecutable.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/MapReduceExecutable.java
@@ -108,8 +108,7 @@ public class MapReduceExecutable extends AbstractExecutable {
                 job = new Cluster(conf).getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
                 logger.info("mr_job_id:" + extra.get(ExecutableConstants.MR_JOB_ID) + " resumed");
             } else {
-                final Constructor<? extends AbstractHadoopJob> constructor = ClassUtil
-                        .forName(mapReduceJobClass, AbstractHadoopJob.class).getConstructor();
+                final Constructor<? extends AbstractHadoopJob> constructor = ClassUtil.forName(mapReduceJobClass, AbstractHadoopJob.class).getConstructor();
                 final AbstractHadoopJob hadoopJob = constructor.newInstance();
                 hadoopJob.setConf(HadoopUtil.getCurrentConfiguration());
                 hadoopJob.setAsync(true); // so the ToolRunner.run() returns right away
@@ -155,8 +154,7 @@ public class MapReduceExecutable extends AbstractExecutable {
                     mgr.updateJobOutput(getId(), ExecutableState.ERROR, hadoopCmdOutput.getInfo(), "killed by admin");
                     return new ExecuteResult(ExecuteResult.State.FAILED, "killed by admin");
                 }
-                if (status == JobStepStatusEnum.WAITING && (newStatus == JobStepStatusEnum.FINISHED
-                        || newStatus == JobStepStatusEnum.ERROR || newStatus == JobStepStatusEnum.RUNNING)) {
+                if (status == JobStepStatusEnum.WAITING && (newStatus == JobStepStatusEnum.FINISHED || newStatus == JobStepStatusEnum.ERROR || newStatus == JobStepStatusEnum.RUNNING)) {
                     final long waitTime = System.currentTimeMillis() - getStartTime();
                     setMapReduceWaitTime(waitTime);
                 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/NDCuboidBuilder.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/NDCuboidBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/NDCuboidBuilder.java
index dfb0b8b..9ab42ea 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/NDCuboidBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/NDCuboidBuilder.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.engine.mr.common;
 
-import java.io.Serializable;
-
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.SplittedBytes;
@@ -32,6 +30,8 @@ import org.apache.kylin.cube.kv.RowKeyEncoderProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.Serializable;
+
 /**
  */
 public class NDCuboidBuilder implements Serializable {
@@ -57,6 +57,7 @@ public class NDCuboidBuilder implements Serializable {
         this.rowKeySplitter = new RowKeySplitter(cubeSegment, 65, 256);
     }
 
+
     public Pair<Integer, ByteArray> buildKey(Cuboid parentCuboid, Cuboid childCuboid, SplittedBytes[] splitBuffers) {
         RowKeyEncoder rowkeyEncoder = rowKeyEncoderProvider.getRowkeyEncoder(childCuboid);
 
@@ -66,7 +67,7 @@ public class NDCuboidBuilder implements Serializable {
         long mask = Long.highestOneBit(parentCuboid.getId());
         long parentCuboidId = parentCuboid.getId();
         long childCuboidId = childCuboid.getId();
-        long parentCuboidIdActualLength = (long) Long.SIZE - Long.numberOfLeadingZeros(parentCuboid.getId());
+        long parentCuboidIdActualLength = (long)Long.SIZE - Long.numberOfLeadingZeros(parentCuboid.getId());
         int index = rowKeySplitter.getBodySplitOffset(); // skip shard and cuboidId
         for (int i = 0; i < parentCuboidIdActualLength; i++) {
             if ((mask & parentCuboidId) > 0) {// if the this bit position equals

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
index e6f976a..93e413b 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
@@ -18,10 +18,6 @@
 
 package org.apache.kylin.engine.mr.steps;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Bytes;
@@ -38,6 +34,10 @@ import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
 /**
  */
 abstract public class BaseCuboidMapperBase<KEYIN, VALUEIN> extends KylinMapper<KEYIN, VALUEIN, Text, Text> {
@@ -66,12 +66,12 @@ abstract public class BaseCuboidMapperBase<KEYIN, VALUEIN> extends KylinMapper<K
         cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
         cubeDesc = cube.getDescriptor();
         cubeSegment = cube.getSegmentById(segmentID);
-        CubeJoinedFlatTableEnrich intermediateTableDesc = new CubeJoinedFlatTableEnrich(
-                EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
+        CubeJoinedFlatTableEnrich intermediateTableDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
         baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, cubeDesc, cubeSegment, intermediateTableDesc);
 
     }
 
+
     protected void outputKV(String[] flatRow, Context context) throws IOException, InterruptedException {
         byte[] rowKey = baseCuboidBuilder.buildKey(flatRow);
         outputKey.set(rowKey, 0, rowKey.length);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
index 6b5c8d1..98ebbb4 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
@@ -75,15 +75,13 @@ public class CreateDictionaryJob extends AbstractHadoopJob {
                 Path colDir = new Path(factColumnsInputPath, col.getIdentity());
                 FileSystem fs = HadoopUtil.getWorkingFileSystem();
 
-                Path dictFile = HadoopUtil.getFilterOnlyPath(fs, colDir,
-                        col.getName() + FactDistinctColumnsReducer.DICT_FILE_POSTFIX);
+                Path dictFile = HadoopUtil.getFilterOnlyPath(fs, colDir, col.getName() + FactDistinctColumnsReducer.DICT_FILE_POSTFIX);
                 if (dictFile == null) {
                     logger.info("Dict for '" + col.getName() + "' not pre-built.");
                     return null;
                 }
 
-                try (SequenceFile.Reader reader = new SequenceFile.Reader(HadoopUtil.getCurrentConfiguration(),
-                        SequenceFile.Reader.file(dictFile))) {
+                try (SequenceFile.Reader reader = new SequenceFile.Reader(HadoopUtil.getCurrentConfiguration(), SequenceFile.Reader.file(dictFile))) {
                     NullWritable key = NullWritable.get();
                     BytesWritable value = new BytesWritable();
                     reader.next(key, value);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CubingExecutableUtil.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CubingExecutableUtil.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CubingExecutableUtil.java
index acc224e..65c5869 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CubingExecutableUtil.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CubingExecutableUtil.java
@@ -23,17 +23,17 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import javax.annotation.Nullable;
-
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
 import org.apache.commons.lang.StringUtils;
+
+import com.google.common.collect.Lists;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.job.execution.ExecutableContext;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
+import javax.annotation.Nullable;
 
 public class CubingExecutableUtil {
 
@@ -78,14 +78,13 @@ public class CubingExecutableUtil {
         final CubeInstance cube = mgr.getCube(cubeName);
 
         if (cube == null) {
-            String cubeList = StringUtils
-                    .join(Iterables.transform(mgr.listAllCubes(), new Function<CubeInstance, String>() {
-                        @Nullable
-                        @Override
-                        public String apply(@Nullable CubeInstance input) {
-                            return input.getName();
-                        }
-                    }).iterator(), ",");
+            String cubeList = StringUtils.join(Iterables.transform(mgr.listAllCubes(), new Function<CubeInstance, String>() {
+                @Nullable
+                @Override
+                public String apply(@Nullable CubeInstance input) {
+                    return input.getName();
+                }
+            }).iterator(), ",");
 
             throw new IllegalStateException("target cube name: " + cubeName + " cube list: " + cubeList);
         }
@@ -93,14 +92,13 @@ public class CubingExecutableUtil {
         final CubeSegment newSegment = cube.getSegmentById(segmentId);
 
         if (newSegment == null) {
-            String segmentList = StringUtils
-                    .join(Iterables.transform(cube.getSegments(), new Function<CubeSegment, String>() {
-                        @Nullable
-                        @Override
-                        public String apply(@Nullable CubeSegment input) {
-                            return input.getUuid();
-                        }
-                    }).iterator(), ",");
+            String segmentList = StringUtils.join(Iterables.transform(cube.getSegments(), new Function<CubeSegment, String>() {
+                @Nullable
+                @Override
+                public String apply(@Nullable CubeSegment input) {
+                    return input.getUuid();
+                }
+            }).iterator(), ",");
 
             throw new IllegalStateException("target segment id: " + segmentId + " segment list: " + segmentList);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
index fcea420..6a8ba4c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
@@ -93,8 +93,7 @@ public class CuboidJob extends AbstractHadoopJob {
             CubeSegment segment = cube.getSegmentById(segmentID);
 
             if (checkSkip(cubingJobId)) {
-                logger.info(
-                        "Skip job " + getOptionValue(OPTION_JOB_NAME) + " for " + segmentID + "[" + segmentID + "]");
+                logger.info("Skip job " + getOptionValue(OPTION_JOB_NAME) + " for " + segmentID + "[" + segmentID + "]");
                 return 0;
             }
 
@@ -142,8 +141,7 @@ public class CuboidJob extends AbstractHadoopJob {
 
         if ("FLAT_TABLE".equals(input)) {
             // base cuboid case
-            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg)
-                    .getFlatTableInputFormat();
+            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg).getFlatTableInputFormat();
             flatTableInputFormat.configureJob(job);
         } else {
             // n-dimension cuboid case

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
index f7e8e4b..495be77 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 
+import com.google.common.collect.Lists;
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeManager;
@@ -35,8 +36,6 @@ import org.apache.kylin.metadata.model.MeasureDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-
 /**
  * @author George Song (ysong1)
  * 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsCombiner.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsCombiner.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsCombiner.java
index 4a5cf07..a367bc6 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsCombiner.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsCombiner.java
@@ -26,8 +26,7 @@ import org.apache.kylin.engine.mr.KylinReducer;
 /**
  * @author yangli9
  */
-public class FactDistinctColumnsCombiner
-        extends KylinReducer<SelfDefineSortableKey, Text, SelfDefineSortableKey, Text> {
+public class FactDistinctColumnsCombiner extends KylinReducer<SelfDefineSortableKey, Text, SelfDefineSortableKey, Text> {
 
     @Override
     protected void setup(Context context) throws IOException {
@@ -35,8 +34,7 @@ public class FactDistinctColumnsCombiner
     }
 
     @Override
-    public void doReduce(SelfDefineSortableKey key, Iterable<Text> values, Context context)
-            throws IOException, InterruptedException {
+    public void doReduce(SelfDefineSortableKey key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
 
         // for hll, each key only has one output, no need to do local combine;
         // for normal col, values are empty text

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
index fa6c62f..ee0989a 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
@@ -94,9 +94,7 @@ public class FactDistinctColumnsJob extends AbstractHadoopJob {
             }
 
             if (reducerCount > 255) {
-                throw new IllegalArgumentException(
-                        "The max reducer number for FactDistinctColumnsJob is 255, but now it is " + reducerCount
-                                + ", decrease 'kylin.engine.mr.uhc-reducer-count'");
+                throw new IllegalArgumentException("The max reducer number for FactDistinctColumnsJob is 255, but now it is " + reducerCount + ", decrease 'kylin.engine.mr.uhc-reducer-count'");
             }
 
             job.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
@@ -154,14 +152,11 @@ public class FactDistinctColumnsJob extends AbstractHadoopJob {
         job.setNumReduceTasks(numberOfReducers);
 
         //make each reducer output to respective dir
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_COLUMN, SequenceFileOutputFormat.class,
-                NullWritable.class, Text.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_DICT, SequenceFileOutputFormat.class,
-                NullWritable.class, BytesWritable.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_STATISTICS, SequenceFileOutputFormat.class,
-                LongWritable.class, BytesWritable.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_PARTITION, TextOutputFormat.class,
-                NullWritable.class, LongWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_COLUMN, SequenceFileOutputFormat.class, NullWritable.class, Text.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_DICT, SequenceFileOutputFormat.class, NullWritable.class, BytesWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_STATISTICS, SequenceFileOutputFormat.class, LongWritable.class, BytesWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_PARTITION, TextOutputFormat.class, NullWritable.class, LongWritable.class);
+
 
         FileOutputFormat.setOutputPath(job, output);
         job.getConfiguration().set(BatchConstants.CFG_OUTPUT_PATH, output.toString());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
index 480ef95..713b7f7 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
@@ -43,6 +43,8 @@ import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hasher;
 import com.google.common.hash.Hashing;
 
+
+
 /**
  */
 public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperBase<KEYIN, Object> {
@@ -53,6 +55,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
         BYTES
     }
 
+
     protected boolean collectStatistics = false;
     protected CuboidScheduler cuboidScheduler = null;
     protected int nRowKey;
@@ -84,8 +87,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
         tmpbuf = ByteBuffer.allocate(4096);
         collectStatistics = Boolean.parseBoolean(context.getConfiguration().get(BatchConstants.CFG_STATISTICS_ENABLED));
         if (collectStatistics) {
-            samplingPercentage = Integer
-                    .parseInt(context.getConfiguration().get(BatchConstants.CFG_STATISTICS_SAMPLING_PERCENT));
+            samplingPercentage = Integer.parseInt(context.getConfiguration().get(BatchConstants.CFG_STATISTICS_SAMPLING_PERCENT));
             cuboidScheduler = new CuboidScheduler(cubeDesc);
             nRowKey = cubeDesc.getRowkey().getRowKeyColumns().length;
 
@@ -101,6 +103,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
                 allCuboidsHLL[i] = new HLLCounter(cubeDesc.getConfig().getCubeStatsHLLPrecision(), RegisterType.DENSE);
             }
 
+
             TblColRef partitionColRef = cubeDesc.getModel().getPartitionDesc().getPartitionDateColumnRef();
             if (partitionColRef != null) {
                 partitionColumnIndex = intermediateTableDesc.getColumnIndex(partitionColRef);
@@ -126,9 +129,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
                 isUsePutRowKeyToHllNewAlgorithm = true;
                 rowHashCodesLong = new long[nRowKey];
                 hf = Hashing.murmur3_128();
-                logger.info(
-                        "Found KylinVersion : {}. Use new algorithm for cuboid sampling. About the details of the new algorithm, please refer to KYLIN-2518",
-                        cubeDesc.getVersion());
+                logger.info("Found KylinVersion : {}. Use new algorithm for cuboid sampling. About the details of the new algorithm, please refer to KYLIN-2518", cubeDesc.getVersion());
             }
         }
 
@@ -159,7 +160,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
     public void doMap(KEYIN key, Object record, Context context) throws IOException, InterruptedException {
         Collection<String[]> rowCollection = flatTableInputFormat.parseMapperInput(record);
 
-        for (String[] row : rowCollection) {
+        for (String[] row: rowCollection) {
             context.getCounter(RawDataCounter.BYTES).increment(countSizeInBytes(row));
             for (int i = 0; i < factDictCols.size(); i++) {
                 String fieldValue = row[dictionaryColumnIndex[i]];
@@ -172,8 +173,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
                     reducerIndex = columnIndexToReducerBeginId.get(i);
                 } else {
                     //for the uhc
-                    reducerIndex = columnIndexToReducerBeginId.get(i)
-                            + (fieldValue.hashCode() & 0x7fffffff) % uhcReducerCount;
+                    reducerIndex = columnIndexToReducerBeginId.get(i) + (fieldValue.hashCode() & 0x7fffffff) % uhcReducerCount;
                 }
 
                 tmpbuf.clear();
@@ -192,8 +192,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
 
                 // log a few rows for troubleshooting
                 if (rowCount < 10) {
-                    logger.info("Sample output: " + factDictCols.get(i) + " '" + fieldValue + "' => reducer "
-                            + reducerIndex);
+                    logger.info("Sample output: " + factDictCols.get(i) + " '" + fieldValue + "' => reducer " + reducerIndex);
                 }
             }
 
@@ -302,6 +301,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
         }
     }
 
+
     private int countNewSize(int oldSize, int dataSize) {
         int newSize = oldSize * 2;
         while (newSize < dataSize) {

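The reducerIndex line restored above shards values of an ultra-high-cardinality column across a small block of reducers: the sign bit of hashCode() is masked so the modulo cannot go negative, and the result is offset by the first reducer assigned to that column. A tiny sketch with assumed constants:

    // Minimal sketch of the shard computation above; the offsets and the reducer
    // count per UHC column are illustrative assumptions.
    public class ReducerIndexSketch {
        public static void main(String[] args) {
            int columnReducerBeginId = 3;   // first reducer assigned to this column
            int uhcReducerCount = 4;        // reducers reserved per UHC column
            String fieldValue = "some-ultra-high-cardinality-value";

            int reducerIndex = columnReducerBeginId
                    + (fieldValue.hashCode() & 0x7fffffff) % uhcReducerCount;

            System.out.println("reducer index: " + reducerIndex); // always in [3, 6]
        }
    }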
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapperBase.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapperBase.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapperBase.java
index 2e55a52..458af69 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapperBase.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapperBase.java
@@ -43,8 +43,7 @@ import org.apache.kylin.metadata.model.TblColRef;
 
 /**
  */
-abstract public class FactDistinctColumnsMapperBase<KEYIN, VALUEIN>
-        extends KylinMapper<KEYIN, VALUEIN, SelfDefineSortableKey, Text> {
+abstract public class FactDistinctColumnsMapperBase<KEYIN, VALUEIN> extends KylinMapper<KEYIN, VALUEIN, SelfDefineSortableKey, Text> {
 
     protected String cubeName;
     protected CubeInstance cube;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
index f5f03e2..7f01c3a 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.io.output.ByteArrayOutputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
@@ -51,7 +52,6 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
@@ -112,8 +112,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
             isStatistics = true;
             baseCuboidRowCountInMappers = Lists.newArrayList();
             cuboidHLLMap = Maps.newHashMap();
-            samplingPercentage = Integer
-                    .parseInt(context.getConfiguration().get(BatchConstants.CFG_STATISTICS_SAMPLING_PERCENT));
+            samplingPercentage = Integer.parseInt(context.getConfiguration().get(BatchConstants.CFG_STATISTICS_SAMPLING_PERCENT));
             logger.info("Reducer " + taskId + " handling stats");
         } else if (collectStatistics && (taskId == numberOfTasks - 2)) {
             // partition col
@@ -134,7 +133,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
             if (cubeDesc.getDictionaryBuilderClass(col) != null) { // only works with default dictionary builder
                 buildDictInReducer = false;
             }
-            if (config.getUHCReducerCount() > 1) {
+            if(config.getUHCReducerCount() > 1) {
                 int[] uhcIndex = CubeManager.getInstance(config).getUHCIndex(cubeDesc);
                 int colIndex = reducerIdToColumnIndex.get(taskId);
                 if (uhcIndex[colIndex] == 1)
@@ -163,8 +162,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
     }
 
     @Override
-    public void doReduce(SelfDefineSortableKey skey, Iterable<Text> values, Context context)
-            throws IOException, InterruptedException {
+    public void doReduce(SelfDefineSortableKey skey, Iterable<Text> values, Context context) throws IOException, InterruptedException {
         Text key = skey.getText();
         if (isStatistics) {
             // for hll
@@ -245,12 +243,9 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
             // output written to baseDir/colName/colName.pci-r-00000 (etc)
             String partitionFileName = col.getIdentity() + "/" + col.getName() + PARTITION_COL_INFO_FILE_POSTFIX;
 
-            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new LongWritable(timeMinValue),
-                    partitionFileName);
-            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new LongWritable(timeMaxValue),
-                    partitionFileName);
-            logger.info("write partition info for col : " + col.getName() + "  minValue:" + timeMinValue + " maxValue:"
-                    + timeMaxValue);
+            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new LongWritable(timeMinValue), partitionFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new LongWritable(timeMaxValue), partitionFileName);
+            logger.info("write partition info for col : " + col.getName() + "  minValue:" + timeMinValue + " maxValue:" + timeMaxValue);
         }
     }
 
@@ -258,13 +253,11 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
         // output written to baseDir/colName/colName.rldict-r-00000 (etc)
         String dictFileName = col.getIdentity() + "/" + col.getName() + DICT_FILE_POSTFIX;
 
-        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
-                DataOutputStream outputStream = new DataOutputStream(baos);) {
+        try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream outputStream = new DataOutputStream(baos);) {
             outputStream.writeUTF(dict.getClass().getName());
             dict.write(outputStream);
 
-            mos.write(BatchConstants.CFG_OUTPUT_DICT, NullWritable.get(), new BytesWritable(baos.toByteArray()),
-                    dictFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_DICT, NullWritable.get(), new BytesWritable(baos.toByteArray()), dictFileName);
         }
     }
 
@@ -280,23 +273,19 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
             grandTotal += hll.getCountEstimate();
         }
         double mapperOverlapRatio = grandTotal == 0 ? 0 : (double) totalRowsBeforeMerge / grandTotal;
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1),
-                new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1), new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);
 
         // mapper number at key -2
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2),
-                new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2), new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);
 
         // sampling percentage at key 0
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L),
-                new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L), new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);
 
         for (long i : allCuboids) {
             valueBuf.clear();
             cuboidHLLMap.get(i).writeRegisters(valueBuf);
             valueBuf.flip();
-            mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i),
-                    new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i), new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
         }
     }
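The reducer hunk above writes cuboid statistics with a few reserved keys ahead of the per-cuboid rows: -1 carries the mapper overlap ratio, -2 the number of mappers, 0 the sampling percentage, and each following key is a cuboid id whose value holds the HLL registers. A hedged, in-memory sketch of that layout (a plain map stands in for Hadoop MultipleOutputs; the values are illustrative):

    import java.nio.charset.StandardCharsets;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class StatisticsLayoutSketch {
        public static void main(String[] args) {
            Map<Long, byte[]> statistics = new LinkedHashMap<>();
            statistics.put(-1L, bytes("1.25"));   // mapper overlap ratio
            statistics.put(-2L, bytes("8"));      // number of mappers
            statistics.put(0L, bytes("100"));     // sampling percentage
            statistics.put(255L, new byte[1024]); // cuboid id -> HLL register bytes
            statistics.forEach((k, v) -> System.out.println(k + " -> " + v.length + " bytes"));
        }

        private static byte[] bytes(String s) {
            return s.getBytes(StandardCharsets.UTF_8);
        }
    }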
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/HiveToBaseCuboidMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/HiveToBaseCuboidMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/HiveToBaseCuboidMapper.java
index d55b775..a04fb43 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/HiveToBaseCuboidMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/HiveToBaseCuboidMapper.java
@@ -40,7 +40,7 @@ public class HiveToBaseCuboidMapper<KEYIN> extends BaseCuboidMapperBase<KEYIN, O
     @Override
     public void doMap(KEYIN key, Object value, Context context) throws IOException, InterruptedException {
         Collection<String[]> rowCollection = flatTableInputFormat.parseMapperInput(value);
-        for (String[] row : rowCollection) {
+        for (String[] row: rowCollection) {
             try {
                 outputKV(row, context);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
index 7d8320a..73a2eb9 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
@@ -111,8 +111,7 @@ public class InMemCuboidJob extends AbstractHadoopJob {
             job.setOutputValueClass(Text.class);
 
             // set input
-            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(segment)
-                    .getFlatTableInputFormat();
+            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(segment).getFlatTableInputFormat();
             flatTableInputFormat.configureJob(job);
 
             // set output

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidMapper.java
index 65d1525..eee189c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidMapper.java
@@ -101,8 +101,7 @@ public class InMemCuboidMapper<KEYIN> extends KylinMapper<KEYIN, Object, ByteArr
         cubeBuilder.setConcurrentThreads(taskCount);
 
         ExecutorService executorService = Executors.newSingleThreadExecutor();
-        future = executorService.submit(
-                cubeBuilder.buildAsRunnable(queue, new MapContextGTRecordWriter(context, cubeDesc, cubeSegment)));
+        future = executorService.submit(cubeBuilder.buildAsRunnable(queue, new MapContextGTRecordWriter(context, cubeDesc, cubeSegment)));
 
     }
 
@@ -120,7 +119,7 @@ public class InMemCuboidMapper<KEYIN> extends KylinMapper<KEYIN, Object, ByteArr
         // put each row to the queue
         Collection<String[]> rowCollection = flatTableInputFormat.parseMapperInput(record);
 
-        for (String[] row : rowCollection) {
+        for(String[] row: rowCollection) {
             List<String> rowAsList = Arrays.asList(row);
             while (!future.isDone()) {
                 if (queue.offer(rowAsList, 1, TimeUnit.SECONDS)) {
@@ -143,8 +142,7 @@ public class InMemCuboidMapper<KEYIN> extends KylinMapper<KEYIN, Object, ByteArr
         try {
             future.get();
         } catch (Exception e) {
-            throw new IOException("Failed to build cube in mapper " + context.getTaskAttemptID().getTaskID().getId(),
-                    e);
+            throw new IOException("Failed to build cube in mapper " + context.getTaskAttemptID().getTaskID().getId(), e);
         }
         queue.clear();
     }
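The mapper hunk above hands rows to the in-memory cube builder through a blocking queue and retries the offer in one-second slices while the builder future is still running, so a failed builder does not leave the mapper blocked forever. A simplified, self-contained sketch of that handoff pattern; the consumer and the empty-string sentinel are stand-ins, not the real cube builder:

    import java.util.concurrent.*;

    public class QueueHandoffSketch {
        public static void main(String[] args) throws Exception {
            BlockingQueue<String> queue = new ArrayBlockingQueue<>(4);
            ExecutorService es = Executors.newSingleThreadExecutor();
            Future<?> future = es.submit(() -> {
                String row;
                try {
                    while (!(row = queue.take()).isEmpty()) {
                        // consume row (the real code feeds an in-mem cube builder)
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            for (String row : new String[] { "r1", "r2", "r3" }) {
                while (!future.isDone()) {
                    if (queue.offer(row, 1, TimeUnit.SECONDS)) {
                        break; // handed off, move to the next row
                    }
                }
            }
            queue.put("");   // sentinel: end of input
            future.get();    // surface consumer failures, as the real cleanup() does
            es.shutdown();
        }
    }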

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
index 65ba841..244889f 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
@@ -74,8 +74,7 @@ public class InMemCuboidReducer extends KylinReducer<ByteArrayWritable, ByteArra
     }
 
     @Override
-    public void doReduce(ByteArrayWritable key, Iterable<ByteArrayWritable> values, Context context)
-            throws IOException, InterruptedException {
+    public void doReduce(ByteArrayWritable key, Iterable<ByteArrayWritable> values, Context context) throws IOException, InterruptedException {
 
         aggs.reset();
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
index c1a55da..d183f90 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
@@ -31,8 +31,7 @@ public class MapContextGTRecordWriter extends KVGTRecordWriter {
 
     protected MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext;
 
-    public MapContextGTRecordWriter(MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext,
-            CubeDesc cubeDesc, CubeSegment cubeSegment) {
+    public MapContextGTRecordWriter(MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext, CubeDesc cubeDesc, CubeSegment cubeSegment) {
         super(cubeDesc, cubeSegment);
         this.mapContext = mapContext;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
index 7c50f23..a603fc8 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapper.java
@@ -110,8 +110,7 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
 
         // decide which source segment
         FileSplit fileSplit = (FileSplit) context.getInputSplit();
-        IMROutput2.IMRMergeOutputFormat outputFormat = MRUtil.getBatchMergeOutputSide2(mergedCubeSegment)
-                .getOuputFormat();
+        IMROutput2.IMRMergeOutputFormat outputFormat = MRUtil.getBatchMergeOutputSide2(mergedCubeSegment).getOuputFormat();
         sourceCubeSegment = outputFormat.findSourceSegment(fileSplit, cube);
 
         rowKeySplitter = new RowKeySplitter(sourceCubeSegment, 65, 255);
@@ -185,8 +184,7 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
                     System.arraycopy(oldBuf, 0, newKeyBodyBuf, 0, oldBuf.length);
                 }
 
-                int idInSourceDict = BytesUtil.readUnsigned(splittedByteses[useSplit].value, 0,
-                        splittedByteses[useSplit].length);
+                int idInSourceDict = BytesUtil.readUnsigned(splittedByteses[useSplit].value, 0, splittedByteses[useSplit].length);
                 int idInMergedDict;
 
                 //int size = sourceDict.getValueBytesFromId(idInSourceDict, newKeyBodyBuf, bufOffset);
@@ -207,8 +205,7 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
                     System.arraycopy(oldBuf, 0, newKeyBodyBuf, 0, oldBuf.length);
                 }
 
-                System.arraycopy(splittedByteses[useSplit].value, 0, newKeyBodyBuf, bufOffset,
-                        splittedByteses[useSplit].length);
+                System.arraycopy(splittedByteses[useSplit].value, 0, newKeyBodyBuf, bufOffset, splittedByteses[useSplit].length);
                 bufOffset += splittedByteses[useSplit].length;
             }
         }
@@ -242,14 +239,13 @@ public class MergeCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
         Boolean ret = dimensionsNeedDict.get(col);
         if (ret != null)
             return ret;
-
+        
         ret = cubeDesc.getRowkey().isUseDictionary(col);
         if (ret) {
-            TableRef srcTable = DictionaryManager.getInstance(config).decideSourceData(cubeDesc.getModel(), col)
-                    .getTableRef();
+            TableRef srcTable = DictionaryManager.getInstance(config).decideSourceData(cubeDesc.getModel(), col).getTableRef();
             ret = cubeDesc.getModel().isFactTable(srcTable);
         }
-
+        
         dimensionsNeedDict.put(col, ret);
         return ret;
     }
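The hunk above restores the memoized check for whether a dimension needs dictionary translation while merging segments: the column must be dictionary-encoded in the rowkey and its dictionary source must be the fact table. A hedged sketch of that pattern, with the two Kylin lookups replaced by caller-supplied predicates:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Predicate;

    public class NeedDictCheckSketch {
        private final Map<String, Boolean> cache = new HashMap<>();
        private final Predicate<String> usesDictionary;       // rowkey encoding check
        private final Predicate<String> sourcedFromFactTable; // dictionary source check

        NeedDictCheckSketch(Predicate<String> usesDictionary, Predicate<String> sourcedFromFactTable) {
            this.usesDictionary = usesDictionary;
            this.sourcedFromFactTable = sourcedFromFactTable;
        }

        boolean needMergedDict(String col) {
            Boolean cached = cache.get(col);
            if (cached != null)
                return cached;
            boolean ret = usesDictionary.test(col) && sourcedFromFactTable.test(col);
            cache.put(col, ret);
            return ret;
        }
    }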

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeDictionaryStep.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeDictionaryStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeDictionaryStep.java
index f6658c8..4ca132c 100755
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeDictionaryStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeDictionaryStep.java
@@ -103,8 +103,7 @@ public class MergeDictionaryStep extends AbstractExecutable {
      * @param newSeg
      * @throws IOException
      */
-    private void makeDictForNewSegment(KylinConfig conf, CubeInstance cube, CubeSegment newSeg,
-            List<CubeSegment> mergingSegments) throws IOException {
+    private void makeDictForNewSegment(KylinConfig conf, CubeInstance cube, CubeSegment newSeg, List<CubeSegment> mergingSegments) throws IOException {
         HashSet<TblColRef> colsNeedMeringDict = new HashSet<TblColRef>();
         HashSet<TblColRef> colsNeedCopyDict = new HashSet<TblColRef>();
         DictionaryManager dictMgr = DictionaryManager.getInstance(conf);
@@ -143,8 +142,7 @@ public class MergeDictionaryStep extends AbstractExecutable {
         }
     }
 
-    private DictionaryInfo mergeDictionaries(DictionaryManager dictMgr, CubeSegment cubeSeg, List<DictionaryInfo> dicts,
-            TblColRef col) throws IOException {
+    private DictionaryInfo mergeDictionaries(DictionaryManager dictMgr, CubeSegment cubeSeg, List<DictionaryInfo> dicts, TblColRef col) throws IOException {
         DictionaryInfo dictInfo = dictMgr.mergeDictionary(dicts);
         if (dictInfo != null)
             cubeSeg.putDictResPath(col, dictInfo.getResourcePath());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeStatisticsStep.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeStatisticsStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeStatisticsStep.java
index 17c4d03..04d8231 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeStatisticsStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeStatisticsStep.java
@@ -76,8 +76,7 @@ public class MergeStatisticsStep extends AbstractExecutable {
 
             int averageSamplingPercentage = 0;
             for (String segmentId : CubingExecutableUtil.getMergingSegmentIds(this.getParams())) {
-                String fileKey = CubeSegment
-                        .getStatisticsResourcePath(CubingExecutableUtil.getCubeName(this.getParams()), segmentId);
+                String fileKey = CubeSegment.getStatisticsResourcePath(CubingExecutableUtil.getCubeName(this.getParams()), segmentId);
                 InputStream is = rs.getResource(fileKey).inputStream;
                 File tempFile = null;
                 FileOutputStream tempFileStream = null;
@@ -121,13 +120,9 @@ public class MergeStatisticsStep extends AbstractExecutable {
                         tempFile.delete();
                 }
             }
-            averageSamplingPercentage = averageSamplingPercentage
-                    / CubingExecutableUtil.getMergingSegmentIds(this.getParams()).size();
-            CubeStatsWriter.writeCuboidStatistics(conf,
-                    new Path(CubingExecutableUtil.getMergedStatisticsPath(this.getParams())), cuboidHLLMap,
-                    averageSamplingPercentage);
-            Path statisticsFilePath = new Path(CubingExecutableUtil.getMergedStatisticsPath(this.getParams()),
-                    BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);
+            averageSamplingPercentage = averageSamplingPercentage / CubingExecutableUtil.getMergingSegmentIds(this.getParams()).size();
+            CubeStatsWriter.writeCuboidStatistics(conf, new Path(CubingExecutableUtil.getMergedStatisticsPath(this.getParams())), cuboidHLLMap, averageSamplingPercentage);
+            Path statisticsFilePath = new Path(CubingExecutableUtil.getMergedStatisticsPath(this.getParams()), BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);
             FileSystem fs = HadoopUtil.getFileSystem(statisticsFilePath, conf);
             FSDataInputStream is = fs.open(statisticsFilePath);
             try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
index 9b41a8e..eee2c00 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
@@ -47,8 +47,7 @@ import com.google.common.collect.Sets;
 public class MetadataCleanupJob extends AbstractHadoopJob {
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false)
-            .withDescription("Delete the unused metadata").create("delete");
+    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused metadata").create("delete");
 
     protected static final Logger logger = LoggerFactory.getLogger(MetadataCleanupJob.class);
 
@@ -101,8 +100,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         List<String> toDeleteResource = Lists.newArrayList();
 
         // two level resources, snapshot tables and cube statistics
-        for (String resourceRoot : new String[] { ResourceStore.SNAPSHOT_RESOURCE_ROOT,
-                ResourceStore.CUBE_STATISTICS_ROOT }) {
+        for (String resourceRoot : new String[] { ResourceStore.SNAPSHOT_RESOURCE_ROOT, ResourceStore.CUBE_STATISTICS_ROOT }) {
             NavigableSet<String> snapshotTables = getStore().listResources(resourceRoot);
 
             if (snapshotTables != null) {
@@ -151,9 +149,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         for (ExecutablePO executable : allExecutable) {
             long lastModified = executable.getLastModified();
             ExecutableOutputPO output = executableDao.getJobOutput(executable.getUuid());
-            if (System.currentTimeMillis() - lastModified > TIME_THREADSHOLD_FOR_JOB
-                    && (ExecutableState.SUCCEED.toString().equals(output.getStatus())
-                            || ExecutableState.DISCARDED.toString().equals(output.getStatus()))) {
+            if (System.currentTimeMillis() - lastModified > TIME_THREADSHOLD_FOR_JOB && (ExecutableState.SUCCEED.toString().equals(output.getStatus()) || ExecutableState.DISCARDED.toString().equals(output.getStatus()))) {
                 toDeleteResource.add(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + executable.getUuid());
                 toDeleteResource.add(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + executable.getUuid());
 
@@ -164,8 +160,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         }
 
         if (toDeleteResource.size() > 0) {
-            logger.info(
-                    "The following resources have no reference or is too old, will be cleaned from metadata store: \n");
+            logger.info("The following resources have no reference or is too old, will be cleaned from metadata store: \n");
 
             for (String s : toDeleteResource) {
                 logger.info(s);
@@ -180,8 +175,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
     }
 
     public static void main(String[] args) throws Exception {
-        logger.warn(
-                "org.apache.kylin.engine.mr.steps.MetadataCleanupJob is deprecated, use org.apache.kylin.tool.MetadataCleanupJob instead");
+        logger.warn("org.apache.kylin.engine.mr.steps.MetadataCleanupJob is deprecated, use org.apache.kylin.tool.MetadataCleanupJob instead");
 
         int exitCode = ToolRunner.run(new MetadataCleanupJob(), args);
         System.exit(exitCode);
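The cleanup hunk above only deletes job metadata for executables that are both old enough and in a terminal state (SUCCEED or DISCARDED). A small sketch of that retention rule; the 30-day threshold here is an assumption for illustration, the deprecated engine-mr class keeps its own constant:

    public class JobRetentionRuleSketch {
        static final long THRESHOLD_MS = 30L * 24 * 3600 * 1000; // assumed 30 days

        // A job's resources become cleanup candidates only when it has finished
        // and has not been modified within the retention window.
        static boolean isCleanupCandidate(long lastModified, String status) {
            boolean finished = "SUCCEED".equals(status) || "DISCARDED".equals(status);
            return finished && System.currentTimeMillis() - lastModified > THRESHOLD_MS;
        }
    }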

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/NDCuboidMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/NDCuboidMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/NDCuboidMapper.java
index 8bf6d4b..b924edc 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/NDCuboidMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/NDCuboidMapper.java
@@ -18,9 +18,6 @@
 
 package org.apache.kylin.engine.mr.steps;
 
-import java.io.IOException;
-import java.util.Collection;
-
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ByteArray;
@@ -39,6 +36,9 @@ import org.apache.kylin.engine.mr.common.NDCuboidBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.Collection;
+
 /**
  * @author George Song (ysong1)
  * 
@@ -79,6 +79,8 @@ public class NDCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
         rowKeySplitter = new RowKeySplitter(cubeSegment, 65, 256);
     }
 
+
+
     @Override
     public void doMap(Text key, Text value, Context context) throws IOException, InterruptedException {
         long cuboidId = rowKeySplitter.split(key.getBytes());
@@ -103,8 +105,7 @@ public class NDCuboidMapper extends KylinMapper<Text, Text, Text, Text> {
 
         for (Long child : myChildren) {
             Cuboid childCuboid = Cuboid.findById(cubeDesc, child);
-            Pair<Integer, ByteArray> result = ndCuboidBuilder.buildKey(parentCuboid, childCuboid,
-                    rowKeySplitter.getSplitBuffers());
+            Pair<Integer, ByteArray> result = ndCuboidBuilder.buildKey(parentCuboid, childCuboid, rowKeySplitter.getSplitBuffers());
             outputKey.set(result.getSecond().array(), 0, result.getFirst());
             context.write(outputKey, value);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/ReducerNumSizing.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/ReducerNumSizing.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/ReducerNumSizing.java
index 8acd499..5c0555a 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/ReducerNumSizing.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/ReducerNumSizing.java
@@ -34,15 +34,13 @@ public class ReducerNumSizing {
 
     private static final Logger logger = LoggerFactory.getLogger(ReducerNumSizing.class);
 
-    public static int getLayeredCubingReduceTaskNum(CubeSegment cubeSegment, double totalMapInputMB, int level)
-            throws ClassNotFoundException, IOException, InterruptedException, JobException {
+    public static int getLayeredCubingReduceTaskNum(CubeSegment cubeSegment, double totalMapInputMB, int level) throws ClassNotFoundException, IOException, InterruptedException, JobException {
         CubeDesc cubeDesc = cubeSegment.getCubeDesc();
         KylinConfig kylinConfig = cubeDesc.getConfig();
 
         double perReduceInputMB = kylinConfig.getDefaultHadoopJobReducerInputMB();
         double reduceCountRatio = kylinConfig.getDefaultHadoopJobReducerCountRatio();
-        logger.info("Having per reduce MB " + perReduceInputMB + ", reduce count ratio " + reduceCountRatio + ", level "
-                + level);
+        logger.info("Having per reduce MB " + perReduceInputMB + ", reduce count ratio " + reduceCountRatio + ", level " + level);
 
         CubeStatsReader cubeStatsReader = new CubeStatsReader(cubeSegment, kylinConfig);
 
@@ -52,8 +50,7 @@ public class ReducerNumSizing {
             //merge case
             double estimatedSize = cubeStatsReader.estimateCubeSize();
             adjustedCurrentLayerSizeEst = estimatedSize > totalMapInputMB ? totalMapInputMB : estimatedSize;
-            logger.debug("estimated size {}, input size {}, adjustedCurrentLayerSizeEst: {}", estimatedSize,
-                    totalMapInputMB, adjustedCurrentLayerSizeEst);
+            logger.debug("estimated size {}, input size {}, adjustedCurrentLayerSizeEst: {}", estimatedSize, totalMapInputMB, adjustedCurrentLayerSizeEst);
         } else if (level == 0) {
             //base cuboid case TODO: the estimation could be very WRONG because it has no correction
             adjustedCurrentLayerSizeEst = cubeStatsReader.estimateLayerSize(0);
@@ -62,9 +59,7 @@ public class ReducerNumSizing {
             parentLayerSizeEst = cubeStatsReader.estimateLayerSize(level - 1);
             currentLayerSizeEst = cubeStatsReader.estimateLayerSize(level);
             adjustedCurrentLayerSizeEst = totalMapInputMB / parentLayerSizeEst * currentLayerSizeEst;
-            logger.debug(
-                    "totalMapInputMB: {}, parentLayerSizeEst: {}, currentLayerSizeEst: {}, adjustedCurrentLayerSizeEst: {}",
-                    totalMapInputMB, parentLayerSizeEst, currentLayerSizeEst, adjustedCurrentLayerSizeEst);
+            logger.debug("totalMapInputMB: {}, parentLayerSizeEst: {}, currentLayerSizeEst: {}, adjustedCurrentLayerSizeEst: {}", totalMapInputMB, parentLayerSizeEst, currentLayerSizeEst, adjustedCurrentLayerSizeEst);
         }
 
         // number of reduce tasks
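The sizing hunk above estimates the input of the current cuboid layer by scaling the measured map input with the ratio between the statistical estimates of the current and parent layers (merge and base-cuboid builds take the other two branches). A minimal sketch of just that scaling step, with the config and stats-reader lookups replaced by plain parameters:

    public class LayerSizeEstimateSketch {
        // Scale the measured input of the parent layer by the estimated
        // size ratio between the current layer and its parent.
        static double adjustedLayerSizeMB(double totalMapInputMB, double parentLayerEstMB,
                double currentLayerEstMB) {
            return totalMapInputMB / parentLayerEstMB * currentLayerEstMB;
        }

        public static void main(String[] args) {
            // e.g. parent layer estimated at 400 MB, current at 100 MB, measured input 800 MB
            System.out.println(adjustedLayerSizeMB(800, 400, 100)); // prints 200.0
        }
    }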

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
index 89534fe..3419949 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
@@ -39,8 +39,7 @@ import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 public class RowKeyDistributionCheckerJob extends AbstractHadoopJob {
 
     @SuppressWarnings("static-access")
-    protected static final Option ROW_KEY_STATS_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true)
-            .withDescription("rowKeyStatsFilePath").create("rowKeyStatsFilePath");
+    protected static final Option ROW_KEY_STATS_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("rowKeyStatsFilePath").create("rowKeyStatsFilePath");
 
     @Override
     public int run(String[] args) throws Exception {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerMapper.java
index 0af1b85..eab57d1 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerMapper.java
@@ -68,7 +68,7 @@ public class RowKeyDistributionCheckerMapper extends KylinMapper<Text, Text, Tex
         for (Text t : keyList) {
             if (key.compareTo(t) < 0) {
                 Long v = resultMap.get(t);
-                long length = (long) key.getLength() + value.getLength();
+                long length = (long)key.getLength() + value.getLength();
                 v += length;
                 resultMap.put(t, v);
                 break;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerReducer.java
index 1aa406f..d203e8c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerReducer.java
@@ -38,8 +38,7 @@ public class RowKeyDistributionCheckerReducer extends KylinReducer<Text, LongWri
     }
 
     @Override
-    public void doReduce(Text key, Iterable<LongWritable> values, Context context)
-            throws IOException, InterruptedException {
+    public void doReduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
 
         long length = 0;
         for (LongWritable v : values) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SaveStatisticsStep.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SaveStatisticsStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SaveStatisticsStep.java
index 859cd2e..28f99fb 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SaveStatisticsStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SaveStatisticsStep.java
@@ -54,17 +54,14 @@ public class SaveStatisticsStep extends AbstractExecutable {
 
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
-        CubeSegment newSegment = CubingExecutableUtil.findSegment(context,
-                CubingExecutableUtil.getCubeName(this.getParams()),
-                CubingExecutableUtil.getSegmentId(this.getParams()));
+        CubeSegment newSegment = CubingExecutableUtil.findSegment(context, CubingExecutableUtil.getCubeName(this.getParams()), CubingExecutableUtil.getSegmentId(this.getParams()));
         KylinConfig kylinConf = newSegment.getConfig();
 
         ResourceStore rs = ResourceStore.getStore(kylinConf);
         try {
             FileSystem fs = HadoopUtil.getWorkingFileSystem();
             Path statisticsDir = new Path(CubingExecutableUtil.getStatisticsPath(this.getParams()));
-            Path statisticsFilePath = HadoopUtil.getFilterOnlyPath(fs, statisticsDir,
-                    BatchConstants.CFG_OUTPUT_STATISTICS);
+            Path statisticsFilePath = HadoopUtil.getFilterOnlyPath(fs, statisticsDir, BatchConstants.CFG_OUTPUT_STATISTICS);
             if (statisticsFilePath == null) {
                 throw new IOException("fail to find the statistics file in base dir: " + statisticsDir);
             }
@@ -114,8 +111,7 @@ public class SaveStatisticsStep extends AbstractExecutable {
                 double mapperOverlapRatio = cubeStats.getMapperOverlapRatioOfFirstBuild();
                 double overlapThreshold = kylinConf.getCubeAlgorithmAutoThreshold();
                 logger.info("mapperNumber for " + seg + " is " + mapperNumber + " and threshold is " + mapperNumLimit);
-                logger.info("mapperOverlapRatio for " + seg + " is " + mapperOverlapRatio + " and threshold is "
-                        + overlapThreshold);
+                logger.info("mapperOverlapRatio for " + seg + " is " + mapperOverlapRatio + " and threshold is " + overlapThreshold);
 
                 // in-mem cubing is good when
                 // 1) the cluster has enough mapper slots to run in parallel

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
index bb4152e..c75abea 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
@@ -28,7 +28,9 @@ import org.apache.kylin.metadata.datatype.DataType;
 public class SelfDefineSortableKey implements WritableComparable<SelfDefineSortableKey> {
 
     public enum TypeFlag {
-        NONE_NUMERIC_TYPE, INTEGER_FAMILY_TYPE, DOUBLE_FAMILY_TYPE
+        NONE_NUMERIC_TYPE,
+        INTEGER_FAMILY_TYPE,
+        DOUBLE_FAMILY_TYPE
     }
 
     private byte typeId; //non-numeric(0000 0000) int(0000 0001) other numberic(0000 0010)
@@ -59,6 +61,7 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
         }
     }
 
+
     public void init(Text key, DataType type) {
         init(key, getTypeIdByDatatype(type));
     }
@@ -110,6 +113,7 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
         return (typeId == TypeFlag.INTEGER_FAMILY_TYPE.ordinal());
     }
 
+
     public byte getTypeIdByDatatype(DataType type) {
         if (!type.isNumberFamily()) {
             return (byte) TypeFlag.NONE_NUMERIC_TYPE.ordinal();
@@ -125,3 +129,5 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
     }
 
 }
+
+
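The hunk above reverts the enum to one constant per line; the constants' ordinals are what SelfDefineSortableKey stores as its one-byte type id so that numeric column values sort numerically rather than lexically. A standalone sketch of that mapping, with the DataType checks replaced by boolean parameters:

    public class TypeFlagSketch {
        enum TypeFlag {
            NONE_NUMERIC_TYPE,   // ordinal 0
            INTEGER_FAMILY_TYPE, // ordinal 1
            DOUBLE_FAMILY_TYPE   // ordinal 2
        }

        // Stand-ins for DataType.isNumberFamily() / isIntegerFamily().
        static byte typeIdFor(boolean numberFamily, boolean integerFamily) {
            if (!numberFamily)
                return (byte) TypeFlag.NONE_NUMERIC_TYPE.ordinal();
            return (byte) (integerFamily ? TypeFlag.INTEGER_FAMILY_TYPE.ordinal()
                    : TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
        }
    }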

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
index 32eaebd..2efd718 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
@@ -88,8 +88,7 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
         final String factColumnsInputPath = this.getParams().get(BatchConstants.CFG_OUTPUT_PATH);
         Path colDir = new Path(factColumnsInputPath, partitionCol.getIdentity());
         FileSystem fs = HadoopUtil.getWorkingFileSystem();
-        Path outputFile = HadoopUtil.getFilterOnlyPath(fs, colDir,
-                partitionCol.getName() + FactDistinctColumnsReducer.PARTITION_COL_INFO_FILE_POSTFIX);
+        Path outputFile = HadoopUtil.getFilterOnlyPath(fs, colDir, partitionCol.getName() + FactDistinctColumnsReducer.PARTITION_COL_INFO_FILE_POSTFIX);
         if (outputFile == null) {
             throw new IOException("fail to find the partition file in base dir: " + colDir);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterMergeStep.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterMergeStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterMergeStep.java
index 8447e44..add5c42 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterMergeStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterMergeStep.java
@@ -51,8 +51,7 @@ public class UpdateCubeInfoAfterMergeStep extends AbstractExecutable {
 
         CubeSegment mergedSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
         if (mergedSegment == null) {
-            return new ExecuteResult(ExecuteResult.State.FAILED,
-                    "there is no segment with id:" + CubingExecutableUtil.getSegmentId(this.getParams()));
+            return new ExecuteResult(ExecuteResult.State.FAILED, "there is no segment with id:" + CubingExecutableUtil.getSegmentId(this.getParams()));
         }
 
         CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
index 1ad6687..39c5bac 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
@@ -297,7 +297,7 @@ public class SortedColumnReaderTest {
         }
         return result;
     }
-
+    
     private String qualify(String path) {
         String absolutePath = new File(path).getAbsolutePath();
         if (absolutePath.startsWith("/"))

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/TableReaderTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/TableReaderTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/TableReaderTest.java
index e1adbb3..4c43dbc 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/TableReaderTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/TableReaderTest.java
@@ -35,11 +35,9 @@ public class TableReaderTest {
     @Test
     public void testBasicReader() throws IOException {
         File f = new File("src/test/resources/dict/DW_SITES");
-        DFSFileTableReader reader = new DFSFileTableReader("file://" + f.getAbsolutePath(), DFSFileTable.DELIM_AUTO,
-                10);
+        DFSFileTableReader reader = new DFSFileTableReader("file://" + f.getAbsolutePath(), DFSFileTable.DELIM_AUTO, 10);
         while (reader.next()) {
-            assertEquals("[-1, Korea Auction.co.kr, S, 48, 0, 111, 2009-02-11, , DW_OFFPLAT, ]",
-                    Arrays.toString(reader.getRow()));
+            assertEquals("[-1, Korea Auction.co.kr, S, 48, 0, 111, 2009-02-11, , DW_OFFPLAT, ]", Arrays.toString(reader.getRow()));
             break;
         }
         reader.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/CubeReducerTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/CubeReducerTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/CubeReducerTest.java
index 32e80fc..7616df2 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/CubeReducerTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/CubeReducerTest.java
@@ -77,8 +77,7 @@ public class CubeReducerTest extends LocalFileMetadataTestCase {
 
         reduceDriver.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, "test_kylin_cube_with_slr_ready");
 
-        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready")
-                .getDescriptor();
+        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready").getDescriptor();
         BufferedMeasureCodec codec = new BufferedMeasureCodec(cubeDesc.getMeasures());
 
         Text key1 = new Text("72010ustech");
@@ -102,12 +101,9 @@ public class CubeReducerTest extends LocalFileMetadataTestCase {
 
         List<Pair<Text, Text>> result = reduceDriver.run();
 
-        Pair<Text, Text> p1 = new Pair<Text, Text>(new Text("72010ustech"),
-                newValueText(codec, "45.43", "10", "20.34", 3, 600));
-        Pair<Text, Text> p2 = new Pair<Text, Text>(new Text("1tech"),
-                newValueText(codec, "35.43", "15.09", "20.34", 2, 1500));
-        Pair<Text, Text> p3 = new Pair<Text, Text>(new Text("0"),
-                newValueText(codec, "146.52", "146.52", "146.52", 0, 0));
+        Pair<Text, Text> p1 = new Pair<Text, Text>(new Text("72010ustech"), newValueText(codec, "45.43", "10", "20.34", 3, 600));
+        Pair<Text, Text> p2 = new Pair<Text, Text>(new Text("1tech"), newValueText(codec, "35.43", "15.09", "20.34", 2, 1500));
+        Pair<Text, Text> p3 = new Pair<Text, Text>(new Text("0"), newValueText(codec, "146.52", "146.52", "146.52", 0, 0));
 
         assertEquals(3, result.size());
 
@@ -121,8 +117,7 @@ public class CubeReducerTest extends LocalFileMetadataTestCase {
         reduceDriver.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, "test_kylin_cube_with_slr_ready");
         reduceDriver.getConfiguration().setInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 1);
 
-        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready")
-                .getDescriptor();
+        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready").getDescriptor();
         MeasureDesc measureDesc = cubeDesc.getMeasures().get(0);
         FunctionDesc functionDesc = measureDesc.getFunction();
         Field field = FunctionDesc.class.getDeclaredField("measureType");
@@ -153,10 +148,8 @@ public class CubeReducerTest extends LocalFileMetadataTestCase {
 
         List<Pair<Text, Text>> result = reduceDriver.run();
 
-        Pair<Text, Text> p1 = new Pair<Text, Text>(new Text("72010ustech"),
-                newValueText(codec, "0", "10", "20.34", 3, 600));
-        Pair<Text, Text> p2 = new Pair<Text, Text>(new Text("1tech"),
-                newValueText(codec, "0", "15.09", "20.34", 2, 1500));
+        Pair<Text, Text> p1 = new Pair<Text, Text>(new Text("72010ustech"), newValueText(codec, "0", "10", "20.34", 3, 600));
+        Pair<Text, Text> p2 = new Pair<Text, Text>(new Text("1tech"), newValueText(codec, "0", "15.09", "20.34", 2, 1500));
         Pair<Text, Text> p3 = new Pair<Text, Text>(new Text("0"), newValueText(codec, "0", "146.52", "146.52", 0, 0));
 
         assertEquals(3, result.size());
@@ -166,10 +159,8 @@ public class CubeReducerTest extends LocalFileMetadataTestCase {
         assertTrue(result.contains(p3));
     }
 
-    private Text newValueText(BufferedMeasureCodec codec, String sum, String min, String max, int count,
-            int item_count) {
-        Object[] values = new Object[] { new BigDecimal(sum), new BigDecimal(min), new BigDecimal(max), new Long(count),
-                new Long(item_count) };
+    private Text newValueText(BufferedMeasureCodec codec, String sum, String min, String max, int count, int item_count) {
+        Object[] values = new Object[] { new BigDecimal(sum), new BigDecimal(min), new BigDecimal(max), new Long(count), new Long(item_count) };
 
         ByteBuffer buf = codec.encode(values);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
index 63e09ac..2e2ebf9 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidJobTest.java
@@ -80,8 +80,7 @@ public class MergeCuboidJobTest extends LocalFileMetadataTestCase {
         // CubeManager cubeManager =
         // CubeManager.getInstanceFromEnv(getTestConfig());
 
-        String[] args = { "-input", baseFolder.getAbsolutePath() + "," + eightFoler.getAbsolutePath(), "-cubename",
-                cubeName, "-segmentname", "20130331080000_20131212080000", "-output", output, "-jobname", jobname };
+        String[] args = { "-input", baseFolder.getAbsolutePath() + "," + eightFoler.getAbsolutePath(), "-cubename", cubeName, "-segmentname", "20130331080000_20131212080000", "-output", output, "-jobname", jobname };
         assertEquals("Job failed", 0, ToolRunner.run(conf, new MergeCuboidJob(), args));
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapperTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapperTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapperTest.java
index f73c645..04af4fe 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapperTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MergeCuboidMapperTest.java
@@ -52,7 +52,7 @@ import org.slf4j.LoggerFactory;
 
 @SuppressWarnings("rawtypes")
 public class MergeCuboidMapperTest extends LocalFileMetadataTestCase {
-
+    
     private static final Logger logger = LoggerFactory.getLogger(MergeCuboidMapperTest.class);
 
     MapDriver<Text, Text, Text, Text> mapDriver;
@@ -75,8 +75,7 @@ public class MergeCuboidMapperTest extends LocalFileMetadataTestCase {
         List<String> values = new ArrayList<>();
         values.add("eee");
         values.add("fff");
-        Dictionary<String> dict = DictionaryGenerator.buildDictionary(DataType.getType(newDictInfo.getDataType()),
-                new IterableDictionaryValueEnumerator(values));
+        Dictionary<String> dict = DictionaryGenerator.buildDictionary(DataType.getType(newDictInfo.getDataType()), new IterableDictionaryValueEnumerator(values));
         dictionaryManager.trySaveNewDict(dict, newDictInfo);
         dict.dump(System.out);
 
@@ -128,8 +127,7 @@ public class MergeCuboidMapperTest extends LocalFileMetadataTestCase {
                 values.add("ccc");
             else
                 values.add("bbb");
-            Dictionary<String> dict = DictionaryGenerator.buildDictionary(DataType.getType(newDictInfo.getDataType()),
-                    new IterableDictionaryValueEnumerator(values));
+            Dictionary<String> dict = DictionaryGenerator.buildDictionary(DataType.getType(newDictInfo.getDataType()), new IterableDictionaryValueEnumerator(values));
             dictionaryManager.trySaveNewDict(dict, newDictInfo);
             dict.dump(System.out);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidJobTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidJobTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidJobTest.java
index e15d463..989ed72 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidJobTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidJobTest.java
@@ -66,8 +66,7 @@ public class NDCuboidJobTest extends LocalFileMetadataTestCase {
 
         FileUtil.fullyDelete(new File(output));
 
-        String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName, "-output", output,
-                "-jobname", jobname, "-level", level };
+        String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName, "-output", output, "-jobname", jobname, "-level", level };
         assertEquals("Job failed", 0, ToolRunner.run(conf, new NDCuboidJob(), args));
     }
 
@@ -82,8 +81,7 @@ public class NDCuboidJobTest extends LocalFileMetadataTestCase {
 
         FileUtil.fullyDelete(new File(output));
 
-        String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName, "-output", output,
-                "-jobname", jobname, "-level", level };
+        String[] args = { "-input", input, "-cubename", cubeName, "-segmentname", segmentName, "-output", output, "-jobname", jobname, "-level", level };
         assertEquals("Job failed", 0, ToolRunner.run(conf, new NDCuboidJob(), args));
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidMapperTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidMapperTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidMapperTest.java
index 3ee49f2..c0ce1a4 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidMapperTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NDCuboidMapperTest.java
@@ -76,10 +76,8 @@ public class NDCuboidMapperTest extends LocalFileMetadataTestCase {
         mapReduceDriver.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
         mapReduceDriver.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_ID, segmentID);
 
-        byte[] key = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 55, 9, 9, 9, 9, 9, 9, 9, 9, 9,
-                9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
-        byte[] value = { 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16,
-                56, 92, 114, -80, 118, 1, 1 };
+        byte[] key = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 55, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
+        byte[] value = { 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16, 56, 92, 114, -80, 118, 1, 1 };
         Pair<Text, Text> input1 = new Pair<Text, Text>(new Text(key), new Text(value));
 
         mapReduceDriver.addInput(input1);
@@ -88,10 +86,8 @@ public class NDCuboidMapperTest extends LocalFileMetadataTestCase {
 
         assertEquals(4, result.size());
 
-        byte[] resultKey = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 127, 0, -104, -106, -128, 55, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
-                9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
-        byte[] resultValue = { 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23,
-                -16, 56, 92, 114, -80, 118, 1, 1 };
+        byte[] resultKey = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 127, 0, -104, -106, -128, 55, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
+        byte[] resultValue = { 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16, 56, 92, 114, -80, 118, 14, 7, 23, -16, 56, 92, 114, -80, 118, 1, 1 };
         Pair<Text, Text> output1 = new Pair<Text, Text>(new Text(resultKey), new Text(resultValue));
 
         //As we will truncate decimal(KYLIN-766), value will no longer equals to resultValue
@@ -110,8 +106,7 @@ public class NDCuboidMapperTest extends LocalFileMetadataTestCase {
         System.out.println(Bytes.toLong(new byte[] { 0, 0, 0, 0, 0, 0, 1, -1 }));
         for (int i = 0; i < result.size(); i++) {
             byte[] bytes = new byte[result.get(i).getFirst().getLength()];
-            System.arraycopy(result.get(i).getFirst().getBytes(), RowConstants.ROWKEY_SHARDID_LEN, bytes, 0,
-                    result.get(i).getFirst().getLength() - RowConstants.ROWKEY_SHARDID_LEN);
+            System.arraycopy(result.get(i).getFirst().getBytes(), RowConstants.ROWKEY_SHARDID_LEN, bytes, 0, result.get(i).getFirst().getLength() - RowConstants.ROWKEY_SHARDID_LEN);
             System.out.println(Bytes.toLong(bytes));
             keySet[i] = Bytes.toLong(bytes);
         }


[48/67] [abbrv] kylin git commit: minor, fix license header

Posted by li...@apache.org.
minor, fix license header


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/05238234
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/05238234
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/05238234

Branch: refs/heads/master
Commit: 052382345a1bd0da6d4ae589e88a11f33b7f9a72
Parents: c1e2143
Author: lidongsjtu <li...@apache.org>
Authored: Tue May 30 00:41:45 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Tue May 30 11:40:16 2017 +0800

----------------------------------------------------------------------
 build/bin/load-hive-conf.sh | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/05238234/build/bin/load-hive-conf.sh
----------------------------------------------------------------------
diff --git a/build/bin/load-hive-conf.sh b/build/bin/load-hive-conf.sh
index 62467fa..7639a34 100644
--- a/build/bin/load-hive-conf.sh
+++ b/build/bin/load-hive-conf.sh
@@ -1,6 +1,21 @@
 #!/bin/bash
 
-# Kyligence Inc. License
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
 


[08/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 831b7b4..972eea9 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -59,7 +60,6 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -95,11 +95,9 @@ public class MockHTable implements Table {
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
-    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<>(
-            Bytes.BYTES_COMPARATOR);
+    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
-    private static List<KeyValue> toKeyValue(byte[] row,
-            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
+    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
         return toKeyValue(row, rowdata, 0, Long.MAX_VALUE, maxVersions);
     }
 
@@ -164,9 +162,7 @@ public class MockHTable implements Table {
         throw new RuntimeException(this.getClass() + " does NOT implement this method.");
     }
 
-    private static List<KeyValue> toKeyValue(byte[] row,
-            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart,
-            long timestampEnd, int maxVersions) {
+    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart, long timestampEnd, int maxVersions) {
         List<KeyValue> ret = new ArrayList<KeyValue>();
         for (byte[] family : rowdata.keySet())
             for (byte[] qualifier : rowdata.get(family).keySet()) {
@@ -241,14 +237,12 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public <R> void batchCallback(List<? extends Row> actions, Object[] results, Batch.Callback<R> callback)
-            throws IOException, InterruptedException {
+    public <R> void batchCallback(List<? extends Row> actions, Object[] results, Batch.Callback<R> callback) throws IOException, InterruptedException {
 
     }
 
     @Override
-    public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback)
-            throws IOException, InterruptedException {
+    public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback) throws IOException, InterruptedException {
         return new Object[0];
     }
 
@@ -273,12 +267,10 @@ public class MockHTable implements Table {
                 for (byte[] qualifier : qualifiers) {
                     if (qualifier == null)
                         qualifier = "".getBytes();
-                    if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier)
-                            || data.get(row).get(family).get(qualifier).isEmpty())
+                    if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier) || data.get(row).get(family).get(qualifier).isEmpty())
                         continue;
                     Map.Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
-                    kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(),
-                            timestampAndValue.getValue()));
+                    kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
                 }
             }
         }
@@ -327,8 +319,7 @@ public class MockHTable implements Table {
 
             List<KeyValue> kvs = null;
             if (!scan.hasFamilies()) {
-                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(),
-                        scan.getMaxVersions());
+                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(), scan.getMaxVersions());
             } else {
                 kvs = new ArrayList<KeyValue>();
                 for (byte[] family : scan.getFamilyMap().keySet()) {
@@ -482,19 +473,16 @@ public class MockHTable implements Table {
     @Override
     public void put(Put put) throws IOException {
         byte[] row = put.getRow();
-        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row,
-                new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
+        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row, new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
         for (byte[] family : put.getFamilyMap().keySet()) {
             if (columnFamilies.contains(new String(family)) == false) {
                 throw new RuntimeException("Not Exists columnFamily : " + new String(family));
             }
-            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family,
-                    new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
+            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
             for (KeyValue kv : put.getFamilyMap().get(family)) {
                 kv.updateLatestStamp(Bytes.toBytes(System.currentTimeMillis()));
                 byte[] qualifier = kv.getQualifier();
-                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier,
-                        new TreeMap<Long, byte[]>());
+                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier, new TreeMap<Long, byte[]>());
                 qualifierData.put(kv.getTimestamp(), kv.getValue());
             }
         }
@@ -513,13 +501,9 @@ public class MockHTable implements Table {
 
     private boolean check(byte[] row, byte[] family, byte[] qualifier, byte[] value) {
         if (value == null || value.length == 0)
-            return !data.containsKey(row) || !data.get(row).containsKey(family)
-                    || !data.get(row).get(family).containsKey(qualifier);
+            return !data.containsKey(row) || !data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier);
         else
-            return data.containsKey(row) && data.get(row).containsKey(family)
-                    && data.get(row).get(family).containsKey(qualifier)
-                    && !data.get(row).get(family).get(qualifier).isEmpty()
-                    && Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
+            return data.containsKey(row) && data.get(row).containsKey(family) && data.get(row).get(family).containsKey(qualifier) && !data.get(row).get(family).get(qualifier).isEmpty() && Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
     }
 
     /**
@@ -535,8 +519,7 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp,
-            byte[] bytes3, Put put) throws IOException {
+    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
         return false;
     }
 
@@ -589,8 +572,7 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
-    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete)
-            throws IOException {
+    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException {
         if (check(row, family, qualifier, value)) {
             delete(delete);
             return true;
@@ -599,8 +581,7 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp,
-            byte[] bytes3, Delete delete) throws IOException {
+    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
         return false;
     }
 
@@ -621,8 +602,7 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, Durability durability)
-            throws IOException {
+    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, Durability durability) throws IOException {
         return 0;
     }
 
@@ -640,15 +620,13 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service, byte[] startKey, byte[] endKey,
-            Batch.Call<T, R> callable) throws ServiceException, Throwable {
+    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     @Override
-    public <T extends Service, R> void coprocessorService(Class<T> service, byte[] startKey, byte[] endKey,
-            Batch.Call<T, R> callable, Batch.Callback<R> callback) throws ServiceException, Throwable {
+    public <T extends Service, R> void coprocessorService(Class<T> service, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
@@ -671,24 +649,20 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
-            Message request, byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
+    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     @Override
-    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
-            Message request, byte[] startKey, byte[] endKey, R responsePrototype, Batch.Callback<R> callback)
-            throws ServiceException, Throwable {
+    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey, R responsePrototype, Batch.Callback<R> callback) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     //@Override  (only since 0.98.8)
-    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp,
-            byte[] value, RowMutations mutation) throws IOException {
+    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException {
         throw new NotImplementedException();
 
     }
-}
+}
\ No newline at end of file
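
Aside: the single-line check(...) above is what backs MockHTable's five-argument checkAndDelete, i.e. the delete only proceeds when the latest stored value still equals the expected one. A small, hypothetical usage sketch against the generic HBase 1.x Table interface follows; the row, column family, qualifier and values are made up, and the table could be a MockHTable in a test or a real table in production.

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CheckAndDeleteSketch {

        // 'table' could be a MockHTable in a unit test or a real HBase table.
        static void demo(Table table) throws IOException {
            byte[] row = Bytes.toBytes("row1");
            byte[] family = Bytes.toBytes("f");
            byte[] qualifier = Bytes.toBytes("q");

            // Write an initial value for f:q.
            Put put = new Put(row);
            put.addColumn(family, qualifier, Bytes.toBytes("v1"));
            table.put(put);

            // The delete is applied only if the latest stored value is still "v1",
            // which is the condition encoded by check(...) above.
            boolean deleted = table.checkAndDelete(row, family, qualifier,
                    Bytes.toBytes("v1"), new Delete(row));
            System.out.println("deleted = " + deleted);
        }
    }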

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
index 983b2ec..095b74a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
@@ -47,8 +47,7 @@ public class PasswordPlaceholderConfigurer extends PropertyPlaceholderConfigurer
     /**
      * thisIsAsecretKey
      */
-    private static byte[] key = { 0x74, 0x68, 0x69, 0x73, 0x49, 0x73, 0x41, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b,
-            0x65, 0x79 };
+    private static byte[] key = { 0x74, 0x68, 0x69, 0x73, 0x49, 0x73, 0x41, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79 };
 
     /**
      * The PasswordPlaceholderConfigurer will read Kylin properties as the Spring resource
@@ -113,8 +112,7 @@ public class PasswordPlaceholderConfigurer extends PropertyPlaceholderConfigurer
     }
 
     private static void printUsage() {
-        System.out.println(
-                "Usage: java org.apache.kylin.rest.security.PasswordPlaceholderConfigurer <EncryptMethod> <your_password>");
+        System.out.println("Usage: java org.apache.kylin.rest.security.PasswordPlaceholderConfigurer <EncryptMethod> <your_password>");
         System.out.println("EncryptMethod: AES or BCrypt");
     }
 

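Aside: the 16-byte key array collapsed onto one line above is a standard AES-128 key, so it plugs directly into the JDK crypto API. A rough, hypothetical sketch of encrypting a password with it follows; the cipher transformation and the Base64 output encoding are assumptions for illustration, not necessarily how Kylin's AES option works.

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    import javax.crypto.Cipher;
    import javax.crypto.spec.SecretKeySpec;

    public class AesPasswordSketch {

        // The same 16-byte AES-128 key as the field shown in the diff above.
        private static final byte[] KEY = { 0x74, 0x68, 0x69, 0x73, 0x49, 0x73, 0x41, 0x53,
                0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79 };

        static String encrypt(String password) throws Exception {
            // "AES" defaults to AES/ECB/PKCS5Padding here; the transformation Kylin
            // actually uses is not shown in this diff, so treat this as an assumption.
            Cipher cipher = Cipher.getInstance("AES");
            cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(KEY, "AES"));
            byte[] encrypted = cipher.doFinal(password.getBytes(StandardCharsets.UTF_8));
            return Base64.getEncoder().encodeToString(encrypted);
        }
    }
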
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index 6c2f765..98cef3a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -51,8 +51,7 @@ public class RealAclHBaseStorage implements AclHBaseStorage {
             return aclTableName;
         } else if (clazz == LegacyUserService.class) {
             userTableName = tableNameBase + USER_TABLE_NAME;
-            HBaseConnection.createHTableIfNeeded(hbaseUrl, userTableName, USER_AUTHORITY_FAMILY,
-                    QueryService.USER_QUERY_FAMILY);
+            HBaseConnection.createHTableIfNeeded(hbaseUrl, userTableName, USER_AUTHORITY_FAMILY, QueryService.USER_QUERY_FAMILY);
             return userTableName;
         } else {
             throw new IllegalStateException("prepareHBaseTable for unknown class: " + clazz);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/UnauthorisedEntryPoint.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/UnauthorisedEntryPoint.java b/server-base/src/main/java/org/apache/kylin/rest/security/UnauthorisedEntryPoint.java
index 06fdb00..253bae3 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/UnauthorisedEntryPoint.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/UnauthorisedEntryPoint.java
@@ -37,8 +37,7 @@ import org.springframework.stereotype.Component;
 @Component(value = "unauthorisedEntryPoint")
 public class UnauthorisedEntryPoint implements AuthenticationEntryPoint {
 
-    public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException exception)
-            throws IOException, ServletException {
+    public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException exception) throws IOException, ServletException {
         response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized");
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/AccessService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AccessService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AccessService.java
index 52c539d..9ae372a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AccessService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AccessService.java
@@ -306,8 +306,7 @@ public class AccessService {
         // Cause there is a circle reference in AccessControlEntry, it needs to
         // set acl to null as a workaround.
         for (AccessControlEntry ace : acl.getEntries()) {
-            accessControlEntities
-                    .add(new AccessEntryResponse(ace.getId(), ace.getSid(), ace.getPermission(), ace.isGranting()));
+            accessControlEntities.add(new AccessEntryResponse(ace.getId(), ace.getSid(), ace.getPermission(), ace.isGranting()));
         }
 
         return accessControlEntities;
@@ -323,8 +322,7 @@ public class AccessService {
         Message msg = MsgPicker.getMsg();
 
         // Can't revoke admin permission from domain object owner
-        if (acl.getOwner().equals(acl.getEntries().get(indexOfAce).getSid())
-                && BasePermission.ADMINISTRATION.equals(acl.getEntries().get(indexOfAce).getPermission())) {
+        if (acl.getOwner().equals(acl.getEntries().get(indexOfAce).getSid()) && BasePermission.ADMINISTRATION.equals(acl.getEntries().get(indexOfAce).getPermission())) {
             throw new ForbiddenException(msg.getREVOKE_ADMIN_PERMISSION());
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 5bbfe83..6292c00 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -107,8 +107,7 @@ public class AclService implements MutableAclService {
     public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
         List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
         try {
-            List<AclRecord> allAclRecords = aclStore.getAllResources(String.valueOf(DIR_PREFIX), AclRecord.class,
-                    AclRecordSerializer.getInstance());
+            List<AclRecord> allAclRecords = aclStore.getAllResources(String.valueOf(DIR_PREFIX), AclRecord.class, AclRecordSerializer.getInstance());
             for (AclRecord record : allAclRecords) {
                 DomainObjectInfo parent = record.getParentDomainObjectInfo();
                 if (parent != null && parent.getId().equals(String.valueOf(parentIdentity.getIdentifier()))) {
@@ -149,13 +148,10 @@ public class AclService implements MutableAclService {
         Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
         try {
             for (ObjectIdentity oid : oids) {
-                AclRecord record = aclStore.getResource(getQueryKeyById(String.valueOf(oid.getIdentifier())),
-                        AclRecord.class, AclRecordSerializer.getInstance());
+                AclRecord record = aclStore.getResource(getQueryKeyById(String.valueOf(oid.getIdentifier())), AclRecord.class, AclRecordSerializer.getInstance());
                 if (record != null) {
                     SidInfo owner = record.getOwnerInfo();
-                    Sid ownerSid = (null == owner) ? null
-                            : (owner.isPrincipal() ? new PrincipalSid(owner.getSid())
-                                    : new GrantedAuthoritySid(owner.getSid()));
+                    Sid ownerSid = (null == owner) ? null : (owner.isPrincipal() ? new PrincipalSid(owner.getSid()) : new GrantedAuthoritySid(owner.getSid()));
                     boolean entriesInheriting = record.isEntriesInheriting();
 
                     Acl parentAcl = null;
@@ -165,8 +161,7 @@ public class AclService implements MutableAclService {
                         parentAcl = readAclById(parentObject, null);
                     }
 
-                    AclImpl acl = new AclImpl(oid, oid.getIdentifier(), aclAuthorizationStrategy,
-                            permissionGrantingStrategy, parentAcl, null, entriesInheriting, ownerSid);
+                    AclImpl acl = new AclImpl(oid, oid.getIdentifier(), aclAuthorizationStrategy, permissionGrantingStrategy, parentAcl, null, entriesInheriting, ownerSid);
                     genAces(sids, record, acl);
 
                     aclMaps.put(oid, acl);
@@ -196,8 +191,7 @@ public class AclService implements MutableAclService {
         PrincipalSid sid = new PrincipalSid(auth);
         try {
             AclRecord record = new AclRecord(new DomainObjectInfo(objectIdentity), null, new SidInfo(sid), true, null);
-            aclStore.putResource(getQueryKeyById(String.valueOf(objectIdentity.getIdentifier())), record, 0,
-                    AclRecordSerializer.getInstance());
+            aclStore.putResource(getQueryKeyById(String.valueOf(objectIdentity.getIdentifier())), record, 0, AclRecordSerializer.getInstance());
             logger.debug("ACL of " + objectIdentity + " created successfully.");
         } catch (IOException e) {
             throw new InternalErrorException(e);
@@ -263,8 +257,7 @@ public class AclService implements MutableAclService {
         return (MutableAcl) readAclById(mutableAcl.getObjectIdentity());
     }
 
-    protected void genAces(List<Sid> sids, AclRecord record, AclImpl acl)
-            throws JsonParseException, JsonMappingException, IOException {
+    protected void genAces(List<Sid> sids, AclRecord record, AclImpl acl) throws JsonParseException, JsonMappingException, IOException {
         List<AceInfo> aceInfos = new ArrayList<AceInfo>();
         Map<String, AceInfo> allAceInfos = record.getAllAceInfo();
         if (allAceInfos != null) {
@@ -294,10 +287,8 @@ public class AclService implements MutableAclService {
             AceInfo aceInfo = aceInfos.get(i);
 
             if (null != aceInfo) {
-                Sid sid = aceInfo.getSidInfo().isPrincipal() ? new PrincipalSid(aceInfo.getSidInfo().getSid())
-                        : new GrantedAuthoritySid(aceInfo.getSidInfo().getSid());
-                AccessControlEntry ace = new AccessControlEntryImpl(Long.valueOf(i), acl, sid,
-                        aclPermissionFactory.buildFromMask(aceInfo.getPermissionMask()), true, false, false);
+                Sid sid = aceInfo.getSidInfo().isPrincipal() ? new PrincipalSid(aceInfo.getSidInfo().getSid()) : new GrantedAuthoritySid(aceInfo.getSidInfo().getSid());
+                AccessControlEntry ace = new AccessControlEntryImpl(Long.valueOf(i), acl, sid, aclPermissionFactory.buildFromMask(aceInfo.getPermissionMask()), true, false, false);
                 newAces.add(ace);
             }
         }
@@ -365,8 +356,7 @@ class AclRecord extends RootPersistentEntity {
     public AclRecord() {
     }
 
-    public AclRecord(DomainObjectInfo domainObjectInfo, DomainObjectInfo parentDomainObjectInfo, SidInfo ownerInfo,
-            boolean entriesInheriting, Map<String, AceInfo> allAceInfo) {
+    public AclRecord(DomainObjectInfo domainObjectInfo, DomainObjectInfo parentDomainObjectInfo, SidInfo ownerInfo, boolean entriesInheriting, Map<String, AceInfo> allAceInfo) {
         this.domainObjectInfo = domainObjectInfo;
         this.parentDomainObjectInfo = parentDomainObjectInfo;
         this.ownerInfo = ownerInfo;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
index 4aeb450..d88ca74 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
@@ -54,13 +54,11 @@ public class AclTableMigrationTool {
 
     private static final Serializer<SidInfo> sidSerializer = new Serializer<SidInfo>(SidInfo.class);
 
-    private static final Serializer<DomainObjectInfo> domainObjSerializer = new Serializer<DomainObjectInfo>(
-            DomainObjectInfo.class);
+    private static final Serializer<DomainObjectInfo> domainObjSerializer = new Serializer<DomainObjectInfo>(DomainObjectInfo.class);
 
     private static final Serializer<AceInfo> aceSerializer = new Serializer<AceInfo>(AceInfo.class);
 
-    private static final Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(
-            UserGrantedAuthority[].class);
+    private static final Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(UserGrantedAuthority[].class);
 
     public static final String MIGRATE_OK_PREFIX = AclService.DIR_PREFIX + "MIGRATE_OK_";
 
@@ -72,8 +70,7 @@ public class AclTableMigrationTool {
             return;
         } else {
             if (!kylinConfig.getServerMode().equals("all")) {
-                throw new IllegalStateException(
-                        "Please make sure that you have config kylin.server.mode=all before migrating data");
+                throw new IllegalStateException("Please make sure that you have config kylin.server.mode=all before migrating data");
             }
             logger.info("Start to migrate acl table data");
             ResourceStore store = ResourceStore.getStore(kylinConfig);
@@ -132,8 +129,7 @@ public class AclTableMigrationTool {
                         record.setEntriesInheriting(getInheriting(result));
                         record.setAllAceInfo(getAllAceInfo(result));
                         store.deleteResource(AclService.getQueryKeyById(object.getId()));
-                        store.putResource(AclService.getQueryKeyById(object.getId()), record, 0,
-                                AclService.AclRecordSerializer.getInstance());
+                        store.putResource(AclService.getQueryKeyById(object.getId()), record, 0, AclService.AclRecordSerializer.getInstance());
                         result = rs.next();
                     }
                 }
@@ -152,8 +148,7 @@ public class AclTableMigrationTool {
                         User user = hbaseRowToUser(result);
                         UserInfo userInfo = convert(user);
                         store.deleteResource(UserService.getId(userInfo.getUsername()));
-                        store.putResource(UserService.getId(userInfo.getUsername()), userInfo, 0,
-                                UserService.UserInfoSerializer.getInstance());
+                        store.putResource(UserService.getId(userInfo.getUsername()), userInfo, 0, UserService.UserInfoSerializer.getInstance());
                         result = rs.next();
                     }
                 }
@@ -176,8 +171,7 @@ public class AclTableMigrationTool {
         return store.exists(MIGRATE_OK_PREFIX + tableName);
     }
 
-    private void convertToResourceStore(KylinConfig kylinConfig, String tableName, ResourceStore store,
-            ResultConverter converter) throws IOException {
+    private void convertToResourceStore(KylinConfig kylinConfig, String tableName, ResourceStore store, ResultConverter converter) throws IOException {
 
         Table table = null;
         ResultScanner rs = null;
@@ -186,8 +180,7 @@ public class AclTableMigrationTool {
             table = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(tableName));
             rs = table.getScanner(scan);
             converter.convertResult(rs, store);
-            store.putResource(MIGRATE_OK_PREFIX + tableName, new StringEntity(tableName + " migrated"),
-                    StringEntity.serializer);
+            store.putResource(MIGRATE_OK_PREFIX + tableName, new StringEntity(tableName + " migrated"), StringEntity.serializer);
         } finally {
             IOUtils.closeQuietly(rs);
             IOUtils.closeQuietly(table);
@@ -196,8 +189,7 @@ public class AclTableMigrationTool {
     }
 
     private DomainObjectInfo getDomainObjectInfoFromRs(Result result) {
-        String type = String.valueOf(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_TYPE_COLUMN)));
+        String type = String.valueOf(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_TYPE_COLUMN)));
         String id = String.valueOf(result.getRow());
         DomainObjectInfo newInfo = new DomainObjectInfo();
         newInfo.setId(id);
@@ -206,21 +198,17 @@ public class AclTableMigrationTool {
     }
 
     private DomainObjectInfo getParentDomainObjectInfoFromRs(Result result) throws IOException {
-        DomainObjectInfo parentInfo = domainObjSerializer
-                .deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                        Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_PARENT_COLUMN)));
+        DomainObjectInfo parentInfo = domainObjSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_PARENT_COLUMN)));
         return parentInfo;
     }
 
     private boolean getInheriting(Result result) {
-        boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
+        boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
         return entriesInheriting;
     }
 
     private SidInfo getOwnerSidInfo(Result result) throws IOException {
-        SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_OWNER_COLUMN)));
+        SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_OWNER_COLUMN)));
         return owner;
     }
 
@@ -257,8 +245,7 @@ public class AclTableMigrationTool {
 
         String username = Bytes.toString(result.getRow());
 
-        byte[] valueBytes = result.getValue(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY),
-                Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
+        byte[] valueBytes = result.getValue(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
         UserGrantedAuthority[] deserialized = ugaSerializer.deserialize(valueBytes);
 
         String password = "";

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/BadQueryDetector.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/BadQueryDetector.java b/server-base/src/main/java/org/apache/kylin/rest/service/BadQueryDetector.java
index 3e4bf01..64f91b1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/BadQueryDetector.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/BadQueryDetector.java
@@ -95,8 +95,7 @@ public class BadQueryDetector extends Thread {
         notifiers.add(notifier);
     }
 
-    private void notify(String adj, float runningSec, long startTime, String project, String sql, String user,
-            Thread t) {
+    private void notify(String adj, float runningSec, long startTime, String project, String sql, String user, Thread t) {
         for (Notifier notifier : notifiers) {
             try {
                 notifier.badQueryFound(adj, runningSec, startTime, project, sql, user, t);
@@ -141,8 +140,7 @@ public class BadQueryDetector extends Thread {
         for (Entry e : entries) {
             float runningSec = (float) (now - e.startTime) / 1000;
             if (runningSec >= alertRunningSec) {
-                notify("Slow", runningSec, e.startTime, e.sqlRequest.getProject(), e.sqlRequest.getSql(), e.user,
-                        e.thread);
+                notify("Slow", runningSec, e.startTime, e.sqlRequest.getProject(), e.sqlRequest.getSql(), e.user, e.thread);
                 dumpStackTrace(e.thread);
             } else {
                 break; // entries are sorted by startTime
@@ -173,16 +171,13 @@ public class BadQueryDetector extends Thread {
     }
 
     public interface Notifier {
-        void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user,
-                Thread t);
+        void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user, Thread t);
     }
 
     private class LoggerNotifier implements Notifier {
         @Override
-        public void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user,
-                Thread t) {
-            logger.info("{} query has been running {} seconds (project:{}, thread: 0x{}, user:{}) -- {}", adj,
-                    runningSec, project, Long.toHexString(t.getId()), user, sql);
+        public void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user, Thread t) {
+            logger.info("{} query has been running {} seconds (project:{}, thread: 0x{}, user:{}) -- {}", adj, runningSec, project, Long.toHexString(t.getId()), user, sql);
         }
     }
 
@@ -212,23 +207,18 @@ public class BadQueryDetector extends Thread {
         }
 
         @Override
-        public void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user,
-                Thread t) {
+        public void badQueryFound(String adj, float runningSec, long startTime, String project, String sql, String user, Thread t) {
             try {
                 long cachingSeconds = (kylinConfig.getBadQueryDefaultAlertingSeconds() + 1) * 30L;
                 Pair<Long, String> sqlPair = new Pair<>(startTime, sql);
                 if (!cacheQueue.contains(sqlPair)) {
-                    badQueryManager.addEntryToProject(sql, startTime, adj, runningSec, serverHostname, t.getName(),
-                            user, project);
+                    badQueryManager.addEntryToProject(sql, startTime, adj, runningSec, serverHostname, t.getName(), user, project);
                     cacheQueue.add(sqlPair);
-                    while (!cacheQueue.isEmpty()
-                            && (System.currentTimeMillis() - cacheQueue.first().getFirst() > cachingSeconds * 1000
-                                    || cacheQueue.size() > kylinConfig.getBadQueryHistoryNum() * 3)) {
+                    while (!cacheQueue.isEmpty() && (System.currentTimeMillis() - cacheQueue.first().getFirst() > cachingSeconds * 1000 || cacheQueue.size() > kylinConfig.getBadQueryHistoryNum() * 3)) {
                         cacheQueue.pollFirst();
                     }
                 } else {
-                    badQueryManager.updateEntryToProject(sql, startTime, adj, runningSec, serverHostname, t.getName(),
-                            user, project);
+                    badQueryManager.updateEntryToProject(sql, startTime, adj, runningSec, serverHostname, t.getName(), user, project);
                 }
             } catch (IOException e) {
                 logger.error("Error in bad query persistence.", e);
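
Aside: the Notifier callback whose seven-argument signature is re-joined above is a public nested interface, so custom notifiers can be written outside the detector. A minimal, hypothetical implementation is sketched below; only the badQueryFound signature comes from the code shown, while the class name and output format are illustrative.

    import org.apache.kylin.rest.service.BadQueryDetector;

    public class StdoutNotifier implements BadQueryDetector.Notifier {

        @Override
        public void badQueryFound(String adj, float runningSec, long startTime,
                String project, String sql, String user, Thread t) {
            // Same information as LoggerNotifier above, written to stdout instead.
            System.out.println(adj + " query has been running " + runningSec
                    + " seconds (project:" + project + ", user:" + user + ") -- " + sql);
        }
    }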

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
index 47033b3..96b34ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
@@ -70,8 +70,7 @@ public class CacheService extends BasicService {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if ("cube".equals(entity) && event == Event.UPDATE) {
                 final String cubeName = cacheKey;
                 new Thread() { // do not block the event broadcast thread

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index c04eef0..22baabe 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -114,8 +114,7 @@ public class CubeService extends BasicService {
 
         if (modelName != null) {
             for (CubeInstance cubeInstance : cubeInstances) {
-                boolean isCubeMatch = cubeInstance.getDescriptor().getModelName().toLowerCase()
-                        .equals(modelName.toLowerCase());
+                boolean isCubeMatch = cubeInstance.getDescriptor().getModelName().toLowerCase().equals(modelName.toLowerCase());
                 if (isCubeMatch) {
                     filterModelCubes.add(cubeInstance);
                 }
@@ -126,8 +125,7 @@ public class CubeService extends BasicService {
 
         List<CubeInstance> filterCubes = new ArrayList<CubeInstance>();
         for (CubeInstance cubeInstance : filterModelCubes) {
-            boolean isCubeMatch = (null == cubeName)
-                    || cubeInstance.getName().toLowerCase().contains(cubeName.toLowerCase());
+            boolean isCubeMatch = (null == cubeName) || cubeInstance.getName().toLowerCase().contains(cubeName.toLowerCase());
 
             if (isCubeMatch) {
                 filterCubes.add(cubeInstance);
@@ -137,8 +135,7 @@ public class CubeService extends BasicService {
         return filterCubes;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance updateCubeCost(CubeInstance cube, int cost) throws IOException {
 
         if (cube.getCost() == cost) {
@@ -226,8 +223,7 @@ public class CubeService extends BasicService {
             if (projectDataModel.getType() == RealizationType.CUBE) {
                 CubeInstance cube = getCubeManager().getCube(projectDataModel.getRealization());
                 if (cube == null) {
-                    logger.error("Project " + projectName + " contains realization " + projectDataModel.getRealization()
-                            + " which is not found by CubeManager");
+                    logger.error("Project " + projectName + " contains realization " + projectDataModel.getRealization() + " which is not found by CubeManager");
                     continue;
                 }
                 if (cube.equals(target)) {
@@ -238,14 +234,11 @@ public class CubeService extends BasicService {
         return false;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
-    public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newProjectName, boolean forceUpdate)
-            throws IOException {
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newProjectName, boolean forceUpdate) throws IOException {
         Message msg = MsgPicker.getMsg();
 
-        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null,
-                EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
+        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
         if (!cubingJobs.isEmpty()) {
             throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
@@ -264,21 +257,18 @@ public class CubeService extends BasicService {
         ProjectManager projectManager = getProjectManager();
         if (!isCubeInProject(newProjectName, cube)) {
             String owner = SecurityContextHolder.getContext().getAuthentication().getName();
-            ProjectInstance newProject = projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(),
-                    newProjectName, owner);
+            ProjectInstance newProject = projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(), newProjectName, owner);
             accessService.inherit(cube, newProject);
         }
 
         return updatedCubeDesc;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
     public void deleteCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
-        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null,
-                EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR));
+        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR));
         if (!cubingJobs.isEmpty()) {
             throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
@@ -303,8 +293,7 @@ public class CubeService extends BasicService {
      * @throws IOException
      * @throws JobException
      */
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance purgeCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -326,8 +315,7 @@ public class CubeService extends BasicService {
      * @throws IOException
      * @throws JobException
      */
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance disableCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -356,8 +344,7 @@ public class CubeService extends BasicService {
      * @return
      * @throws IOException
      */
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance enableCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -372,14 +359,12 @@ public class CubeService extends BasicService {
             throw new BadRequestException(String.format(msg.getNO_READY_SEGMENT(), cubeName));
         }
 
-        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null,
-                EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
+        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
         if (!cubingJobs.isEmpty()) {
             throw new BadRequestException(msg.getENABLE_WITH_RUNNING_JOB());
         }
         if (!cube.getDescriptor().checkSignature()) {
-            throw new BadRequestException(
-                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+            throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
         }
 
         try {
@@ -409,8 +394,7 @@ public class CubeService extends BasicService {
             }
         }
 
-        metrics.increase("aveStorage",
-                (metrics.get("totalCubes") == 0) ? 0 : metrics.get("totalStorage") / metrics.get("totalCubes"));
+        metrics.increase("aveStorage", (metrics.get("totalCubes") == 0) ? 0 : metrics.get("totalStorage") / metrics.get("totalCubes"));
 
         return metrics;
     }
@@ -451,31 +435,26 @@ public class CubeService extends BasicService {
         return hr;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
     public void updateCubeNotifyList(CubeInstance cube, List<String> notifyList) throws IOException {
         CubeDesc desc = cube.getDescriptor();
         desc.setNotifyList(notifyList);
         getCubeDescManager().updateCubeDesc(desc);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
-    public CubeInstance rebuildLookupSnapshot(CubeInstance cube, String segmentName, String lookupTable)
-            throws IOException {
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    public CubeInstance rebuildLookupSnapshot(CubeInstance cube, String segmentName, String lookupTable) throws IOException {
         CubeSegment seg = cube.getSegment(segmentName, SegmentStatusEnum.READY);
         getCubeManager().buildSnapshotTable(seg, lookupTable);
 
         return cube;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance deleteSegment(CubeInstance cube, String segmentName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
-        if (!segmentName.equals(cube.getSegments().get(0).getName())
-                && !segmentName.equals(cube.getSegments().get(cube.getSegments().size() - 1).getName())) {
+        if (!segmentName.equals(cube.getSegments().get(0).getName()) && !segmentName.equals(cube.getSegments().get(cube.getSegments().size() - 1).getName())) {
             throw new BadRequestException(String.format(msg.getDELETE_NOT_FIRST_LAST_SEG(), segmentName));
         }
         CubeSegment toDelete = null;
@@ -525,8 +504,7 @@ public class CubeService extends BasicService {
     public void updateOnNewSegmentReady(String cubeName) {
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase())
-                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
             CubeInstance cube = getCubeManager().getCube(cubeName);
             if (cube != null) {
                 CubeSegment seg = cube.getLatestBuiltSegment();
@@ -584,8 +562,7 @@ public class CubeService extends BasicService {
                 cube = getCubeManager().getCube(cubeName);
                 Pair<Long, Long> offsets = getCubeManager().autoMergeCubeSegments(cube);
                 if (offsets != null) {
-                    CubeSegment newSeg = getCubeManager().mergeSegments(cube, 0, 0, offsets.getFirst(),
-                            offsets.getSecond(), true);
+                    CubeSegment newSeg = getCubeManager().mergeSegments(cube, 0, 0, offsets.getFirst(), offsets.getSecond(), true);
                     logger.debug("Will submit merge job on " + newSeg);
                     DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(newSeg, "SYSTEM");
                     getExecutableManager().addJob(job);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
index 2067b34..35b018c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
@@ -52,8 +52,7 @@ public class DiagnosisService extends BasicService {
 
         File[] files = destDir.listFiles();
         if (files == null) {
-            throw new BadRequestException(
-                    String.format(msg.getDIAG_PACKAGE_NOT_AVAILABLE(), destDir.getAbsolutePath()));
+            throw new BadRequestException(String.format(msg.getDIAG_PACKAGE_NOT_AVAILABLE(), destDir.getAbsolutePath()));
         }
         for (File subDir : files) {
             if (subDir.isDirectory()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/DomainObjectInfo.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DomainObjectInfo.java b/server-base/src/main/java/org/apache/kylin/rest/service/DomainObjectInfo.java
index 524a426..f07a65e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DomainObjectInfo.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DomainObjectInfo.java
@@ -20,6 +20,7 @@ package org.apache.kylin.rest.service;
 
 import org.springframework.security.acls.model.ObjectIdentity;
 
+
 class DomainObjectInfo {
     private String id;
     private String type;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
index 9eccddc..f3742de 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
@@ -42,16 +42,13 @@ public class EncodingService extends BasicService {
         Message msg = MsgPicker.getMsg();
 
         if (dataType.isIntegerFamily()) {
-            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME,
-                    DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
+            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
         } else if (dataType.isNumberFamily()) { //numbers include integers
             return Lists.newArrayList(DictionaryDimEnc.ENCODING_NAME);
         } else if (dataType.isDateTimeFamily()) {
-            return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME,
-                    DictionaryDimEnc.ENCODING_NAME);
+            return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME);
         } else if (dataType.isStringFamily()) {
-            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME,
-                    FixedLenDimEnc.ENCODING_NAME, //
+            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, FixedLenDimEnc.ENCODING_NAME, //
                     FixedLenHexDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
         } else {
             throw new BadRequestException(String.format(msg.getVALID_ENCODING_NOT_AVAILABLE(), dataType));
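
Note: the EncodingService hunk above only re-joins argument lists; the type-family-to-encoding mapping it carries is unchanged. For readers skimming the revert, a standalone sketch of that mapping (not Kylin code; the literal strings are stand-ins for the *DimEnc.ENCODING_NAME constants named in the diff) is:

    import java.util.Arrays;
    import java.util.List;

    public class EncodingChoicesSketch {
        // Mirrors the branches of EncodingService.getValidEncodings() shown above.
        static List<String> validEncodings(String typeFamily) {
            switch (typeFamily) {
            case "integer":
                return Arrays.asList("boolean", "date", "time", "dict", "integer");
            case "number": // numbers include integers, but plain numerics only get dictionary encoding
                return Arrays.asList("dict");
            case "datetime":
                return Arrays.asList("date", "time", "dict");
            case "string":
                return Arrays.asList("boolean", "dict", "fixed_length", "fixed_length_hex", "integer");
            default:
                throw new IllegalArgumentException("no valid encoding for type family: " + typeFamily);
            }
        }

        public static void main(String[] args) {
            System.out.println(validEncodings("datetime")); // prints [date, time, dict]
        }
    }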

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/HybridService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/HybridService.java b/server-base/src/main/java/org/apache/kylin/rest/service/HybridService.java
index 5b1ba2d..acb82b5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/HybridService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/HybridService.java
@@ -42,8 +42,7 @@ public class HybridService extends BasicService {
     private static final Logger logger = LoggerFactory.getLogger(HybridService.class);
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or " + Constant.ACCESS_HAS_ROLE_MODELER)
-    public HybridInstance createHybridCube(String hybridName, String projectName, String modelName,
-            String[] cubeNames) {
+    public HybridInstance createHybridCube(String hybridName, String projectName, String modelName, String[] cubeNames) {
         List<String> args = new ArrayList<String>();
         args.add("-name");
         args.add(hybridName);
@@ -64,10 +63,8 @@ public class HybridService extends BasicService {
         return getHybridInstance(hybridName);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
-    public HybridInstance updateHybridCube(String hybridName, String projectName, String modelName,
-            String[] cubeNames) {
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    public HybridInstance updateHybridCube(String hybridName, String projectName, String modelName, String[] cubeNames) {
         List<String> args = new ArrayList<String>();
         args.add("-name");
         args.add(hybridName);
@@ -88,8 +85,7 @@ public class HybridService extends BasicService {
         return getHybridInstance(hybridName);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
     public void deleteHybridCube(String hybridName, String projectName, String modelName) {
         List<String> args = new ArrayList<String>();
         args.add("-name");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index 4741a24..056f7b1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -107,8 +107,7 @@ public class JobService extends BasicService implements InitializingBean {
         TimeZone.setDefault(tzone);
 
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        final Scheduler<AbstractExecutable> scheduler = (Scheduler<AbstractExecutable>) SchedulerFactory
-                .scheduler(kylinConfig.getSchedulerType());
+        final Scheduler<AbstractExecutable> scheduler = (Scheduler<AbstractExecutable>) SchedulerFactory.scheduler(kylinConfig.getSchedulerType());
 
         jobLock = (JobLock) ClassUtil.newInstance(kylinConfig.getJobControllerLock());
 
@@ -155,22 +154,22 @@ public class JobService extends BasicService implements InitializingBean {
         Message msg = MsgPicker.getMsg();
 
         switch (status) {
-        case DISCARDED:
-            return ExecutableState.DISCARDED;
-        case ERROR:
-            return ExecutableState.ERROR;
-        case FINISHED:
-            return ExecutableState.SUCCEED;
-        case NEW:
-            return ExecutableState.READY;
-        case PENDING:
-            return ExecutableState.READY;
-        case RUNNING:
-            return ExecutableState.RUNNING;
-        case STOPPED:
-            return ExecutableState.STOPPED;
-        default:
-            throw new BadRequestException(String.format(msg.getILLEGAL_EXECUTABLE_STATE(), status));
+            case DISCARDED:
+                return ExecutableState.DISCARDED;
+            case ERROR:
+                return ExecutableState.ERROR;
+            case FINISHED:
+                return ExecutableState.SUCCEED;
+            case NEW:
+                return ExecutableState.READY;
+            case PENDING:
+                return ExecutableState.READY;
+            case RUNNING:
+                return ExecutableState.RUNNING;
+            case STOPPED:
+                return ExecutableState.STOPPED;
+            default:
+                throw new BadRequestException(String.format(msg.getILLEGAL_EXECUTABLE_STATE(), status));
         }
     }
 
@@ -178,30 +177,28 @@ public class JobService extends BasicService implements InitializingBean {
         Message msg = MsgPicker.getMsg();
 
         switch (timeFilter) {
-        case LAST_ONE_DAY:
-            calendar.add(Calendar.DAY_OF_MONTH, -1);
-            return calendar.getTimeInMillis();
-        case LAST_ONE_WEEK:
-            calendar.add(Calendar.WEEK_OF_MONTH, -1);
-            return calendar.getTimeInMillis();
-        case LAST_ONE_MONTH:
-            calendar.add(Calendar.MONTH, -1);
-            return calendar.getTimeInMillis();
-        case LAST_ONE_YEAR:
-            calendar.add(Calendar.YEAR, -1);
-            return calendar.getTimeInMillis();
-        case ALL:
-            return 0;
-        default:
-            throw new BadRequestException(String.format(msg.getILLEGAL_TIME_FILTER(), timeFilter));
+            case LAST_ONE_DAY:
+                calendar.add(Calendar.DAY_OF_MONTH, -1);
+                return calendar.getTimeInMillis();
+            case LAST_ONE_WEEK:
+                calendar.add(Calendar.WEEK_OF_MONTH, -1);
+                return calendar.getTimeInMillis();
+            case LAST_ONE_MONTH:
+                calendar.add(Calendar.MONTH, -1);
+                return calendar.getTimeInMillis();
+            case LAST_ONE_YEAR:
+                calendar.add(Calendar.YEAR, -1);
+                return calendar.getTimeInMillis();
+            case ALL:
+                return 0;
+            default:
+                throw new BadRequestException(String.format(msg.getILLEGAL_TIME_FILTER(), timeFilter));
         }
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, //
-            Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd,
-            CubeBuildTypeEnum buildType, boolean force, String submitter) throws IOException {
+                                 Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd, CubeBuildTypeEnum buildType, boolean force, String submitter) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
@@ -215,8 +212,7 @@ public class JobService extends BasicService implements InitializingBean {
         try {
             if (buildType == CubeBuildTypeEnum.BUILD) {
                 ISource source = SourceFactory.getSource(cube);
-                SourcePartition sourcePartition = new SourcePartition(startDate, endDate, startOffset, endOffset,
-                        sourcePartitionOffsetStart, sourcePartitionOffsetEnd);
+                SourcePartition sourcePartition = new SourcePartition(startDate, endDate, startOffset, endOffset, sourcePartitionOffsetStart, sourcePartitionOffsetEnd);
                 sourcePartition = source.enrichSourcePartitionBeforeBuild(cube, sourcePartition);
                 newSeg = getCubeManager().appendSegment(cube, sourcePartition);
                 job = EngineFactory.createBatchCubingJob(newSeg, submitter);
@@ -234,8 +230,7 @@ public class JobService extends BasicService implements InitializingBean {
 
         } catch (Exception e) {
             if (newSeg != null) {
-                logger.error("Job submission might failed for NEW segment {}, will clean the NEW segment from cube",
-                        newSeg.getName());
+                logger.error("Job submission might failed for NEW segment {}, will clean the NEW segment from cube", newSeg.getName());
                 try {
                     // Remove this segments
                     CubeUpdate cubeBuilder = new CubeUpdate(cube);
@@ -261,8 +256,7 @@ public class JobService extends BasicService implements InitializingBean {
         Message msg = MsgPicker.getMsg();
 
         if (!cube.getDescriptor().checkSignature())
-            throw new BadRequestException(
-                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+            throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
     }
 
     public JobInstance getJobInstance(String uuid) {
@@ -302,20 +296,17 @@ public class JobService extends BasicService implements InitializingBean {
         return result;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
     public void resumeJob(JobInstance job) {
         getExecutableManager().resumeJob(job.getId());
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
     public void rollbackJob(JobInstance job, String stepId) {
         getExecutableManager().rollbackJob(job.getId(), stepId);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
     public JobInstance cancelJob(JobInstance job) throws IOException {
         if (null == job.getRelatedCube() || null == getCubeManager().getCube(job.getRelatedCube())) {
             getExecutableManager().discardJob(job.getId());
@@ -338,15 +329,13 @@ public class JobService extends BasicService implements InitializingBean {
         return job;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
     public JobInstance pauseJob(JobInstance job) {
         getExecutableManager().pauseJob(job.getId());
         return job;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
-            + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
     public void dropJob(JobInstance job) throws IOException {
         cancelJob(job);
         getExecutableManager().deleteJob(job.getId());
@@ -356,9 +345,7 @@ public class JobService extends BasicService implements InitializingBean {
      * currently only support substring match
      * @return
      */
-    public List<JobInstance> searchJobs(final String cubeNameSubstring, final String projectName,
-            final List<JobStatusEnum> statusList, final Integer limitValue, final Integer offsetValue,
-            final JobTimeFilterEnum timeFilter) {
+    public List<JobInstance> searchJobs(final String cubeNameSubstring, final String projectName, final List<JobStatusEnum> statusList, final Integer limitValue, final Integer offsetValue, final JobTimeFilterEnum timeFilter) {
         Integer limit = (null == limitValue) ? 30 : limitValue;
         Integer offset = (null == offsetValue) ? 0 : offsetValue;
         List<JobInstance> jobs = searchJobs(cubeNameSubstring, projectName, statusList, timeFilter);
@@ -375,8 +362,7 @@ public class JobService extends BasicService implements InitializingBean {
         return jobs.subList(offset, offset + limit);
     }
 
-    public List<JobInstance> searchJobs(final String cubeNameSubstring, final String projectName,
-            final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
+    public List<JobInstance> searchJobs(final String cubeNameSubstring, final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
         Calendar calendar = Calendar.getInstance();
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
@@ -384,81 +370,69 @@ public class JobService extends BasicService implements InitializingBean {
         long timeEndInMillis = Long.MAX_VALUE;
         Set<ExecutableState> states = convertStatusEnumToStates(statusList);
         final Map<String, Output> allOutputs = getExecutableManager().getAllOutputs(timeStartInMillis, timeEndInMillis);
-        return Lists
-                .newArrayList(
-                        FluentIterable
-                                .from(searchCubingJobs(cubeNameSubstring, projectName, states, timeStartInMillis,
-                                        timeEndInMillis, allOutputs, false))
-                                .transform(new Function<CubingJob, JobInstance>() {
-                                    @Override
-                                    public JobInstance apply(CubingJob cubingJob) {
-                                        return JobInfoConverter.parseToJobInstance(cubingJob, allOutputs);
-                                    }
-                                }));
+        return Lists.newArrayList(FluentIterable.from(searchCubingJobs(cubeNameSubstring, projectName, states, timeStartInMillis, timeEndInMillis, allOutputs, false)).transform(new Function<CubingJob, JobInstance>() {
+            @Override
+            public JobInstance apply(CubingJob cubingJob) {
+                return JobInfoConverter.parseToJobInstance(cubingJob, allOutputs);
+            }
+        }));
     }
 
-    public List<CubingJob> searchCubingJobs(final String cubeName, final String projectName,
-            final Set<ExecutableState> statusList, long timeStartInMillis, long timeEndInMillis,
-            final Map<String, Output> allOutputs, final boolean cubeNameExactMatch) {
-        List<CubingJob> results = Lists.newArrayList(FluentIterable.from(
-                getExecutableManager().getAllAbstractExecutables(timeStartInMillis, timeEndInMillis, CubingJob.class))
-                .filter(new Predicate<AbstractExecutable>() {
-                    @Override
-                    public boolean apply(AbstractExecutable executable) {
-                        if (executable instanceof CubingJob) {
-                            if (StringUtils.isEmpty(cubeName)) {
-                                return true;
-                            }
-                            String executableCubeName = CubingExecutableUtil.getCubeName(executable.getParams());
-                            if (executableCubeName == null)
-                                return true;
-                            if (cubeNameExactMatch)
-                                return executableCubeName.equalsIgnoreCase(cubeName);
-                            else
-                                return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
-                        } else {
-                            return false;
-                        }
-                    }
-                }).transform(new Function<AbstractExecutable, CubingJob>() {
-                    @Override
-                    public CubingJob apply(AbstractExecutable executable) {
-                        return (CubingJob) executable;
-                    }
-                }).filter(Predicates.and(new Predicate<CubingJob>() {
-                    @Override
-                    public boolean apply(CubingJob executable) {
-                        if (null == projectName || null == getProjectManager().getProject(projectName)) {
-                            return true;
-                        } else {
-                            return projectName.equals(executable.getProjectName());
-                        }
-                    }
-                }, new Predicate<CubingJob>() {
-                    @Override
-                    public boolean apply(CubingJob executable) {
-                        try {
-                            Output output = allOutputs.get(executable.getId());
-                            ExecutableState state = output.getState();
-                            boolean ret = statusList.contains(state);
-                            return ret;
-                        } catch (Exception e) {
-                            throw e;
-                        }
+    public List<CubingJob> searchCubingJobs(final String cubeName, final String projectName, final Set<ExecutableState> statusList, long timeStartInMillis, long timeEndInMillis, final Map<String, Output> allOutputs, final boolean cubeNameExactMatch) {
+        List<CubingJob> results = Lists.newArrayList(FluentIterable.from(getExecutableManager().getAllAbstractExecutables(timeStartInMillis, timeEndInMillis, CubingJob.class)).filter(new Predicate<AbstractExecutable>() {
+            @Override
+            public boolean apply(AbstractExecutable executable) {
+                if (executable instanceof CubingJob) {
+                    if (StringUtils.isEmpty(cubeName)) {
+                        return true;
                     }
-                })));
+                    String executableCubeName = CubingExecutableUtil.getCubeName(executable.getParams());
+                    if (executableCubeName == null)
+                        return true;
+                    if (cubeNameExactMatch)
+                        return executableCubeName.equalsIgnoreCase(cubeName);
+                    else
+                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                } else {
+                    return false;
+                }
+            }
+        }).transform(new Function<AbstractExecutable, CubingJob>() {
+            @Override
+            public CubingJob apply(AbstractExecutable executable) {
+                return (CubingJob) executable;
+            }
+        }).filter(Predicates.and(new Predicate<CubingJob>() {
+            @Override
+            public boolean apply(CubingJob executable) {
+                if (null == projectName || null == getProjectManager().getProject(projectName)) {
+                    return true;
+                } else {
+                    return projectName.equals(executable.getProjectName());
+                }
+            }
+        }, new Predicate<CubingJob>() {
+            @Override
+            public boolean apply(CubingJob executable) {
+                try {
+                    Output output = allOutputs.get(executable.getId());
+                    ExecutableState state = output.getState();
+                    boolean ret = statusList.contains(state);
+                    return ret;
+                } catch (Exception e) {
+                    throw e;
+                }
+            }
+        })));
         return results;
     }
 
-    public List<CubingJob> listAllCubingJobs(final String cubeName, final String projectName,
-            final Set<ExecutableState> statusList) {
-        return searchCubingJobs(cubeName, projectName, statusList, 0L, Long.MAX_VALUE,
-                getExecutableManager().getAllOutputs(), true);
+    public List<CubingJob> listAllCubingJobs(final String cubeName, final String projectName, final Set<ExecutableState> statusList) {
+        return searchCubingJobs(cubeName, projectName, statusList, 0L, Long.MAX_VALUE, getExecutableManager().getAllOutputs(), true);
     }
 
     public List<CubingJob> listAllCubingJobs(final String cubeName, final String projectName) {
-        return searchCubingJobs(cubeName, projectName, EnumSet.allOf(ExecutableState.class), 0L, Long.MAX_VALUE,
-                getExecutableManager().getAllOutputs(), true);
+        return searchCubingJobs(cubeName, projectName, EnumSet.allOf(ExecutableState.class), 0L, Long.MAX_VALUE, getExecutableManager().getAllOutputs(), true);
     }
 
 }
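
Note: the getTimeStartInMillis() hunk above is indentation-only, but the logic it reformats is easy to restate: each JobTimeFilterEnum value becomes a lower bound on job modification time by stepping a Calendar back from "now", and ALL means no lower bound. A minimal standalone sketch (not Kylin code; plain strings stand in for the enum values) is:

    import java.util.Calendar;

    public class JobTimeFilterSketch {
        // Mirrors the switch in JobService.getTimeStartInMillis() shown above.
        static long timeStartInMillis(Calendar calendar, String timeFilter) {
            switch (timeFilter) {
            case "LAST_ONE_DAY":
                calendar.add(Calendar.DAY_OF_MONTH, -1);
                return calendar.getTimeInMillis();
            case "LAST_ONE_WEEK":
                calendar.add(Calendar.WEEK_OF_MONTH, -1);
                return calendar.getTimeInMillis();
            case "LAST_ONE_MONTH":
                calendar.add(Calendar.MONTH, -1);
                return calendar.getTimeInMillis();
            case "LAST_ONE_YEAR":
                calendar.add(Calendar.YEAR, -1);
                return calendar.getTimeInMillis();
            case "ALL":
                return 0; // no lower bound: include all jobs
            default:
                throw new IllegalArgumentException("illegal time filter: " + timeFilter);
            }
        }

        public static void main(String[] args) {
            long oneWeekAgo = timeStartInMillis(Calendar.getInstance(), "LAST_ONE_WEEK");
            System.out.println(oneWeekAgo); // epoch millis, one week before now
        }
    }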

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java b/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
index a5af1fe..0dbe6f2 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
@@ -51,8 +51,7 @@ public class KafkaConfigService extends BasicService {
         return kafkaConfigs;
     }
 
-    public List<KafkaConfig> getKafkaConfigs(final String kafkaConfigName, final Integer limit, final Integer offset)
-            throws IOException {
+    public List<KafkaConfig> getKafkaConfigs(final String kafkaConfigName, final Integer limit, final Integer offset) throws IOException {
 
         List<KafkaConfig> kafkaConfigs;
         kafkaConfigs = listAllKafkaConfigs(kafkaConfigName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
index 77877e9..8ab6ebe 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
@@ -128,17 +128,14 @@ public class LegacyAclService implements MutableAclService {
             htable = aclHBaseStorage.getTable(aclTableName);
 
             Scan scan = new Scan();
-            SingleColumnValueFilter parentFilter = new SingleColumnValueFilter(
-                    Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN),
-                    CompareOp.EQUAL, domainObjSerializer.serialize(new DomainObjectInfo(parentIdentity)));
+            SingleColumnValueFilter parentFilter = new SingleColumnValueFilter(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), CompareOp.EQUAL, domainObjSerializer.serialize(new DomainObjectInfo(parentIdentity)));
             parentFilter.setFilterIfMissing(true);
             scan.setFilter(parentFilter);
 
             ResultScanner scanner = htable.getScanner(scan);
             for (Result result = scanner.next(); result != null; result = scanner.next()) {
                 String id = Bytes.toString(result.getRow());
-                String type = Bytes.toString(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                        Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN)));
+                String type = Bytes.toString(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN)));
 
                 oids.add(new ObjectIdentityImpl(type, id));
             }
@@ -184,27 +181,18 @@ public class LegacyAclService implements MutableAclService {
                 result = htable.get(new Get(Bytes.toBytes(String.valueOf(oid.getIdentifier()))));
 
                 if (null != result && !result.isEmpty()) {
-                    SidInfo owner = sidSerializer
-                            .deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                                    Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN)));
-                    Sid ownerSid = (null == owner) ? null
-                            : (owner.isPrincipal() ? new PrincipalSid(owner.getSid())
-                                    : new GrantedAuthoritySid(owner.getSid()));
-                    boolean entriesInheriting = Bytes
-                            .toBoolean(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                                    Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
+                    SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN)));
+                    Sid ownerSid = (null == owner) ? null : (owner.isPrincipal() ? new PrincipalSid(owner.getSid()) : new GrantedAuthoritySid(owner.getSid()));
+                    boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
 
                     Acl parentAcl = null;
-                    DomainObjectInfo parentInfo = domainObjSerializer
-                            .deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                                    Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN)));
+                    DomainObjectInfo parentInfo = domainObjSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN)));
                     if (null != parentInfo) {
                         ObjectIdentity parentObj = new ObjectIdentityImpl(parentInfo.getType(), parentInfo.getId());
                         parentAcl = readAclById(parentObj, null);
                     }
 
-                    AclImpl acl = new AclImpl(oid, oid.getIdentifier(), aclAuthorizationStrategy,
-                            permissionGrantingStrategy, parentAcl, null, entriesInheriting, ownerSid);
+                    AclImpl acl = new AclImpl(oid, oid.getIdentifier(), aclAuthorizationStrategy, permissionGrantingStrategy, parentAcl, null, entriesInheriting, ownerSid);
                     genAces(sids, result, acl);
 
                     aclMaps.put(oid, acl);
@@ -242,12 +230,9 @@ public class LegacyAclService implements MutableAclService {
             htable = aclHBaseStorage.getTable(aclTableName);
 
             Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN),
-                    Bytes.toBytes(objectIdentity.getType()));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN),
-                    sidSerializer.serialize(new SidInfo(sid)));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                    Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
 
             htable.put(put);
 
@@ -307,15 +292,12 @@ public class LegacyAclService implements MutableAclService {
             Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
 
             if (null != acl.getParentAcl()) {
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY),
-                        Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN),
-                        domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
             }
 
             for (AccessControlEntry ace : acl.getEntries()) {
                 AceInfo aceInfo = new AceInfo(ace);
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY),
-                        Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
             }
 
             if (!put.isEmpty()) {
@@ -332,8 +314,7 @@ public class LegacyAclService implements MutableAclService {
         return (MutableAcl) readAclById(acl.getObjectIdentity());
     }
 
-    private void genAces(List<Sid> sids, Result result, AclImpl acl)
-            throws JsonParseException, JsonMappingException, IOException {
+    private void genAces(List<Sid> sids, Result result, AclImpl acl) throws JsonParseException, JsonMappingException, IOException {
         List<AceInfo> aceInfos = new ArrayList<AceInfo>();
         if (null != sids) {
             // Just return aces in sids
@@ -345,15 +326,13 @@ public class LegacyAclService implements MutableAclService {
                     sidName = ((GrantedAuthoritySid) sid).getGrantedAuthority();
                 }
 
-                AceInfo aceInfo = aceSerializer.deserialize(
-                        result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(sidName)));
+                AceInfo aceInfo = aceSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(sidName)));
                 if (null != aceInfo) {
                     aceInfos.add(aceInfo);
                 }
             }
         } else {
-            NavigableMap<byte[], byte[]> familyMap = result
-                    .getFamilyMap(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY));
+            NavigableMap<byte[], byte[]> familyMap = result.getFamilyMap(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY));
             for (byte[] qualifier : familyMap.keySet()) {
                 AceInfo aceInfo = aceSerializer.deserialize(familyMap.get(qualifier));
 
@@ -368,10 +347,8 @@ public class LegacyAclService implements MutableAclService {
             AceInfo aceInfo = aceInfos.get(i);
 
             if (null != aceInfo) {
-                Sid sid = aceInfo.getSidInfo().isPrincipal() ? new PrincipalSid(aceInfo.getSidInfo().getSid())
-                        : new GrantedAuthoritySid(aceInfo.getSidInfo().getSid());
-                AccessControlEntry ace = new AccessControlEntryImpl(Long.valueOf(i), acl, sid,
-                        aclPermissionFactory.buildFromMask(aceInfo.getPermissionMask()), true, false, false);
+                Sid sid = aceInfo.getSidInfo().isPrincipal() ? new PrincipalSid(aceInfo.getSidInfo().getSid()) : new GrantedAuthoritySid(aceInfo.getSidInfo().getSid());
+                AccessControlEntry ace = new AccessControlEntryImpl(Long.valueOf(i), acl, sid, aclPermissionFactory.buildFromMask(aceInfo.getPermissionMask()), true, false, false);
                 newAces.add(ace);
             }
         }
@@ -387,4 +364,5 @@ public class LegacyAclService implements MutableAclService {
         }
     }
 
+
 }
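
Note: the LegacyAclService hunks above persist each ACL as a single HBase row keyed by the domain object identifier, with type, owner and inherit-flag columns in an ACL info family. A standalone sketch of that row layout (not Kylin code; the family/qualifier names "i", "t", "o", "ih" are illustrative stand-ins for the AclHBaseStorage constants, and plain strings stand in for the JSON-serialized SidInfo/DomainObjectInfo values used in the real code) is:

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AclRowSketch {
        public static void main(String[] args) {
            Put put = new Put(Bytes.toBytes("12345"));                                            // row key = object identifier
            put.addColumn(Bytes.toBytes("i"), Bytes.toBytes("t"), Bytes.toBytes("CubeInstance")); // object type
            put.addColumn(Bytes.toBytes("i"), Bytes.toBytes("o"), Bytes.toBytes("ADMIN"));        // owner sid (simplified)
            put.addColumn(Bytes.toBytes("i"), Bytes.toBytes("ih"), Bytes.toBytes(true));          // entries inheriting
            System.out.println(put);                                                              // client-side only, no cluster needed
        }
    }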


[05/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
index c58f227..2b2e490 100755
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterDecorator.java
@@ -60,14 +60,12 @@ public class FilterDecorator implements TupleFilterSerializer.Decorator {
         return inevaluableColumns;
     }
 
-    private TupleFilter replaceConstantsWithLocalDict(CompareTupleFilter oldCompareFilter,
-            CompareTupleFilter newCompareFilter) {
+    private TupleFilter replaceConstantsWithLocalDict(CompareTupleFilter oldCompareFilter, CompareTupleFilter newCompareFilter) {
         //TODO localdict: (performance issue) transalte() with roundingflag 0 will use try catch exceptions to deal with non-existing entries
         return replaceConstantsWithGlobalDict(oldCompareFilter, newCompareFilter);
     }
 
-    private TupleFilter replaceConstantsWithGlobalDict(CompareTupleFilter oldCompareFilter,
-            CompareTupleFilter newCompareFilter) {
+    private TupleFilter replaceConstantsWithGlobalDict(CompareTupleFilter oldCompareFilter, CompareTupleFilter newCompareFilter) {
         Collection<String> constValues = (Collection<String>) oldCompareFilter.getValues();
         String firstValue = constValues.iterator().next();
         TblColRef col = newCompareFilter.getColumn();
@@ -219,4 +217,4 @@ public class FilterDecorator implements TupleFilterSerializer.Decorator {
         columnIO.writeColumn(column, v, roundingFlag, DimensionEncoding.NULL, id, 0);
         return Dictionary.dictIdToString(id, 0, id.length);
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
index 56f78dc..3eecba1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
@@ -109,8 +109,7 @@ public class HBaseScannerBenchmark {
     private void testScanRaw(String msg) throws IOException {
         long t = System.currentTimeMillis();
 
-        IGTScanner scan = simpleStore.scan(new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setFilterPushDown(null).createGTScanRequest());
+        IGTScanner scan = simpleStore.scan(new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
         ResultScanner innerScanner = ((SimpleHBaseStore.Reader) scan).getHBaseScanner();
         int count = 0;
         for (Result r : innerScanner) {
@@ -126,8 +125,7 @@ public class HBaseScannerBenchmark {
     private void testScanRecords(String msg) throws IOException {
         long t = System.currentTimeMillis();
 
-        IGTScanner scan = simpleStore.scan(new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setFilterPushDown(null).createGTScanRequest());
+        IGTScanner scan = simpleStore.scan(new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
         int count = 0;
         for (GTRecord rec : scan) {
             count++;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index c38e8c8..b12173d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -170,16 +170,12 @@ public class SimpleHBaseStore implements IGTStore {
                 private void loadRecord(Result r) {
                     Cell[] cells = r.rawCells();
                     Cell cell = cells[0];
-                    if (Bytes.compareTo(CF_B, 0, CF_B.length, cell.getFamilyArray(), cell.getFamilyOffset(),
-                            cell.getFamilyLength()) != 0 //
-                            || Bytes.compareTo(COL_B, 0, COL_B.length, cell.getQualifierArray(),
-                                    cell.getQualifierOffset(), cell.getQualifierLength()) != 0)
+                    if (Bytes.compareTo(CF_B, 0, CF_B.length, cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()) != 0 //
+                            || Bytes.compareTo(COL_B, 0, COL_B.length, cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) != 0)
                         throw new IllegalStateException();
 
-                    rec.loadCellBlock(0, ByteBuffer.wrap(cell.getRowArray(), cell.getRowOffset() + ID_LEN,
-                            cell.getRowLength() - ID_LEN));
-                    rec.loadCellBlock(1,
-                            ByteBuffer.wrap(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
+                    rec.loadCellBlock(0, ByteBuffer.wrap(cell.getRowArray(), cell.getRowOffset() + ID_LEN, cell.getRowLength() - ID_LEN));
+                    rec.loadCellBlock(1, ByteBuffer.wrap(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
                 }
 
                 @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 8586fac..e822ada 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -88,18 +88,14 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         if (shardNum == totalShards) {
             //all shards
-            return Lists.newArrayList(
-                    Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
+            return Lists.newArrayList(Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
         } else if (baseShard + shardNum <= totalShards) {
             //endpoint end key is inclusive, so no need to append 0 or anything
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard),
-                    getByteArrayForShort((short) (baseShard + shardNum - 1))));
+            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (baseShard + shardNum - 1))));
         } else {
             //0,1,2,3,4 wants 4,0
-            return Lists.newArrayList(
-                    Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
-                    Pair.newPair(getByteArrayForShort((short) 0),
-                            getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
+            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
+                    Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
         }
     }
 
@@ -141,18 +137,14 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(shardNum, coprocessorTimeout);
 
-        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(),
-                rawScanByteString.size());
+        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(), rawScanByteString.size());
 
-        logger.info(
-                "The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0",
-                Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
+        logger.info("The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0", Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
         for (RawScan rs : rawScans) {
             logScan(rs, cubeSeg.getStorageLocationIdentifier());
         }
 
-        logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum,
-                cuboidBaseShard, rawScans.size());
+        logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum, cuboidBaseShard, rawScans.size());
 
         // KylinConfig: use env instance instead of CubeSegment, because KylinConfig will share among queries
         // for different cubes until redeployment of coprocessor jar.
@@ -177,13 +169,11 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                 @Override
                 public void run() {
 
-                    final String logHeader = String.format("<sub-thread for Query %s GTScanRequest %s>", queryId,
-                            Integer.toHexString(System.identityHashCode(scanRequest)));
+                    final String logHeader = String.format("<sub-thread for Query %s GTScanRequest %s>", queryId, Integer.toHexString(System.identityHashCode(scanRequest)));
                     final AtomicReference<RuntimeException> regionErrorHolder = new AtomicReference<>();
 
                     try {
-                        Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()),
-                                HBaseConnection.getCoprocessorPool());
+                        Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
 
                         final CubeVisitRequest request = builder.build();
                         final byte[] startKey = epRange.getFirst();
@@ -225,20 +215,15 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                                             return;
                                         }
 
-                                        if (queryContext.getScannedBytes() > cubeSeg.getConfig()
-                                                .getQueryMaxScanBytes()) {
-                                            throw new ResourceLimitExceededException("Query scanned "
-                                                    + queryContext.getScannedBytes() + " bytes exceeds threshold "
-                                                    + cubeSeg.getConfig().getQueryMaxScanBytes());
+                                        if (queryContext.getScannedBytes() > cubeSeg.getConfig().getQueryMaxScanBytes()) {
+                                            throw new ResourceLimitExceededException("Query scanned " + queryContext.getScannedBytes() + " bytes exceeds threshold " + cubeSeg.getConfig().getQueryMaxScanBytes());
                                         }
 
                                         try {
                                             if (compressionResult) {
-                                                epResultItr.append(CompressionUtils.decompress(HBaseZeroCopyByteString
-                                                        .zeroCopyGetBytes(result.getCompressedRows())));
+                                                epResultItr.append(CompressionUtils.decompress(HBaseZeroCopyByteString.zeroCopyGetBytes(result.getCompressedRows())));
                                             } else {
-                                                epResultItr.append(HBaseZeroCopyByteString
-                                                        .zeroCopyGetBytes(result.getCompressedRows()));
+                                                epResultItr.append(HBaseZeroCopyByteString.zeroCopyGetBytes(result.getCompressedRows()));
                                             }
                                         } catch (IOException | DataFormatException e) {
                                             throw new RuntimeException(logHeader + "Error when decompressing", e);
@@ -293,8 +278,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                     RawScan.serializer.serialize(rs, rawScanBuffer);
                 }
                 rawScanBuffer.flip();
-                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(),
-                        rawScanBuffer.limit());
+                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(), rawScanBuffer.limit());
                 break;
             } catch (BufferOverflowException boe) {
                 logger.info("Buffer size {} cannot hold the raw scans, resizing to 4 times", rawScanBufferSize);
@@ -309,16 +293,12 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         Stats stats = result.getStats();
         byte[] compressedRows = HBaseZeroCopyByteString.zeroCopyGetBytes(result.getCompressedRows());
 
-        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ")
-                .append(BytesUtil.toHex(region)).append(" on host: ").append(stats.getHostname()).append(".");
+        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ").append(BytesUtil.toHex(region)).append(" on host: ").append(stats.getHostname()).append(".");
         sb.append("Total scanned row: ").append(stats.getScannedRowCount()).append(". ");
         sb.append("Total scanned bytes: ").append(stats.getScannedBytes()).append(". ");
         sb.append("Total filtered/aggred row: ").append(stats.getAggregatedRowCount()).append(". ");
-        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime())
-                .append("(ms). ");
-        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ")
-                .append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:")
-                .append(stats.getFreeSwapSpaceSize()).append(".");
+        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime()).append("(ms). ");
+        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ").append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:").append(stats.getFreeSwapSpaceSize()).append(".");
         sb.append("Etc message: ").append(stats.getEtcMsg()).append(".");
         sb.append("Normal Complete: ").append(stats.getNormalComplete() == 1).append(".");
         sb.append("Compressed row size: ").append(compressedRows.length);
@@ -328,8 +308,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
     private RuntimeException getCoprocessorException(CubeVisitResponse response) {
         if (!response.hasErrorInfo()) {
-            return new RuntimeException(
-                    "Coprocessor aborts due to scan timeout or other reasons, please re-deploy coprocessor to see concrete error message");
+            return new RuntimeException("Coprocessor aborts due to scan timeout or other reasons, please re-deploy coprocessor to see concrete error message");
         }
 
         CubeVisitResponse.ErrorInfo errorInfo = response.getErrorInfo();
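
Note: the first CubeHBaseEndpointRPC hunk above carries real logic behind the re-joined lines: a cuboid occupying shardNum shards starting at baseShard is scanned as one contiguous key range, or as two ranges when the shards wrap past the last shard of the segment. A minimal standalone sketch of that arithmetic (not Kylin code; shard ids are shown directly instead of the byte-array keys built by getByteArrayForShort) is:

    import java.util.ArrayList;
    import java.util.List;

    public class ShardRangeSketch {
        // Mirrors the three branches of the epRange computation shown above.
        static List<short[]> shardRanges(short baseShard, short shardNum, short totalShards) {
            List<short[]> ranges = new ArrayList<>();
            if (shardNum == totalShards) {
                ranges.add(new short[] { 0, (short) (shardNum - 1) });                       // all shards
            } else if (baseShard + shardNum <= totalShards) {
                ranges.add(new short[] { baseShard, (short) (baseShard + shardNum - 1) });   // one contiguous range
            } else {
                // wrap-around: e.g. shards 0..4, baseShard=4, shardNum=2 -> ranges [4,4] and [0,0]
                ranges.add(new short[] { baseShard, (short) (totalShards - 1) });
                ranges.add(new short[] { 0, (short) (baseShard + shardNum - totalShards - 1) });
            }
            return ranges;
        }

        public static void main(String[] args) {
            for (short[] r : shardRanges((short) 4, (short) 2, (short) 5)) {
                System.out.println(r[0] + ".." + r[1]); // prints 4..4 then 0..0
            }
        }
    }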

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
index 97d2373..db81646 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
@@ -72,7 +72,7 @@ public abstract class CubeHBaseRPC implements IGTStorage {
 
     public CubeHBaseRPC(ISegment segment, Cuboid cuboid, GTInfo fullGTInfo, StorageContext context) {
         Preconditions.checkArgument(segment instanceof CubeSegment, "segment must be CubeSegment");
-
+        
         this.cubeSeg = (CubeSegment) segment;
         this.cuboid = cuboid;
         this.fullGTInfo = fullGTInfo;
@@ -106,8 +106,7 @@ public abstract class CubeHBaseRPC implements IGTStorage {
         return scan;
     }
 
-    private RawScan preparedHBaseScan(GTRecord pkStart, GTRecord pkEnd, List<GTRecord> fuzzyKeys,
-            ImmutableBitSet selectedColBlocks) {
+    private RawScan preparedHBaseScan(GTRecord pkStart, GTRecord pkEnd, List<GTRecord> fuzzyKeys, ImmutableBitSet selectedColBlocks) {
         final List<Pair<byte[], byte[]>> selectedColumns = makeHBaseColumns(selectedColBlocks);
 
         LazyRowKeyEncoder encoder = new LazyRowKeyEncoder(cubeSeg, cuboid);
@@ -247,12 +246,10 @@ public abstract class CubeHBaseRPC implements IGTStorage {
         }
     }
 
-    private static List<org.apache.hadoop.hbase.util.Pair<byte[], byte[]>> convertToHBasePair(
-            List<org.apache.kylin.common.util.Pair<byte[], byte[]>> pairList) {
+    private static List<org.apache.hadoop.hbase.util.Pair<byte[], byte[]>> convertToHBasePair(List<org.apache.kylin.common.util.Pair<byte[], byte[]>> pairList) {
         List<org.apache.hadoop.hbase.util.Pair<byte[], byte[]>> result = Lists.newArrayList();
         for (org.apache.kylin.common.util.Pair<byte[], byte[]> pair : pairList) {
-            org.apache.hadoop.hbase.util.Pair<byte[], byte[]> element = new org.apache.hadoop.hbase.util.Pair<byte[], byte[]>(
-                    pair.getFirst(), pair.getSecond());
+            org.apache.hadoop.hbase.util.Pair<byte[], byte[]> element = new org.apache.hadoop.hbase.util.Pair<byte[], byte[]>(pair.getFirst(), pair.getSecond());
             result.add(element);
         }
 
@@ -295,24 +292,23 @@ public abstract class CubeHBaseRPC implements IGTStorage {
         } else {
             coopTimeout = cubeSeg.getConfig().getQueryCoprocessorTimeoutSeconds() * 1000;
         }
-
+        
         int rpcTimeout;
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
         rpcTimeout = hconf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
-
+        
         // HBase rpc timeout must be longer than coprocessor timeout
         if ((int) (coopTimeout * 1.1) > rpcTimeout) {
             rpcTimeout = (int) (coopTimeout * 1.1);
             hconf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout);
         }
-
+        
         // coprocessor timeout is 0 by default
         if (coopTimeout <= 0) {
             coopTimeout = (int) (rpcTimeout * 0.9);
         }
-
-        logger.debug("{} = {} ms, use {} ms as timeout for coprocessor", HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout,
-                coopTimeout);
+        
+        logger.debug("{} = {} ms, use {} ms as timeout for coprocessor", HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout, coopTimeout);
         return coopTimeout;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index f258efb..951e2ef 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -106,8 +106,7 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
         } else {
             List<byte[]> ret = Lists.newArrayList();
             for (short i = 0; i < cuboidShardNum; ++i) {
-                short shard = ShardingHash.normalize(cubeSeg.getCuboidBaseShard(cuboid.getId()), i,
-                        cubeSeg.getTotalShards(cuboid.getId()));
+                short shard = ShardingHash.normalize(cubeSeg.getCuboidBaseShard(cuboid.getId()), i, cubeSeg.getTotalShards(cuboid.getId()));
                 byte[] cookedKey = Arrays.copyOf(halfCookedKey, halfCookedKey.length);
                 BytesUtil.writeShort(shard, cookedKey, 0, RowConstants.ROWKEY_SHARDID_LEN);
                 ret.add(cookedKey);
@@ -194,13 +193,11 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
             }
         };
 
-        IGTStore store = new HBaseReadonlyStore(cellListIterator, scanRequest, rawScans.get(0).hbaseColumns,
-                hbaseColumnsToGT, cubeSeg.getRowKeyPreambleSize(), false);
+        IGTStore store = new HBaseReadonlyStore(cellListIterator, scanRequest, rawScans.get(0).hbaseColumns, hbaseColumnsToGT, cubeSeg.getRowKeyPreambleSize(), false);
         IGTScanner rawScanner = store.scan(scanRequest);
 
         final IGTScanner decorateScanner = scanRequest.decorateScanner(rawScanner);
-        final TrimmedInfoGTRecordAdapter trimmedInfoGTRecordAdapter = new TrimmedInfoGTRecordAdapter(fullGTInfo,
-                decorateScanner.iterator());
+        final TrimmedInfoGTRecordAdapter trimmedInfoGTRecordAdapter = new TrimmedInfoGTRecordAdapter(fullGTInfo, decorateScanner.iterator());
 
         return new IGTScanner() {
             @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
index adc210e..59fe9e0 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
@@ -68,11 +68,8 @@ class ExpectedSizeIterator implements Iterator<byte[]> {
             }
 
             if (ret == null) {
-                throw new RuntimeException(
-                        "Timeout visiting cube! Check why coprocessor exception is not sent back? In coprocessor Self-termination is checked every "
-                                + //
-                                GTScanRequest.terminateCheckInterval + " scanned rows, the configured timeout("
-                                + coprocessorTimeout + ") cannot support this many scans?");
+                throw new RuntimeException("Timeout visiting cube! Check why coprocessor exception is not sent back? In coprocessor Self-termination is checked every " + //
+                        GTScanRequest.terminateCheckInterval + " scanned rows, the configured timeout(" + coprocessorTimeout + ") cannot support this many scans?");
             }
 
             return ret;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
index 2e1bac4..631e8e8 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
@@ -45,12 +45,11 @@ public class HBaseReadonlyStore implements IGTStore {
     private int rowkeyPreambleSize;
     private boolean withDelay = false;
 
+
     /**
      * @param withDelay is for test use
      */
-    public HBaseReadonlyStore(CellListIterator cellListIterator, GTScanRequest gtScanRequest,
-            List<Pair<byte[], byte[]>> hbaseColumns, List<List<Integer>> hbaseColumnsToGT, int rowkeyPreambleSize,
-            boolean withDelay) {
+    public HBaseReadonlyStore(CellListIterator cellListIterator, GTScanRequest gtScanRequest, List<Pair<byte[], byte[]>> hbaseColumns, List<List<Integer>> hbaseColumnsToGT, int rowkeyPreambleSize, boolean withDelay) {
         this.cellListIterator = cellListIterator;
         this.info = gtScanRequest.getInfo();
         this.hbaseColumns = hbaseColumns;
@@ -77,10 +76,8 @@ public class HBaseReadonlyStore implements IGTStore {
     //TODO: possible to use binary search as cells might be sorted?
     public static Cell findCell(List<Cell> cells, byte[] familyName, byte[] columnName) {
         for (Cell c : cells) {
-            if (BytesUtil.compareBytes(familyName, 0, c.getFamilyArray(), c.getFamilyOffset(), familyName.length) == 0
-                    && //
-                    BytesUtil.compareBytes(columnName, 0, c.getQualifierArray(), c.getQualifierOffset(),
-                            columnName.length) == 0) {
+            if (BytesUtil.compareBytes(familyName, 0, c.getFamilyArray(), c.getFamilyOffset(), familyName.length) == 0 && //
+                    BytesUtil.compareBytes(columnName, 0, c.getQualifierArray(), c.getQualifierOffset(), columnName.length) == 0) {
                 return c;
             }
         }
@@ -124,9 +121,7 @@ public class HBaseReadonlyStore implements IGTStore {
 
                         // dimensions, set to primary key, also the 0th column block
                         Cell firstCell = oneRow.get(0);
-                        ByteBuffer buf = byteBuffer(firstCell.getRowArray(),
-                                rowkeyPreambleSize + firstCell.getRowOffset(),
-                                firstCell.getRowLength() - rowkeyPreambleSize);
+                        ByteBuffer buf = byteBuffer(firstCell.getRowArray(), rowkeyPreambleSize + firstCell.getRowOffset(), firstCell.getRowLength() - rowkeyPreambleSize);
                         oneRecord.loadCellBlock(0, buf);
 
                         // metrics

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index bfe4f44..cde127e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -149,8 +149,8 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         private long rowCount;
         private long rowBytes;
 
-        ResourceTrackingCellListIterator(Iterator<List<Cell>> delegate, long rowCountLimit, long bytesLimit,
-                long timeout) {
+        ResourceTrackingCellListIterator(Iterator<List<Cell>> delegate,
+                                         long rowCountLimit, long bytesLimit, long timeout) {
             this.delegate = delegate;
             this.rowCountLimit = rowCountLimit;
             this.bytesLimit = bytesLimit;
@@ -164,8 +164,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
                 throw new ResourceLimitExceededException("scanned row count exceeds threshold " + rowCountLimit);
             }
             if (rowBytes > bytesLimit) {
-                throw new ResourceLimitExceededException(
-                        "scanned bytes " + rowBytes + " exceeds threshold " + bytesLimit);
+                throw new ResourceLimitExceededException("scanned bytes " + rowBytes + " exceeds threshold " + bytesLimit);
             }
             if ((rowCount % GTScanRequest.terminateCheckInterval == 1) && System.currentTimeMillis() > deadline) {
                 throw new KylinTimeoutException("coprocessor timeout after " + timeout + " ms");
@@ -196,8 +195,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         if (shardLength == 0) {
             return;
         }
-        byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength]
-                : region.getRegionInfo().getStartKey();
+        byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
         Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
         Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
     }
@@ -222,8 +220,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
 
     @SuppressWarnings("checkstyle:methodlength")
     @Override
-    public void visitCube(final RpcController controller, final CubeVisitProtos.CubeVisitRequest request,
-            RpcCallback<CubeVisitProtos.CubeVisitResponse> done) {
+    public void visitCube(final RpcController controller, final CubeVisitProtos.CubeVisitRequest request, RpcCallback<CubeVisitProtos.CubeVisitResponse> done) {
         List<RegionScanner> regionScanners = Lists.newArrayList();
         HRegion region = null;
 
@@ -237,7 +234,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
             this.serviceStartTime = System.currentTimeMillis();
 
-            region = (HRegion) env.getRegion();
+            region = (HRegion)env.getRegion();
             region.startRegionOperation();
 
             // if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.
@@ -247,15 +244,13 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
 
             debugGitTag = region.getTableDesc().getValue(IRealizationConstants.HTableGitTag);
 
-            final GTScanRequest scanReq = GTScanRequest.serializer
-                    .deserialize(ByteBuffer.wrap(HBaseZeroCopyByteString.zeroCopyGetBytes(request.getGtScanRequest())));
+            final GTScanRequest scanReq = GTScanRequest.serializer.deserialize(ByteBuffer.wrap(HBaseZeroCopyByteString.zeroCopyGetBytes(request.getGtScanRequest())));
             List<List<Integer>> hbaseColumnsToGT = Lists.newArrayList();
             for (IntList intList : request.getHbaseColumnsToGTList()) {
                 hbaseColumnsToGT.add(intList.getIntsList());
             }
             StorageSideBehavior behavior = StorageSideBehavior.valueOf(scanReq.getStorageBehavior());
-            final List<RawScan> hbaseRawScans = deserializeRawScans(
-                    ByteBuffer.wrap(HBaseZeroCopyByteString.zeroCopyGetBytes(request.getHbaseRawScan())));
+            final List<RawScan> hbaseRawScans = deserializeRawScans(ByteBuffer.wrap(HBaseZeroCopyByteString.zeroCopyGetBytes(request.getHbaseRawScan())));
 
             appendProfileInfo(sb, "start latency: " + (this.serviceStartTime - scanReq.getStartTime()));
 
@@ -264,8 +259,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             for (RawScan hbaseRawScan : hbaseRawScans) {
                 if (request.getRowkeyPreambleSize() - RowConstants.ROWKEY_CUBOIDID_LEN > 0) {
                     //if has shard, fill region shard to raw scan start/end
-                    updateRawScanByCurrentRegion(hbaseRawScan, region,
-                            request.getRowkeyPreambleSize() - RowConstants.ROWKEY_CUBOIDID_LEN);
+                    updateRawScanByCurrentRegion(hbaseRawScan, region, request.getRowkeyPreambleSize() - RowConstants.ROWKEY_CUBOIDID_LEN);
                 }
 
                 Scan scan = CubeHBaseRPC.buildScan(hbaseRawScan);
@@ -296,17 +290,16 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
 
             final long storagePushDownLimit = scanReq.getStoragePushDownLimit();
 
-            ResourceTrackingCellListIterator cellListIterator = new ResourceTrackingCellListIterator(allCellLists,
+            ResourceTrackingCellListIterator cellListIterator = new ResourceTrackingCellListIterator(
+                    allCellLists,
                     scanReq.getStorageScanRowNumThreshold(), // for old client (scan threshold)
                     !request.hasMaxScanBytes() ? Long.MAX_VALUE : request.getMaxScanBytes(), // for new client
                     scanReq.getTimeout());
 
-            IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns,
-                    hbaseColumnsToGT, request.getRowkeyPreambleSize(), behavior.delayToggledOn());
+            IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, request.getRowkeyPreambleSize(), behavior.delayToggledOn());
 
             IGTScanner rawScanner = store.scan(scanReq);
-            IGTScanner finalScanner = scanReq.decorateScanner(rawScanner, behavior.filterToggledOn(),
-                    behavior.aggrToggledOn(), false, request.getSpillEnabled());
+            IGTScanner finalScanner = scanReq.decorateScanner(rawScanner, behavior.filterToggledOn(), behavior.aggrToggledOn(), false, request.getSpillEnabled());
 
             ByteBuffer buffer = ByteBuffer.allocate(BufferedMeasureCodec.DEFAULT_BUFFER_SIZE);
 
@@ -337,20 +330,22 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             } catch (KylinTimeoutException e) {
                 logger.info("Abort scan: {}", e.getMessage());
                 errorInfo = CubeVisitProtos.CubeVisitResponse.ErrorInfo.newBuilder()
-                        .setType(CubeVisitProtos.CubeVisitResponse.ErrorType.TIMEOUT).setMessage(e.getMessage())
+                        .setType(CubeVisitProtos.CubeVisitResponse.ErrorType.TIMEOUT)
+                        .setMessage(e.getMessage())
                         .build();
             } catch (ResourceLimitExceededException e) {
                 logger.info("Abort scan: {}", e.getMessage());
                 errorInfo = CubeVisitProtos.CubeVisitResponse.ErrorInfo.newBuilder()
                         .setType(CubeVisitProtos.CubeVisitResponse.ErrorType.RESOURCE_LIMIT_EXCEEDED)
-                        .setMessage(e.getMessage()).build();
+                        .setMessage(e.getMessage())
+                        .build();
             } finally {
                 finalScanner.close();
             }
 
             appendProfileInfo(sb, "agg done");
-            logger.info("Total scanned {} rows and {} bytes", cellListIterator.getTotalScannedRowCount(),
-                    cellListIterator.getTotalScannedRowBytes());
+            logger.info("Total scanned {} rows and {} bytes",
+                    cellListIterator.getTotalScannedRowCount(), cellListIterator.getTotalScannedRowBytes());
 
             //outputStream.close() is not necessary
             byte[] compressedAllRows;
@@ -368,8 +363,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             appendProfileInfo(sb, "compress done");
             logger.info("Size of final result = {} ({} before compressing)", compressedAllRows.length, allRows.length);
 
-            OperatingSystemMXBean operatingSystemMXBean = (OperatingSystemMXBean) ManagementFactory
-                    .getOperatingSystemMXBean();
+            OperatingSystemMXBean operatingSystemMXBean = (OperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean();
             double systemCpuLoad = operatingSystemMXBean.getSystemCpuLoad();
             double freePhysicalMemorySize = operatingSystemMXBean.getFreePhysicalMemorySize();
             double freeSwapSpaceSize = operatingSystemMXBean.getFreeSwapSpaceSize();
@@ -383,15 +377,18 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             }
             done.run(responseBuilder.//
                     setCompressedRows(HBaseZeroCopyByteString.wrap(compressedAllRows)).//too many array copies 
-                    setStats(CubeVisitProtos.CubeVisitResponse.Stats.newBuilder()
-                            .setAggregatedRowCount(cellListIterator.getTotalScannedRowCount() - finalRowCount)
-                            .setScannedRowCount(cellListIterator.getTotalScannedRowCount())
-                            .setScannedBytes(cellListIterator.getTotalScannedRowBytes())
-                            .setServiceStartTime(serviceStartTime).setServiceEndTime(System.currentTimeMillis())
-                            .setSystemCpuLoad(systemCpuLoad).setFreePhysicalMemorySize(freePhysicalMemorySize)
-                            .setFreeSwapSpaceSize(freeSwapSpaceSize)
-                            .setHostname(InetAddress.getLocalHost().getHostName()).setEtcMsg(sb.toString())
-                            .setNormalComplete(errorInfo == null ? 1 : 0).build())
+                    setStats(CubeVisitProtos.CubeVisitResponse.Stats.newBuilder().
+                            setAggregatedRowCount(cellListIterator.getTotalScannedRowCount() - finalRowCount).
+                            setScannedRowCount(cellListIterator.getTotalScannedRowCount()).
+                            setScannedBytes(cellListIterator.getTotalScannedRowBytes()).
+                            setServiceStartTime(serviceStartTime).
+                            setServiceEndTime(System.currentTimeMillis()).
+                            setSystemCpuLoad(systemCpuLoad).
+                            setFreePhysicalMemorySize(freePhysicalMemorySize).
+                            setFreeSwapSpaceSize(freeSwapSpaceSize).
+                            setHostname(InetAddress.getLocalHost().getHostName()).
+                            setEtcMsg(sb.toString()).
+                            setNormalComplete(errorInfo == null ? 1 : 0).build())
                     .build());
 
         } catch (IOException ioe) {


[37/67] [abbrv] kylin git commit: minor, refine load-hive-conf.sh

Posted by li...@apache.org.
minor, refine load-hive-conf.sh


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9fafd27e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9fafd27e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9fafd27e

Branch: refs/heads/master
Commit: 9fafd27e7d2e7f1af9267921d9a6579528b17762
Parents: b6b0dc7
Author: Cheng Wang <ch...@kyligence.io>
Authored: Sat May 27 17:47:03 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Sat May 27 20:30:38 2017 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh |  5 +++--
 build/bin/load-hive-conf.sh       | 20 +++++++++++++-------
 2 files changed, 16 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/9fafd27e/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 6f28fd1..6d8fa65 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -18,6 +18,7 @@
 #
 
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
+source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/load-hive-conf.sh
 
 echo Retrieving hive dependency...
 
@@ -27,9 +28,9 @@ hive_env=
 if [ "${client_mode}" == "beeline" ]
 then
     beeline_params=`bash ${KYLIN_HOME}/bin/get-properties.sh kylin.source.hive.beeline-params`
-    hive_env=`beeline ${beeline_params} --outputformat=dsv -e set 2>&1 | grep 'env:CLASSPATH' `
+    hive_env=`beeline ${hive_conf_properties} ${beeline_params} --outputformat=dsv -e set 2>&1 | grep 'env:CLASSPATH' `
 else
-    hive_env=`hive -e set 2>&1 | grep 'env:CLASSPATH'`
+    hive_env=`hive ${hive_conf_properties} -e set 2>&1 | grep 'env:CLASSPATH'`
 fi
 
 hive_classpath=`echo $hive_env | grep 'env:CLASSPATH' | awk -F '=' '{print $2}'`

http://git-wip-us.apache.org/repos/asf/kylin/blob/9fafd27e/build/bin/load-hive-conf.sh
----------------------------------------------------------------------
diff --git a/build/bin/load-hive-conf.sh b/build/bin/load-hive-conf.sh
index 0937719..62467fa 100644
--- a/build/bin/load-hive-conf.sh
+++ b/build/bin/load-hive-conf.sh
@@ -5,11 +5,17 @@
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
 
 # source me
-mkdir -p ${KYLIN_HOME}/var
 hive_conf_dir="${KYLIN_HOME}/conf/kylin_hive_conf.xml"
-hive_conf_prop="${KYLIN_HOME}/var/hive_props"
-rm -rf ${hive_conf_prop}
-export ENABLE_CHECK_ENV=false
-${dir}/kylin.sh org.apache.kylin.tool.HiveConfigCLI ${hive_conf_dir} ${hive_conf_prop}
-[[ 0 == $? ]] || quit "Can not parse xml file: ${hive_conf_dir}, please check it."
-hive_conf_properties=`cat ${hive_conf_prop}`
+
+names=(`sed -n 's|<name>\(.*\)</name>|\1|p'  ${hive_conf_dir} | sed 's/ \+//g'`)
+values=(`sed -n 's|<value>\(.*\)</value>|\1|p'  ${hive_conf_dir} | sed 's/ \+//g'`)
+
+len_names=${#names[@]}
+len_values=${#values[@]}
+
+[[ $len_names == $len_values ]] || quit "Current hive conf file: ${hive_conf_dir} has inconsistent key value pairs."
+
+for((i=0;i<$len_names;i++))
+do
+    hive_conf_properties=${hive_conf_properties}" --hiveconf ${names[$i]}=${values[$i]} "
+done
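
The rewritten load-hive-conf.sh above extracts every <name>/<value> pair from kylin_hive_conf.xml with sed and folds them into a single hive_conf_properties string of --hiveconf flags. As a rough illustration (the property below is only an example, not necessarily present in the shipped file), a pair like <name>hive.exec.compress.output</name> / <value>true</value> would be appended as " --hiveconf hive.exec.compress.output=true ", which find-hive-dependency.sh then passes straight to the hive or beeline invocation shown earlier in this commit.
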


[63/67] [abbrv] kylin git commit: KYLIN-2648 fix wrong FS in HDFSPathGarbageCollectionStep

Posted by li...@apache.org.
KYLIN-2648 fix wrong FS in HDFSPathGarbageCollectionStep


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/216ae033
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/216ae033
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/216ae033

Branch: refs/heads/master
Commit: 216ae0337bcb57ec75fc3f694ec8c13e137572a5
Parents: 21a1768
Author: Li Yang <li...@apache.org>
Authored: Fri Jun 2 19:19:24 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Sun Jun 4 09:12:32 2017 +0800

----------------------------------------------------------------------
 .../hbase/steps/HDFSPathGarbageCollectionStep.java       | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/216ae033/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
index 4c0747e..86e8e6b 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
@@ -81,19 +81,20 @@ public class HDFSPathGarbageCollectionStep extends AbstractExecutable {
                 if (path.endsWith("*"))
                     path = path.substring(0, path.length() - 1);
 
-                Path oldPath = new Path(path);
+                Path oldPath = Path.getPathWithoutSchemeAndAuthority(new Path(path));
                 if (fileSystem.exists(oldPath)) {
                     fileSystem.delete(oldPath, true);
-                    logger.debug("HDFS path " + path + " is dropped.");
-                    output.append("HDFS path " + path + " is dropped.\n");
+                    logger.debug("HDFS path " + oldPath + " is dropped.");
+                    output.append("HDFS path " + oldPath + " is dropped.\n");
                 } else {
-                    logger.debug("HDFS path " + path + " not exists.");
-                    output.append("HDFS path " + path + " not exists.\n");
+                    logger.debug("HDFS path " + oldPath + " not exists.");
+                    output.append("HDFS path " + oldPath + " not exists.\n");
                 }
                 // If hbase was deployed on another cluster, the job dir is empty and should be dropped,
                 // because of rowkey_stats and hfile dirs are both dropped.
                 if (fileSystem.listStatus(oldPath.getParent()).length == 0) {
                     Path emptyJobPath = new Path(JobBuilderSupport.getJobWorkingDir(config, getJobId()));
+                    emptyJobPath = Path.getPathWithoutSchemeAndAuthority(emptyJobPath);
                     if (fileSystem.exists(emptyJobPath)) {
                         fileSystem.delete(emptyJobPath, true);
                         logger.debug("HDFS path " + emptyJobPath + " is empty and dropped.");


[54/67] [abbrv] kylin git commit: KYLIN-2575 translate computed column back to expression when sending to adhoc

Posted by li...@apache.org.
KYLIN-2575 translate computed column back to expression when sending to adhoc


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7c381487
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7c381487
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7c381487

Branch: refs/heads/master
Commit: 7c381487069d2bcf3adb07b38dc0b0803d13a8b4
Parents: 508fc23
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue May 30 19:14:46 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Tue May 30 23:47:23 2017 +0800

----------------------------------------------------------------------
 assembly/pom.xml                                |   6 -
 .../apache/kylin/metadata/MetadataManager.java  |  12 +-
 .../kylin/metadata/model/DataModelDesc.java     |   8 +-
 .../org/apache/kylin/query/KylinTestBase.java   |  33 ++--
 pom.xml                                         |   4 +
 server-base/pom.xml                             |  12 +-
 .../apache/kylin/rest/service/QueryService.java |   2 +-
 .../org/apache/kylin/rest/util/AdHocUtil.java   | 154 ++++++++++++++++++-
 .../apache/kylin/rest/util/AdHocUtilTest.java   |  98 ++++++++++++
 9 files changed, 296 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/assembly/pom.xml
----------------------------------------------------------------------
diff --git a/assembly/pom.xml b/assembly/pom.xml
index dae7152..0a64dde 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -91,12 +91,6 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.mrunit</groupId>
-            <artifactId>mrunit</artifactId>
-            <classifier>hadoop2</classifier>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-common</artifactId>
             <scope>provided</scope>

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
index f8e6832..2a894b9 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
@@ -116,13 +116,21 @@ public class MetadataManager {
     private CaseInsensitiveStringCache<ExternalFilterDesc> extFilterMap;
 
     public static class CCInfo {
-        public ComputedColumnDesc computedColumnDesc;
-        public Set<DataModelDesc> dataModelDescs;
+        private ComputedColumnDesc computedColumnDesc;
+        private Set<DataModelDesc> dataModelDescs;
 
         public CCInfo(ComputedColumnDesc computedColumnDesc, Set<DataModelDesc> dataModelDescs) {
             this.computedColumnDesc = computedColumnDesc;
             this.dataModelDescs = dataModelDescs;
         }
+
+        public ComputedColumnDesc getComputedColumnDesc() {
+            return computedColumnDesc;
+        }
+
+        public Set<DataModelDesc> getDataModelDescs() {
+            return dataModelDescs;
+        }
     }
 
     private Map<String, CCInfo> ccInfoMap = Maps.newHashMap();// this is to check any two models won't conflict computed columns

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index e759bdf..f5092a8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -476,13 +476,13 @@ public class DataModelDesc extends RootPersistentEntity {
             }
 
             CCInfo other = ccInfoMap.get(computedColumnDesc.getFullName());
-            if (other == null || (other.dataModelDescs.size() == 1 && other.dataModelDescs.contains(this))) {
+            if (other == null || (other.getDataModelDescs().size() == 1 && other.getDataModelDescs().contains(this))) {
                 ccInfoMap.put(computedColumnDesc.getFullName(), new CCInfo(computedColumnDesc, Sets.<DataModelDesc> newHashSet(this)));
-            } else if (other.computedColumnDesc.equals(computedColumnDesc)) {
-                other.dataModelDescs.add(this);
+            } else if (other.getComputedColumnDesc().equals(computedColumnDesc)) {
+                other.getDataModelDescs().add(this);
             } else {
                 throw new IllegalStateException(String.format("Computed column named %s is already defined in other models: %s. Please change another name, or try to keep consistent definition", //
-                        computedColumnDesc.getFullName(), other.dataModelDescs));
+                        computedColumnDesc.getFullName(), other.getDataModelDescs()));
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index 42f3a44..0db5388 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -42,15 +42,14 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.logging.LogManager;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.query.relnode.OLAPContext;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.rest.util.AdHocUtil;
 import org.dbunit.DatabaseUnitException;
 import org.dbunit.database.DatabaseConfig;
@@ -70,6 +69,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 
 /**
@@ -110,7 +110,8 @@ public class KylinTestBase {
     // h2 (BIGINT)
     public static class TestH2DataTypeFactory extends H2DataTypeFactory {
         @Override
-        public DataType createDataType(int sqlType, String sqlTypeName, String tableName, String columnName) throws DataTypeException {
+        public DataType createDataType(int sqlType, String sqlTypeName, String tableName, String columnName)
+                throws DataTypeException {
 
             if ((columnName.startsWith("COL") || columnName.startsWith("col")) && sqlType == Types.BIGINT) {
                 return DataType.INTEGER;
@@ -223,7 +224,8 @@ public class KylinTestBase {
     // ////////////////////////////////////////////////////////////////////////////////////////
     // execute
 
-    protected ITable executeQuery(IDatabaseConnection dbConn, String queryName, String sql, boolean needSort) throws Exception {
+    protected ITable executeQuery(IDatabaseConnection dbConn, String queryName, String sql, boolean needSort)
+            throws Exception {
 
         // change join type to match current setting
         sql = changeJoinType(sql, joinType);
@@ -258,9 +260,9 @@ public class KylinTestBase {
 
             return output(resultSet, needDisplay);
         } catch (SQLException sqlException) {
-            List<List<String>> results =  Lists.newArrayList();
+            List<List<String>> results = Lists.newArrayList();
             List<SelectedColumnMeta> columnMetas = Lists.newArrayList();
-            AdHocUtil.doAdHocQuery(sql, results, columnMetas, sqlException);
+            AdHocUtil.doAdHocQuery(ProjectInstance.DEFAULT_PROJECT_NAME, sql, results, columnMetas, sqlException);
             return results.size();
         } finally {
             if (resultSet != null) {
@@ -280,7 +282,8 @@ public class KylinTestBase {
         }
     }
 
-    protected ITable executeDynamicQuery(IDatabaseConnection dbConn, String queryName, String sql, List<String> parameters, boolean needSort) throws Exception {
+    protected ITable executeDynamicQuery(IDatabaseConnection dbConn, String queryName, String sql,
+            List<String> parameters, boolean needSort) throws Exception {
 
         // change join type to match current setting
         sql = changeJoinType(sql, joinType);
@@ -316,7 +319,8 @@ public class KylinTestBase {
 
         String[] tokens = StringUtils.split(sql, null);// split white spaces
         for (int i = 0; i < tokens.length - 1; ++i) {
-            if ((tokens[i].equalsIgnoreCase("inner") || tokens[i].equalsIgnoreCase("left")) && tokens[i + 1].equalsIgnoreCase("join")) {
+            if ((tokens[i].equalsIgnoreCase("inner") || tokens[i].equalsIgnoreCase("left"))
+                    && tokens[i + 1].equalsIgnoreCase("join")) {
                 tokens[i] = targetType.toLowerCase();
             }
         }
@@ -407,7 +411,8 @@ public class KylinTestBase {
         }
     }
 
-    protected void execAndCompResultSize(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
+    protected void execAndCompResultSize(String queryFolder, String[] exclusiveQuerys, boolean needSort)
+            throws Exception {
         logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
@@ -504,7 +509,6 @@ public class KylinTestBase {
         logger.info("Queries appended with limit: " + appendLimitQueries);
     }
 
-
     protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
         execAndCompQuery(queryFolder, exclusiveQuerys, needSort, new ICompareQueryTranslator() {
             @Override
@@ -518,7 +522,8 @@ public class KylinTestBase {
         });
     }
 
-    protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort, ICompareQueryTranslator translator) throws Exception {
+    protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort,
+            ICompareQueryTranslator translator) throws Exception {
         logger.info("---------- test folder: " + new File(queryFolder).getAbsolutePath());
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
@@ -557,7 +562,8 @@ public class KylinTestBase {
         }
     }
 
-    protected void execAndCompDynamicQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
+    protected void execAndCompDynamicQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort)
+            throws Exception {
         logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
@@ -683,7 +689,8 @@ public class KylinTestBase {
         cubeConnection = QueryDataSource.create(ProjectInstance.DEFAULT_PROJECT_NAME, config).getConnection();
 
         //setup h2
-        h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072", "sa", "");
+        h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072", "sa",
+                "");
         // Load H2 Tables (inner join)
         H2Database h2DB = new H2Database(h2Connection, config);
         h2DB.loadAllTables();

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2fcc6fa..f887c8d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -93,6 +93,8 @@
         <h2.version>1.4.192</h2.version>
         <jetty.version>9.2.20.v20161216</jetty.version>
         <jamm.version>0.3.1</jamm.version>
+        <mockito.version>2.7.14</mockito.version>
+
 
         <!-- Commons -->
         <commons-lang3.version>3.4</commons-lang3.version>
@@ -730,6 +732,8 @@
                 <artifactId>opensaml</artifactId>
                 <version>${opensaml.version}</version>
             </dependency>
+
+
             <!-- Spring Core -->
             <dependency>
                 <groupId>org.springframework</groupId>

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/server-base/pom.xml
----------------------------------------------------------------------
diff --git a/server-base/pom.xml b/server-base/pom.xml
index b165b99..c7247a5 100644
--- a/server-base/pom.xml
+++ b/server-base/pom.xml
@@ -17,7 +17,8 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
@@ -47,7 +48,7 @@
                 </exclusion>
             </exclusions>
         </dependency>
-        
+
         <!-- these plug-in modules, should not have API dependencies -->
         <dependency>
             <groupId>org.apache.kylin</groupId>
@@ -134,6 +135,13 @@
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <scope>test</scope>
+            <!--MRUnit relies on older version of mockito, so cannot manage it globally-->
+            <version>${mockito.version}</version>
+        </dependency>
 
         <dependency>
             <groupId>org.apache.tomcat</groupId>

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 5130e55..06f9d80 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -729,7 +729,7 @@ public class QueryService extends BasicService {
                 results.add(oneRow);
             }
         } catch (SQLException sqlException) {
-            isAdHoc = AdHocUtil.doAdHocQuery(correctedSql, results, columnMetas, sqlException);
+            isAdHoc = AdHocUtil.doAdHocQuery(sqlRequest.getProject(), correctedSql, results, columnMetas, sqlException);
         } finally {
             close(resultSet, stat, conn);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
index 678e58e..8221790 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
@@ -18,11 +18,25 @@
 
 package org.apache.kylin.rest.util;
 
+import static org.apache.kylin.metadata.MetadataManager.CCInfo;
+
 import java.sql.SQLException;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.annotation.Nullable;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.commons.lang3.tuple.Triple;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.model.DataModelDesc;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.query.routing.NoRealizationFoundException;
 import org.apache.kylin.rest.exception.InternalErrorException;
@@ -31,11 +45,19 @@ import org.apache.kylin.storage.adhoc.IAdhocConverter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
 public class AdHocUtil {
     private static final Logger logger = LoggerFactory.getLogger(AdHocUtil.class);
 
-    public static boolean doAdHocQuery(String sql, List<List<String>> results, List<SelectedColumnMeta> columnMetas, SQLException sqlException) throws Exception {
-        boolean isExpectedCause = (ExceptionUtils.getRootCause(sqlException).getClass().equals(NoRealizationFoundException.class));
+    public static boolean doAdHocQuery(String project, String sql, List<List<String>> results,
+            List<SelectedColumnMeta> columnMetas, SQLException sqlException) throws Exception {
+
+        boolean isExpectedCause = (ExceptionUtils.getRootCause(sqlException).getClass()
+                .equals(NoRealizationFoundException.class));
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         Boolean isAdHoc = false;
 
@@ -62,9 +84,10 @@ public class AdHocUtil {
             runner.init();
 
             try {
-                String adhocSql = converter.convert(sql);
-                if (!sql.equals(adhocSql)) {
-                    logger.info("the original query is converted to {} before delegating to adhoc", adhocSql);
+                String expandCC = restoreComputedColumnToExpr(sql, project);
+                String adhocSql = converter.convert(expandCC);
+                if (!expandCC.equals(adhocSql)) {
+                    logger.info("before delegating to adhoc, the query is converted to {} ", adhocSql);
                 }
 
                 runner.executeQuery(adhocSql, results, columnMetas);
@@ -78,4 +101,125 @@ public class AdHocUtil {
 
         return isAdHoc;
     }
+
+    private final static Pattern identifierInSqlPattern = Pattern.compile(
+            //find pattern like "table"."column" or "column"
+            "((?<![\\p{L}_0-9\\.\\\"])(\\\"[\\p{L}_0-9]+\\\"\\.)?(\\\"[\\p{L}_0-9]+\\\")(?![\\p{L}_0-9\\.\\\"]))" + "|"
+            //find pattern like table.column or column
+                    + "((?<![\\p{L}_0-9\\.\\\"])([\\p{L}_0-9]+\\.)?([\\p{L}_0-9]+)(?![\\p{L}_0-9\\.\\\"]))");
+
+    private final static Pattern identifierInExprPattern = Pattern.compile(
+            // a.b.c
+            "((?<![\\p{L}_0-9\\.\\\"])([\\p{L}_0-9]+\\.)([\\p{L}_0-9]+\\.)([\\p{L}_0-9]+)(?![\\p{L}_0-9\\.\\\"]))");
+
+    private final static Pattern endWithAsPattern = Pattern.compile("\\s+as\\s+$", Pattern.CASE_INSENSITIVE);
+
+    public static String restoreComputedColumnToExpr(String beforeSql, String project) {
+        MetadataManager metadataManager = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
+        Map<String, CCInfo> ccInfoMap = metadataManager.getCcInfoMap();
+        final ProjectInstance projectInstance = ProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
+                .getProject(project);
+
+        Iterable<CCInfo> projectCCInfo = Iterables.filter(ccInfoMap.values(), new Predicate<CCInfo>() {
+            @Override
+            public boolean apply(@Nullable CCInfo ccInfo) {
+                return Iterables.any(ccInfo.getDataModelDescs(), new Predicate<DataModelDesc>() {
+                    @Override
+                    public boolean apply(@Nullable DataModelDesc model) {
+                        return projectInstance.containsModel(model.getName());
+                    }
+                });
+            }
+        });
+
+        String afterSql = beforeSql;
+        for (CCInfo ccInfo : projectCCInfo) {
+            afterSql = restoreComputedColumnToExpr(afterSql, ccInfo);
+        }
+
+        if (!StringUtils.equals(beforeSql, afterSql)) {
+            logger.info("computed column in sql is expanded before sending to adhoc engine: " + afterSql);
+        }
+        return afterSql;
+    }
+
+    static String restoreComputedColumnToExpr(String sql, CCInfo ccInfo) {
+
+        String ccName = ccInfo.getComputedColumnDesc().getColumnName();
+        List<Triple<Integer, Integer, String>> replacements = Lists.newArrayList();
+        Matcher matcher = identifierInSqlPattern.matcher(sql);
+
+        while (matcher.find()) {
+            if (matcher.group(1) != null) { //with quote case: "TABLE"."COLUMN"
+
+                String quotedColumnName = matcher.group(3);
+                Preconditions.checkNotNull(quotedColumnName);
+                String columnName = StringUtils.strip(quotedColumnName, "\"");
+                if (!columnName.equalsIgnoreCase(ccName)) {
+                    continue;
+                }
+
+                if (matcher.group(2) != null) { // table name exist 
+                    String quotedTableAlias = StringUtils.strip(matcher.group(2), ".");
+                    String tableAlias = StringUtils.strip(quotedTableAlias, "\"");
+                    replacements.add(Triple.of(matcher.start(1), matcher.end(1),
+                            replaceIdentifierInExpr(ccInfo.getComputedColumnDesc().getExpression(), tableAlias, true)));
+                } else { //only column
+                    if (endWithAsPattern.matcher(sql.substring(0, matcher.start(1))).find()) {
+                        //select DEAL_AMOUNT as "deal_amount" case
+                        continue;
+                    }
+                    replacements.add(Triple.of(matcher.start(1), matcher.end(1),
+                            replaceIdentifierInExpr(ccInfo.getComputedColumnDesc().getExpression(), null, true)));
+                }
+            } else if (matcher.group(4) != null) { //without quote case: table.column or simply column
+                String columnName = matcher.group(6);
+                Preconditions.checkNotNull(columnName);
+                if (!columnName.equalsIgnoreCase(ccName)) {
+                    continue;
+                }
+
+                if (matcher.group(5) != null) { //table name exist
+                    String tableAlias = StringUtils.strip(matcher.group(5), ".");
+                    replacements.add(Triple.of(matcher.start(4), matcher.end(4), replaceIdentifierInExpr(
+                            ccInfo.getComputedColumnDesc().getExpression(), tableAlias, false)));
+
+                } else { //only column 
+                    if (endWithAsPattern.matcher(sql.substring(0, matcher.start(4))).find()) {
+                        //select DEAL_AMOUNT as deal_amount case
+                        continue;
+                    }
+                    replacements.add(Triple.of(matcher.start(4), matcher.end(4),
+                            replaceIdentifierInExpr(ccInfo.getComputedColumnDesc().getExpression(), null, false)));
+                }
+            }
+        }
+
+        Collections.reverse(replacements);
+        for (Triple<Integer, Integer, String> triple : replacements) {
+            sql = sql.substring(0, triple.getLeft()) + "(" + triple.getRight() + ")"
+                    + sql.substring(triple.getMiddle());
+        }
+        return sql;
+    }
+
+    // identifier in expr must be DB.TABLE.COLUMN, all TABLE in expr should be guaranteed to be same
+    static String replaceIdentifierInExpr(String expr, String tableAlias, boolean quoted) {
+        List<Triple<Integer, Integer, String>> replacements = Lists.newArrayList();
+        Matcher matcher = identifierInExprPattern.matcher(expr);
+        while (matcher.find()) {
+
+            String t = tableAlias == null ? StringUtils.strip(matcher.group(3), ".") : tableAlias;
+            String c = matcher.group(4);
+
+            String replacement = quoted ? "\"" + t.toUpperCase() + "\".\"" + c.toUpperCase() + "\"" : t + "." + c;
+            replacements.add(Triple.of(matcher.start(1), matcher.end(1), replacement));
+        }
+
+        Collections.reverse(replacements);
+        for (Triple<Integer, Integer, String> triple : replacements) {
+            expr = expr.substring(0, triple.getLeft()) + triple.getRight() + expr.substring(triple.getMiddle());
+        }
+        return expr;
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/7c381487/server-base/src/test/java/org/apache/kylin/rest/util/AdHocUtilTest.java
----------------------------------------------------------------------
diff --git a/server-base/src/test/java/org/apache/kylin/rest/util/AdHocUtilTest.java b/server-base/src/test/java/org/apache/kylin/rest/util/AdHocUtilTest.java
new file mode 100644
index 0000000..b93e2d3
--- /dev/null
+++ b/server-base/src/test/java/org/apache/kylin/rest/util/AdHocUtilTest.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.rest.util;
+
+import static org.apache.kylin.metadata.MetadataManager.CCInfo;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import org.apache.kylin.metadata.model.ComputedColumnDesc;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class AdHocUtilTest {
+
+    @Test
+    public void testReplaceIdentifierInExpr() {
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("a.b.x * a.b.y", null, false);
+            Assert.assertEquals("b.x * b.y", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("a_1.b_2.x_3 * a_1.b_2.y_3", null, false);
+            Assert.assertEquals("b_2.x_3 * b_2.y_3", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("a.b.x * a.b.y", "c", false);
+            Assert.assertEquals("c.x * c.y", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("a.b.x * a.b.y", "c", true);
+            Assert.assertEquals("\"C\".\"X\" * \"C\".\"Y\"", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("substr(a.b.x,1,3)>a.b.y", "c", true);
+            Assert.assertEquals("substr(\"C\".\"X\",1,3)>\"C\".\"Y\"", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("strcmp(substr(a.b.x,1,3),a.b.y) > 0", "c", true);
+            Assert.assertEquals("strcmp(substr(\"C\".\"X\",1,3),\"C\".\"Y\") > 0", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("strcmp(substr(a.b.x,1,3),a.b.y) > 0", null, true);
+            Assert.assertEquals("strcmp(substr(\"B\".\"X\",1,3),\"B\".\"Y\") > 0", ret);
+        }
+        {
+            String ret = AdHocUtil.replaceIdentifierInExpr("strcmp(substr(a.b.x, 1, 3),a.b.y) > 0", null, false);
+            Assert.assertEquals("strcmp(substr(b.x, 1, 3),b.y) > 0", ret);
+        }
+    }
+
+    @Test
+    public void testRestoreComputedColumnToExpr() {
+
+        ComputedColumnDesc computedColumnDesc = mock(ComputedColumnDesc.class);
+        when(computedColumnDesc.getColumnName()).thenReturn("DEAL_AMOUNT");
+        when(computedColumnDesc.getExpression()).thenReturn("DB.TABLE.price * DB.TABLE.number");
+
+        CCInfo ccInfo = mock(CCInfo.class);
+        when(ccInfo.getComputedColumnDesc()).thenReturn(computedColumnDesc);
+
+        {
+            String ret = AdHocUtil.restoreComputedColumnToExpr(
+                    "select DEAL_AMOUNT from DB.TABLE group by date order by DEAL_AMOUNT", ccInfo);
+            Assert.assertEquals(
+                    "select (TABLE.price * TABLE.number) from DB.TABLE group by date order by (TABLE.price * TABLE.number)",
+                    ret);
+        }
+        {
+            String ret = AdHocUtil.restoreComputedColumnToExpr(
+                    "select DEAL_AMOUNT as DEAL_AMOUNT from DB.TABLE group by date order by DEAL_AMOUNT", ccInfo);
+            Assert.assertEquals(
+                    "select (TABLE.price * TABLE.number) as DEAL_AMOUNT from DB.TABLE group by date order by (TABLE.price * TABLE.number)",
+                    ret);
+        }
+        {
+            String ret = AdHocUtil.restoreComputedColumnToExpr(
+                    "select \"DEAL_AMOUNT\" AS deal_amount from DB.TABLE group by date order by DEAL_AMOUNT", ccInfo);
+            Assert.assertEquals(
+                    "select (\"TABLE\".\"PRICE\" * \"TABLE\".\"NUMBER\") AS deal_amount from DB.TABLE group by date order by (TABLE.price * TABLE.number)",
+                    ret);
+        }
+    }
+}


[53/67] [abbrv] kylin git commit: minor, fix sample cube metadata typo

Posted by li...@apache.org.
minor, fix sample cube metadata typo


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/508fc232
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/508fc232
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/508fc232

Branch: refs/heads/master
Commit: 508fc232183bf769d4aa81f19746faeb77aae761
Parents: 2cde336
Author: Roger Shi <ro...@hotmail.com>
Authored: Tue May 30 23:22:24 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Tue May 30 23:24:16 2017 +0800

----------------------------------------------------------------------
 examples/sample_cube/template/cube_desc/kylin_sales_cube.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/508fc232/examples/sample_cube/template/cube_desc/kylin_sales_cube.json
----------------------------------------------------------------------
diff --git a/examples/sample_cube/template/cube_desc/kylin_sales_cube.json b/examples/sample_cube/template/cube_desc/kylin_sales_cube.json
index 1ad73bb..fd18637 100644
--- a/examples/sample_cube/template/cube_desc/kylin_sales_cube.json
+++ b/examples/sample_cube/template/cube_desc/kylin_sales_cube.json
@@ -261,7 +261,7 @@
   "auto_merge_time_ranges" : null,
   "retention_range" : 0,
   "engine_type" : %default_engine_type%,
-  "storage_type" : %default_engine_type%,
+  "storage_type" : %default_storage_type%,
   "override_kylin_properties" : {
     "kylin.cube.aggrgroup.is-mandatory-only-valid" : "true"
   }
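
For context, the effect of the one-character fix above: before it, the generated sample cube descriptor filled both engine_type and storage_type from the same %default_engine_type% placeholder, so whatever value the sample script substitutes for the build engine (the concrete numbers come from that script and are not shown here) also ended up as the storage type; after the fix, storage_type is populated from its own %default_storage_type% placeholder.
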


[64/67] [abbrv] kylin git commit: KYLIN-2515 code review

Posted by li...@apache.org.
KYLIN-2515 code review


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f6cdd629
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f6cdd629
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f6cdd629

Branch: refs/heads/master
Commit: f6cdd629817d18b60d6885753eb9681cfa4b57f2
Parents: 216ae03
Author: Yang Li <li...@apache.org>
Authored: Sun Jun 4 16:31:31 2017 +0800
Committer: nichunen <zj...@sjtu.org>
Committed: Sun Jun 4 18:01:03 2017 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |   8 +-
 .../apache/kylin/common/KylinConfigBase.java    |  10 +-
 .../metadata/querymeta/ColumnMetaWithType.java  |   1 +
 .../metadata/querymeta/SelectedColumnMeta.java  |   1 +
 .../metadata/querymeta/TableMetaWithType.java   |   1 +
 .../source/adhocquery/HiveAdhocConverter.java   | 181 +++++++++++++++++++
 .../source/adhocquery/IAdHocConverter.java      |  25 +++
 .../kylin/source/adhocquery/IAdHocRunner.java   |  39 ++++
 .../adhocquery/HiveAdhocConverterTest.java      |  63 +++++++
 .../kylin/storage/adhoc/AdHocRunnerBase.java    |  48 -----
 .../kylin/storage/adhoc/HiveAdhocConverter.java | 180 ------------------
 .../kylin/storage/adhoc/IAdhocConverter.java    |  25 ---
 .../storage/adhoc/HiveAdhocConverterTest.java   |  62 -------
 .../test_case_data/sandbox/kylin.properties     |   8 +-
 .../kylin/rest/adhoc/AdHocRunnerJdbcImpl.java   |  32 ++--
 .../kylin/rest/adhoc/JdbcConnectionFactory.java |   5 +-
 .../kylin/rest/controller/QueryController.java  |   5 +-
 .../org/apache/kylin/rest/util/AdHocUtil.java   |  53 ++----
 18 files changed, 358 insertions(+), 389 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 8949a42..44a38d2 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -237,13 +237,13 @@ kylin.engine.spark-conf.spark.history.fs.logDirectory=hdfs\:///kylin/spark-histo
 #kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
 
 ### AD-HOC QUERY ###
-#kylin.query.ad-hoc.runner.class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
+#kylin.query.ad-hoc.runner-class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
 
 #kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
 #kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
 #kylin.query.ad-hoc.jdbc.username=hive
 #kylin.query.ad-hoc.jdbc.password=
 
-#kylin.query.ad-hoc.pool.max-total=8
-#kylin.query.ad-hoc.pool.max-idle=8
-#kylin.query.ad-hoc.pool.min-idle=0
+#kylin.query.ad-hoc.jdbc.pool-max-total=8
+#kylin.query.ad-hoc.jdbc.pool-max-idle=8
+#kylin.query.ad-hoc.jdbc.pool-min-idle=0

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index f465949..c83c546 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -986,11 +986,11 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public String getAdHocRunnerClassName() {
-        return getOptional("kylin.query.ad-hoc.runner.class-name", "");
+        return getOptional("kylin.query.ad-hoc.runner-class-name", "");
     }
 
     public String getAdHocConverterClassName() {
-        return getOptional("kylin.query.ad-hoc.converter.class-name", "org.apache.kylin.storage.adhoc.HiveAdhocConverter");
+        return getOptional("kylin.query.ad-hoc.converter-class-name", "org.apache.kylin.source.adhocquery.HiveAdhocConverter");
     }
 
     public String getJdbcUrl() {
@@ -1010,15 +1010,15 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public int getPoolMaxTotal() {
-        return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.pool.max-total", "8"));
+        return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.jdbc.pool-max-total", "8"));
     }
 
     public int getPoolMaxIdle() {
-        return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.pool.max-idle", "8"));
+        return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.jdbc.pool-max-idle", "8"));
     }
 
     public int getPoolMinIdle() {
-        return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.pool.min-idle", "0"));
+        return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.jdbc.pool-min-idle", "0"));
     }
 
     // ============================================================================
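Since callers go through the getters above, the key rename is transparent to Java code; only existing kylin.properties overrides need updating. A minimal usage sketch (assuming a running Kylin instance whose kylin.properties has already been loaded into the environment):

    import org.apache.kylin.common.KylinConfig;

    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();

    // Resolves kylin.query.ad-hoc.runner-class-name; an empty string means no ad-hoc runner is configured.
    String runnerClassName = kylinConfig.getAdHocRunnerClassName();

    // Resolve the renamed pool keys, falling back to the defaults shown above (8 / 8 / 0).
    int maxTotal = kylinConfig.getPoolMaxTotal();
    int maxIdle = kylinConfig.getPoolMaxIdle();
    int minIdle = kylinConfig.getPoolMinIdle();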

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
index e3cb86b..101ebd5 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
@@ -24,6 +24,7 @@ import java.util.HashSet;
 /**
  * Created by luwei on 17-4-26.
  */
+@SuppressWarnings("serial")
 public class ColumnMetaWithType extends ColumnMeta {
     public static enum columnTypeEnum implements Serializable {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
index 9ba0da2..1cbe8ab 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
@@ -22,6 +22,7 @@ import java.io.Serializable;
 
 /**
  */
+@SuppressWarnings("serial")
 public class SelectedColumnMeta implements Serializable {
     public SelectedColumnMeta(boolean isAutoIncrement, boolean isCaseSensitive, boolean isSearchable, boolean isCurrency, int isNullalbe, boolean isSigned, int displaySize, String label, String name, String schemaName, String catelogName, String tableName, int precision, int scale, int columnType, String columnTypeName, boolean isReadOnly, boolean isWritable, boolean isDefinitelyWritable) {
         super();

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
index 2ff21e4..e16aba6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
@@ -24,6 +24,7 @@ import java.util.HashSet;
 /**
  * Created by luwei on 17-4-26.
  */
+@SuppressWarnings("serial")
 public class TableMetaWithType extends TableMeta {
     public static enum tableTypeEnum implements Serializable {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java
new file mode 100644
index 0000000..97d77bf
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.adhocquery;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+//TODO: Workarounds to make the SQL readable by the Hive parser; should be replaced with a better-designed approach
+public class HiveAdhocConverter implements IAdHocConverter {
+
+    @SuppressWarnings("unused")
+    private static final Logger logger = LoggerFactory.getLogger(HiveAdhocConverter.class);
+
+    private static final Pattern EXTRACT_PATTERN = Pattern.compile("\\s+extract\\s*(\\()\\s*(.*?)\\s*from(\\s+)", Pattern.CASE_INSENSITIVE);
+    private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\()\\s*select\\s", Pattern.CASE_INSENSITIVE);
+    private static final Pattern CAST_PATTERN = Pattern.compile("CAST\\((.*?) (?i)AS\\s*(.*?)\\s*\\)", Pattern.CASE_INSENSITIVE);
+    private static final Pattern CONCAT_PATTERN = Pattern.compile("(['_a-z0-9A-Z]+)\\|\\|(['_a-z0-9A-Z]+)", Pattern.CASE_INSENSITIVE);
+
+    public static String replaceString(String originString, String fromString, String toString) {
+        return originString.replace(fromString, toString);
+    }
+
+    public static String extractReplace(String originString) {
+        Matcher extractMatcher = EXTRACT_PATTERN.matcher(originString);
+        String replacedString = originString;
+        Map<Integer, Integer> parenthesesPairs = null;
+
+        while (extractMatcher.find()) {
+            if (parenthesesPairs == null) {
+                parenthesesPairs = findParenthesesPairs(originString);
+            }
+
+            String functionStr = extractMatcher.group(2);
+            int startIdx = extractMatcher.end(3);
+            int endIdx = parenthesesPairs.get(extractMatcher.start(1));
+            String extractInner = originString.substring(startIdx, endIdx);
+            int originStart = extractMatcher.start(0) + 1;
+            int originEnd = endIdx + 1;
+
+            replacedString = replaceString(replacedString, originString.substring(originStart, originEnd), functionStr + "(" + extractInner + ")");
+        }
+
+        return replacedString;
+    }
+
+    public static String castRepalce(String originString) {
+        Matcher castMatcher = CAST_PATTERN.matcher(originString);
+        String replacedString = originString;
+
+        while (castMatcher.find()) {
+            String castStr = castMatcher.group();
+            String type = castMatcher.group(2);
+            String supportedType = "";
+            switch (type.toUpperCase()) {
+            case "INTEGER":
+                supportedType = "int";
+                break;
+            case "SHORT":
+                supportedType = "smallint";
+                break;
+            case "LONG":
+                supportedType = "bigint";
+                break;
+            default:
+                supportedType = type;
+            }
+
+            if (!supportedType.equals(type)) {
+                String replacedCastStr = castStr.replace(type, supportedType);
+                replacedString = replaceString(replacedString, castStr, replacedCastStr);
+            }
+        }
+
+        return replacedString;
+    }
+
+    public static String subqueryRepalce(String originString) {
+        Matcher subqueryMatcher = FROM_PATTERN.matcher(originString);
+        String replacedString = originString;
+        Map<Integer, Integer> parenthesesPairs = null;
+
+        while (subqueryMatcher.find()) {
+            if (parenthesesPairs == null) {
+                parenthesesPairs = findParenthesesPairs(originString);
+            }
+
+            int startIdx = subqueryMatcher.start(1);
+            int endIdx = parenthesesPairs.get(startIdx) + 1;
+
+            replacedString = replaceString(replacedString, originString.substring(startIdx, endIdx), originString.substring(startIdx, endIdx) + " as alias");
+        }
+
+        return replacedString;
+    }
+
+    public static String concatReplace(String originString) {
+        Matcher concatMatcher = CONCAT_PATTERN.matcher(originString);
+        String replacedString = originString;
+
+        while (concatMatcher.find()) {
+            String leftString = concatMatcher.group(1);
+            String rightString = concatMatcher.group(2);
+            replacedString = replaceString(replacedString, leftString + "||" + rightString, "concat(" + leftString + "," + rightString + ")");
+        }
+
+        return replacedString;
+    }
+
+    public static String doConvert(String originStr) {
+        // Step1.Replace " with `
+        String convertedSql = replaceString(originStr, "\"", "`");
+
+        // Step2.Replace extract functions
+        convertedSql = extractReplace(convertedSql);
+
+        // Step3.Replace cast type string
+        convertedSql = castRepalce(convertedSql);
+
+        // Step4.Replace sub query
+        convertedSql = subqueryRepalce(convertedSql);
+
+        // Step5.Replace char_length with length
+        convertedSql = replaceString(convertedSql, "char_length", "length");
+
+        // Step6.Replace "||" with concat
+        convertedSql = concatReplace(convertedSql);
+
+        return convertedSql;
+    }
+
+    private static Map<Integer, Integer> findParenthesesPairs(String sql) {
+        Map<Integer, Integer> result = new HashMap<>();
+        if (sql.length() > 1) {
+            Stack<Integer> lStack = new Stack<>();
+            boolean inStrVal = false;
+            for (int i = 0; i < sql.length(); i++) {
+                switch (sql.charAt(i)) {
+                case '(':
+                    if (!inStrVal) {
+                        lStack.push(i);
+                    }
+                    break;
+                case ')':
+                    if (!inStrVal && !lStack.empty()) {
+                        result.put(lStack.pop(), i);
+                    }
+                    break;
+                default:
+                    break;
+                }
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public String convert(String originSql) {
+        return doConvert(originSql);
+    }
+}
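To make the six rewrite steps above concrete, here is a small usage sketch (the input query is made up; it exercises the char_length and || steps, consistent with the unit tests added below):

    import org.apache.kylin.source.adhocquery.HiveAdhocConverter;
    import org.apache.kylin.source.adhocquery.IAdHocConverter;

    IAdHocConverter converter = new HiveAdhocConverter();
    String calciteSql = "select count(*) as cnt from test_kylin_fact"
            + " where char_length(lstg_format_name||'a') < 10";

    // char_length(...) becomes length(...), and x||y becomes concat(x,y)
    String hiveSql = converter.convert(calciteSql);
    // -> select count(*) as cnt from test_kylin_fact where length(concat(lstg_format_name,'a')) < 10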

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java
new file mode 100644
index 0000000..c4b87f8
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.adhocquery;
+
+/**
+ * Convert the query so that it can be parsed by the ad-hoc query engine.
+ */
+public interface IAdHocConverter {
+    String convert(String originSql);
+}
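The converter is resolved by class name (kylin.query.ad-hoc.converter-class-name in KylinConfigBase above), so a deployment can substitute its own implementation. A minimal sketch follows; the package and class name are hypothetical, not part of this commit:

    package com.example.adhoc;  // hypothetical

    import org.apache.kylin.source.adhocquery.IAdHocConverter;

    public class PassThroughConverter implements IAdHocConverter {
        @Override
        public String convert(String originSql) {
            // No rewriting: the SQL is handed to the ad-hoc engine unchanged.
            return originSql;
        }
    }

It would be wired in with kylin.query.ad-hoc.converter-class-name=com.example.adhoc.PassThroughConverter.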

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java
new file mode 100644
index 0000000..369325c
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.adhocquery;
+
+import java.util.List;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+
+public interface IAdHocRunner {
+
+    void init(KylinConfig config);
+
+    /**
+     * Run an ad-hoc query against the source database when Kylin cannot serve the query from a cube.
+     * 
+     * @param query                 the query statement
+     * @param returnRows            an empty list to collect returning rows
+     * @param returnColumnMeta      an empty list to collect metadata of returning columns
+     * @throws Exception if running ad-hoc query fails
+     */
+    void executeQuery(String query, List<List<String>> returnRows, List<SelectedColumnMeta> returnColumnMeta) throws Exception;
+}
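For orientation, a caller-side sketch of this contract (exception handling omitted; AdHocRunnerJdbcImpl, modified later in this commit, is the JDBC-backed implementation):

    import java.util.LinkedList;
    import java.util.List;

    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
    import org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl;
    import org.apache.kylin.source.adhocquery.IAdHocRunner;

    IAdHocRunner runner = new AdHocRunnerJdbcImpl();
    runner.init(KylinConfig.getInstanceFromEnv());   // reads the kylin.query.ad-hoc.jdbc.* settings

    List<List<String>> returnRows = new LinkedList<>();             // filled with result rows
    List<SelectedColumnMeta> returnColumnMeta = new LinkedList<>(); // one entry per result column
    runner.executeQuery("select count(*) from test_kylin_fact", returnRows, returnColumnMeta);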

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java b/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java
new file mode 100644
index 0000000..85a6d61
--- /dev/null
+++ b/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.adhocquery;
+
+import org.junit.Test;
+
+import junit.framework.TestCase;
+
+
+public class HiveAdhocConverterTest extends TestCase {
+    @Test
+    public void testStringReplace() {
+        String originString = "select count(*) as cnt from test_kylin_fact where char_length(lstg_format_name) < 10";
+        String replacedString = HiveAdhocConverter
+            .replaceString(originString, "char_length", "length");
+        assertEquals(replacedString, "select count(*) as cnt from test_kylin_fact where length(lstg_format_name) < 10");
+    }
+
+    @Test
+    public void testExtractReplace() {
+        String originString = "ignore EXTRACT(YEAR FROM KYLIN_CAL_DT.CAL_DT) ignore";
+        String replacedString = HiveAdhocConverter.extractReplace(originString);
+        assertEquals(replacedString, "ignore YEAR(KYLIN_CAL_DT.CAL_DT) ignore");
+    }
+
+    @Test
+    public void testCastReplace() {
+        String originString = "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS INTEGER)) ignore";
+        String replacedString = HiveAdhocConverter.castRepalce(originString);
+        assertEquals(replacedString, "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS int)) ignore");
+    }
+
+    @Test
+    public void testSubqueryReplace() {
+        String originString = "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) group by seller_id,lstg_format_name";
+        String replacedString = HiveAdhocConverter.subqueryRepalce(originString);
+        assertEquals(replacedString, "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) as alias group by seller_id,lstg_format_name");
+    }
+
+    @Test
+    public void testConcatReplace() {
+        String originString = "select count(*) as cnt from test_kylin_fact where lstg_format_name||'a'='ABINa'";
+        String replacedString = HiveAdhocConverter.concatReplace(originString);
+        assertEquals(replacedString, "select count(*) as cnt from test_kylin_fact where concat(lstg_format_name,'a')='ABINa'");
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
deleted file mode 100644
index 7b870c6..0000000
--- a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.storage.adhoc;
-
-import java.util.List;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class AdHocRunnerBase {
-
-    private static final Logger logger = LoggerFactory.getLogger(AdHocRunnerBase.class);
-
-    protected KylinConfig config = null;
-
-    public AdHocRunnerBase() {
-    }
-
-    public AdHocRunnerBase(KylinConfig config) {
-        this.config = config;
-    }
-
-    public void setConfig(KylinConfig config) {
-        this.config = config;
-    }
-
-    public abstract void init();
-
-    public abstract void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas) throws Exception;
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java
deleted file mode 100644
index 1a43557..0000000
--- a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-package org.apache.kylin.storage.adhoc;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Stack;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-//TODO: Some workaround ways to make sql readable by hive parser, should replaced it with a more well-designed way
-public class HiveAdhocConverter implements IAdhocConverter {
-
-    private static final Logger logger = LoggerFactory.getLogger(HiveAdhocConverter.class);
-
-    private static final Pattern EXTRACT_PATTERN = Pattern.compile("\\s+extract\\s*(\\()\\s*(.*?)\\s*from(\\s+)", Pattern.CASE_INSENSITIVE);
-    private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\()\\s*select\\s", Pattern.CASE_INSENSITIVE);
-    private static final Pattern CAST_PATTERN = Pattern.compile("CAST\\((.*?) (?i)AS\\s*(.*?)\\s*\\)", Pattern.CASE_INSENSITIVE);
-    private static final Pattern CONCAT_PATTERN = Pattern.compile("(['_a-z0-9A-Z]+)\\|\\|(['_a-z0-9A-Z]+)", Pattern.CASE_INSENSITIVE);
-
-    public static String replaceString(String originString, String fromString, String toString) {
-        return originString.replace(fromString, toString);
-    }
-
-    public static String extractReplace(String originString) {
-        Matcher extractMatcher = EXTRACT_PATTERN.matcher(originString);
-        String replacedString = originString;
-        Map<Integer, Integer> parenthesesPairs = null;
-
-        while (extractMatcher.find()) {
-            if (parenthesesPairs == null) {
-                parenthesesPairs = findParenthesesPairs(originString);
-            }
-
-            String functionStr = extractMatcher.group(2);
-            int startIdx = extractMatcher.end(3);
-            int endIdx = parenthesesPairs.get(extractMatcher.start(1));
-            String extractInner = originString.substring(startIdx, endIdx);
-            int originStart = extractMatcher.start(0) + 1;
-            int originEnd = endIdx + 1;
-
-            replacedString = replaceString(replacedString, originString.substring(originStart, originEnd), functionStr + "(" + extractInner + ")");
-        }
-
-        return replacedString;
-    }
-
-    public static String castRepalce(String originString) {
-        Matcher castMatcher = CAST_PATTERN.matcher(originString);
-        String replacedString = originString;
-
-        while (castMatcher.find()) {
-            String castStr = castMatcher.group();
-            String type = castMatcher.group(2);
-            String supportedType = "";
-            switch (type.toUpperCase()) {
-            case "INTEGER":
-                supportedType = "int";
-                break;
-            case "SHORT":
-                supportedType = "smallint";
-                break;
-            case "LONG":
-                supportedType = "bigint";
-                break;
-            default:
-                supportedType = type;
-            }
-
-            if (!supportedType.equals(type)) {
-                String replacedCastStr = castStr.replace(type, supportedType);
-                replacedString = replaceString(replacedString, castStr, replacedCastStr);
-            }
-        }
-
-        return replacedString;
-    }
-
-    public static String subqueryRepalce(String originString) {
-        Matcher subqueryMatcher = FROM_PATTERN.matcher(originString);
-        String replacedString = originString;
-        Map<Integer, Integer> parenthesesPairs = null;
-
-        while (subqueryMatcher.find()) {
-            if (parenthesesPairs == null) {
-                parenthesesPairs = findParenthesesPairs(originString);
-            }
-
-            int startIdx = subqueryMatcher.start(1);
-            int endIdx = parenthesesPairs.get(startIdx) + 1;
-
-            replacedString = replaceString(replacedString, originString.substring(startIdx, endIdx), originString.substring(startIdx, endIdx) + " as alias");
-        }
-
-        return replacedString;
-    }
-
-    public static String concatReplace(String originString) {
-        Matcher concatMatcher = CONCAT_PATTERN.matcher(originString);
-        String replacedString = originString;
-
-        while (concatMatcher.find()) {
-            String leftString = concatMatcher.group(1);
-            String rightString = concatMatcher.group(2);
-            replacedString = replaceString(replacedString, leftString + "||" + rightString, "concat(" + leftString + "," + rightString + ")");
-        }
-
-        return replacedString;
-    }
-
-    public static String doConvert(String originStr) {
-        // Step1.Replace " with `
-        String convertedSql = replaceString(originStr, "\"", "`");
-
-        // Step2.Replace extract functions
-        convertedSql = extractReplace(convertedSql);
-
-        // Step3.Replace cast type string
-        convertedSql = castRepalce(convertedSql);
-
-        // Step4.Replace sub query
-        convertedSql = subqueryRepalce(convertedSql);
-
-        // Step5.Replace char_length with length
-        convertedSql = replaceString(convertedSql, "char_length", "length");
-
-        // Step6.Replace "||" with concat
-        convertedSql = concatReplace(convertedSql);
-
-        return convertedSql;
-    }
-
-    private static Map<Integer, Integer> findParenthesesPairs(String sql) {
-        Map<Integer, Integer> result = new HashMap<>();
-        if (sql.length() > 1) {
-            Stack<Integer> lStack = new Stack<>();
-            boolean inStrVal = false;
-            for (int i = 0; i < sql.length(); i++) {
-                switch (sql.charAt(i)) {
-                case '(':
-                    if (!inStrVal) {
-                        lStack.push(i);
-                    }
-                    break;
-                case ')':
-                    if (!inStrVal && !lStack.empty()) {
-                        result.put(lStack.pop(), i);
-                    }
-                    break;
-                default:
-                    break;
-                }
-            }
-        }
-        return result;
-    }
-
-    @Override
-    public String convert(String originSql) {
-        return doConvert(originSql);
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java
deleted file mode 100644
index d5815bb..0000000
--- a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-package org.apache.kylin.storage.adhoc;
-
-/**
- * convert the query to satisfy the parser of adhoc query engine
- */
-public interface IAdhocConverter {
-    String convert(String originSql);
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java b/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java
deleted file mode 100644
index 62f6792..0000000
--- a/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.adhoc;
-
-import junit.framework.TestCase;
-import org.junit.Test;
-
-
-public class HiveAdhocConverterTest extends TestCase {
-    @Test
-    public void testSringReplace() {
-        String originString = "select count(*) as cnt from test_kylin_fact where char_length(lstg_format_name) < 10";
-        String replacedString = HiveAdhocConverter
-            .replaceString(originString, "char_length", "length");
-        assertEquals(replacedString, "select count(*) as cnt from test_kylin_fact where length(lstg_format_name) < 10");
-    }
-
-    @Test
-    public void testExtractReplace() {
-        String originString = "ignore EXTRACT(YEAR FROM KYLIN_CAL_DT.CAL_DT) ignore";
-        String replacedString = HiveAdhocConverter.extractReplace(originString);
-        assertEquals(replacedString, "ignore YEAR(KYLIN_CAL_DT.CAL_DT) ignore");
-    }
-
-    @Test
-    public void testCastReplace() {
-        String originString = "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS INTEGER)) ignore";
-        String replacedString = HiveAdhocConverter.castRepalce(originString);
-        assertEquals(replacedString, "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS int)) ignore");
-    }
-
-    @Test
-    public void testSubqueryReplace() {
-        String originString = "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) group by seller_id,lstg_format_name";
-        String replacedString = HiveAdhocConverter.subqueryRepalce(originString);
-        assertEquals(replacedString, "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) as alias group by seller_id,lstg_format_name");
-    }
-
-    @Test
-    public void testConcatReplace() {
-        String originString = "select count(*) as cnt from test_kylin_fact where lstg_format_name||'a'='ABINa'";
-        String replacedString = HiveAdhocConverter.concatReplace(originString);
-        assertEquals(replacedString, "select count(*) as cnt from test_kylin_fact where concat(lstg_format_name,'a')='ABINa'");
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 8caebc2..83a1ef3 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -187,13 +187,13 @@ kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
 
 
 ### AD-HOC QUERY ###
-#kylin.query.ad-hoc.runner.class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
+#kylin.query.ad-hoc.runner-class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
 
 #kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
 #kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
 #kylin.query.ad-hoc.jdbc.username=hive
 #kylin.query.ad-hoc.jdbc.password=
 
-#kylin.query.ad-hoc.pool.max-total=8
-#kylin.query.ad-hoc.pool.max-idle=8
-#kylin.query.ad-hoc.pool.min-idle=0
+#kylin.query.ad-hoc.jdbc.pool-max-total=8
+#kylin.query.ad-hoc.jdbc.pool-max-idle=8
+#kylin.query.ad-hoc.jdbc.pool-min-idle=0

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java b/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
index 275fce5..44d1770 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
@@ -28,34 +28,26 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.commons.pool.impl.GenericObjectPool;
-import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+import org.apache.kylin.source.adhocquery.IAdHocRunner;
 
-public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
+public class AdHocRunnerJdbcImpl implements IAdHocRunner {
 
     private static JdbcConnectionPool pool = null;
 
-    public AdHocRunnerJdbcImpl() {
-        super();
-    }
-
-    public AdHocRunnerJdbcImpl(KylinConfig config) {
-        super(config);
-    }
-
     @Override
-    public void init() {
-        if (this.pool == null) {
-            this.pool = new JdbcConnectionPool();
-            JdbcConnectionFactory factory = new JdbcConnectionFactory(this.config.getJdbcUrl(), this.config.getJdbcDriverClass(), this.config.getJdbcUsername(), this.config.getJdbcPassword());
+    public void init(KylinConfig config) {
+        if (pool == null) {
+            pool = new JdbcConnectionPool();
+            JdbcConnectionFactory factory = new JdbcConnectionFactory(config.getJdbcUrl(), config.getJdbcDriverClass(), config.getJdbcUsername(), config.getJdbcPassword());
             GenericObjectPool.Config poolConfig = new GenericObjectPool.Config();
-            poolConfig.maxActive = this.config.getPoolMaxTotal();
-            poolConfig.maxIdle = this.config.getPoolMaxIdle();
-            poolConfig.minIdle = this.config.getPoolMinIdle();
+            poolConfig.maxActive = config.getPoolMaxTotal();
+            poolConfig.maxIdle = config.getPoolMaxIdle();
+            poolConfig.minIdle = config.getPoolMinIdle();
 
             try {
-                this.pool.createPool(factory, poolConfig);
+                pool.createPool(factory, poolConfig);
             } catch (IOException e) {
                 throw new RuntimeException(e.getMessage(), e);
             }
@@ -96,11 +88,11 @@ public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
     }
 
     private Connection getConnection() {
-        return this.pool.getConnection();
+        return pool.getConnection();
     }
 
     private void closeConnection(Connection connection) {
-        this.pool.returnConnection(connection);
+        pool.returnConnection(connection);
     }
 
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java b/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
index 42613fe..dff98d0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
@@ -19,12 +19,13 @@
 package org.apache.kylin.rest.adhoc;
 
 
-import org.apache.commons.pool.PoolableObjectFactory;
-
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
 
+import org.apache.commons.pool.PoolableObjectFactory;
+
+@SuppressWarnings("unused")
 class JdbcConnectionFactory implements PoolableObjectFactory {
 
     private final String jdbcUrl;

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
index f6bfe3e..0da92c7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
@@ -26,10 +26,10 @@ import java.util.List;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.kylin.rest.exception.InternalErrorException;
-import org.apache.kylin.rest.model.Query;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.metadata.querymeta.TableMeta;
+import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.model.Query;
 import org.apache.kylin.rest.request.MetaRequest;
 import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.request.SQLRequest;
@@ -59,6 +59,7 @@ import org.supercsv.prefs.CsvPreference;
 @Controller
 public class QueryController extends BasicController {
 
+    @SuppressWarnings("unused")
     private static final Logger logger = LoggerFactory.getLogger(QueryController.class);
 
     @Autowired

http://git-wip-us.apache.org/repos/asf/kylin/blob/f6cdd629/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
index 8221790..76ff237 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.rest.util;
 
-import static org.apache.kylin.metadata.MetadataManager.CCInfo;
-
 import java.sql.SQLException;
 import java.util.Collections;
 import java.util.List;
@@ -33,15 +31,16 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.commons.lang3.tuple.Triple;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.MetadataManager.CCInfo;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.query.routing.NoRealizationFoundException;
-import org.apache.kylin.rest.exception.InternalErrorException;
-import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
-import org.apache.kylin.storage.adhoc.IAdhocConverter;
+import org.apache.kylin.source.adhocquery.IAdHocRunner;
+import org.apache.kylin.source.adhocquery.IAdHocConverter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -59,47 +58,27 @@ public class AdHocUtil {
         boolean isExpectedCause = (ExceptionUtils.getRootCause(sqlException).getClass()
                 .equals(NoRealizationFoundException.class));
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        Boolean isAdHoc = false;
 
         if (isExpectedCause && kylinConfig.isAdhocEnabled()) {
-            Class runnerClass = Class.forName(kylinConfig.getAdHocRunnerClassName());
-            Class converterClass = Class.forName(kylinConfig.getAdHocConverterClassName());
-            Object runnerObj = runnerClass.newInstance();
-            Object converterObj = converterClass.newInstance();
-
-            if (!(runnerObj instanceof AdHocRunnerBase)) {
-                throw new InternalErrorException("Ad-hoc runner class should be sub-class of AdHocRunnerBase");
-            }
-
-            if (!(converterObj instanceof IAdhocConverter)) {
-                throw new InternalErrorException("Ad-hoc converter class should implement of IAdhocConverter");
-            }
+            IAdHocRunner runner = (IAdHocRunner) ClassUtil.newInstance(kylinConfig.getAdHocRunnerClassName());
+            IAdHocConverter converter = (IAdHocConverter) ClassUtil.newInstance(kylinConfig.getAdHocConverterClassName());
 
-            AdHocRunnerBase runner = (AdHocRunnerBase) runnerObj;
-            IAdhocConverter converter = (IAdhocConverter) converterObj;
-            runner.setConfig(kylinConfig);
+            runner.init(kylinConfig);
 
-            logger.debug("Ad-hoc query enabled for Kylin");
+            logger.debug("Ad-hoc query runner {}", runner);
 
-            runner.init();
-
-            try {
-                String expandCC = restoreComputedColumnToExpr(sql, project);
-                String adhocSql = converter.convert(expandCC);
-                if (!adhocSql.equals(adhocSql)) {
-                    logger.info("before delegating to adhoc, the query is converted to {} ", adhocSql);
-                }
-
-                runner.executeQuery(adhocSql, results, columnMetas);
-                isAdHoc = true;
-            } catch (Exception exception) {
-                throw exception;
+            String expandCC = restoreComputedColumnToExpr(sql, project);
+            String adhocSql = converter.convert(expandCC);
+            if (!adhocSql.equals(sql)) {
+                logger.info("before delegating to adhoc, the query is converted to {} ", adhocSql);
             }
+
+            runner.executeQuery(adhocSql, results, columnMetas);
+            
+            return true;
         } else {
             throw sqlException;
         }
-
-        return isAdHoc;
     }
 
     private final static Pattern identifierInSqlPattern = Pattern.compile(

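Condensed, the new fallback path reads as below (a recap of the hunk above, not additional code; the surrounding method signature and error handling are omitted). Note that the converted-SQL log line now compares against the original sql, fixing the old self-comparison that could never be true:

    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();

    // Both plugin classes come from kylin.properties via ClassUtil.newInstance(...)
    IAdHocRunner runner = (IAdHocRunner) ClassUtil.newInstance(kylinConfig.getAdHocRunnerClassName());
    IAdHocConverter converter = (IAdHocConverter) ClassUtil.newInstance(kylinConfig.getAdHocConverterClassName());
    runner.init(kylinConfig);

    String expandCC = restoreComputedColumnToExpr(sql, project);   // expand computed columns first
    String adhocSql = converter.convert(expandCC);                 // then adapt the SQL for the ad-hoc engine
    if (!adhocSql.equals(sql)) {
        logger.info("before delegating to adhoc, the query is converted to {} ", adhocSql);
    }
    runner.executeQuery(adhocSql, results, columnMetas);
    return true;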

[52/67] [abbrv] kylin git commit: minor, refine BuildCubeWithEngine

Posted by li...@apache.org.
minor, refine BuildCubeWithEngine


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/2cde3360
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/2cde3360
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/2cde3360

Branch: refs/heads/master
Commit: 2cde33606980d30b169bd11ed6a0dfd5a93af5ff
Parents: 361ac00
Author: Cheng Wang <ch...@kyligence.io>
Authored: Tue May 30 10:13:05 2017 +0800
Committer: 成 <ch...@kyligence.io>
Committed: Tue May 30 14:54:30 2017 +0800

----------------------------------------------------------------------
 .../kylin/provision/BuildCubeWithEngine.java    | 27 ++++++++++++--------
 1 file changed, 16 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/2cde3360/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 5719523..bd563a2 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -55,13 +55,13 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.rest.job.StorageCleanupJob;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.SourcePartition;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
-import org.apache.kylin.rest.job.StorageCleanupJob;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -181,8 +181,10 @@ public class BuildCubeWithEngine {
     public void build() throws Exception {
         DeployUtil.prepareTestDataForNormalCubes("ci_left_join_model");
         System.setProperty("kylin.storage.hbase.hfile-size-gb", "1.0f");
-        testInner();
-        testLeft();
+        testCase("testInnerJoinCube");
+        testCase("testLeftJoinCube");
+        testCase("testTableExt");
+        testCase("testModel");
         System.setProperty("kylin.storage.hbase.hfile-size-gb", "0.0f");
     }
 
@@ -201,13 +203,7 @@ public class BuildCubeWithEngine {
         }
     }
 
-    private void testInner() throws Exception {
-        String[] testCase = new String[] { "testInnerJoinCube" };
-        runTestAndAssertSucceed(testCase);
-    }
-
-    private void testLeft() throws Exception {
-        String[] testCase = new String[] { "testLeftJoinCube" };
+    private void testCase(String... testCase) throws Exception {
         runTestAndAssertSucceed(testCase);
     }
 
@@ -260,11 +256,20 @@ public class BuildCubeWithEngine {
     }
 
     @SuppressWarnings("unused")
+    protected boolean testTableExt() throws Exception {
+        return true;
+    }
+
+    @SuppressWarnings("unused")
+    protected boolean testModel() throws Exception {
+        return true;
+    }
+
+    @SuppressWarnings("unused")
     // called by reflection
     private boolean testLeftJoinCube() throws Exception {
         String cubeName = "ci_left_join_cube";
         clearSegment(cubeName);
-
         // ci_left_join_cube has percentile which isn't supported by Spark engine now
         // updateCubeEngineType(cubeName);
 

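The build() method now passes test-method names as strings because runTestAndAssertSucceed invokes them reflectively (see the "called by reflection" comment). That method's body is not part of this hunk; the sketch below only illustrates that style of dispatch and is not the actual implementation:

    import java.lang.reflect.Method;

    // Hypothetical illustration of name-based dispatch; NOT the real runTestAndAssertSucceed.
    private void runByName(String... testCases) throws Exception {
        for (String methodName : testCases) {
            Method m = BuildCubeWithEngine.class.getDeclaredMethod(methodName);
            m.setAccessible(true);                        // the per-cube test methods are private/protected
            boolean succeed = (Boolean) m.invoke(this);   // each returns true on success
            if (!succeed) {
                throw new AssertionError(methodName + " failed");
            }
        }
    }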

[24/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/DoggedCubeBuilder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/DoggedCubeBuilder.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/DoggedCubeBuilder.java
index 0a6314c..a9211da 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/DoggedCubeBuilder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/DoggedCubeBuilder.java
@@ -57,8 +57,7 @@ public class DoggedCubeBuilder extends AbstractInMemCubeBuilder {
     private int splitRowThreshold = Integer.MAX_VALUE;
     private int unitRows = 1000;
 
-    public DoggedCubeBuilder(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public DoggedCubeBuilder(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         super(cubeDesc, flatDesc, dictionaryMap);
 
         // check memory more often if a single row is big
@@ -199,10 +198,7 @@ public class DoggedCubeBuilder extends AbstractInMemCubeBuilder {
             } else {
                 for (Throwable t : errors)
                     logger.error("Exception during in-mem cube build", t);
-                throw new IOException(
-                        errors.size()
-                                + " exceptions during in-mem cube build, cause set to the first, check log for more",
-                        errors.get(0));
+                throw new IOException(errors.size() + " exceptions during in-mem cube build, cause set to the first, check log for more", errors.get(0));
             }
         }
 
@@ -255,8 +251,7 @@ public class DoggedCubeBuilder extends AbstractInMemCubeBuilder {
             int nSplit = splits.size();
             long splitRowCount = nSplit == 0 ? 0 : splits.get(nSplit - 1).inputRowCount;
 
-            logger.info(splitRowCount + " records went into split #" + nSplit + "; " + systemAvailMB + " MB left, "
-                    + reserveMemoryMB + " MB threshold");
+            logger.info(splitRowCount + " records went into split #" + nSplit + "; " + systemAvailMB + " MB left, " + reserveMemoryMB + " MB threshold");
 
             if (splitRowCount >= splitRowThreshold) {
                 logger.info("Split cut due to hitting splitRowThreshold " + splitRowThreshold);
@@ -264,8 +259,7 @@ public class DoggedCubeBuilder extends AbstractInMemCubeBuilder {
             }
 
             if (systemAvailMB <= reserveMemoryMB * 1.5) {
-                logger.info("Split cut due to hitting memory threshold, system avail " + systemAvailMB
-                        + " MB <= reserve " + reserveMemoryMB + "*1.5 MB");
+                logger.info("Split cut due to hitting memory threshold, system avail " + systemAvailMB + " MB <= reserve " + reserveMemoryMB + "*1.5 MB");
                 return true;
             }
 
@@ -411,8 +405,7 @@ public class DoggedCubeBuilder extends AbstractInMemCubeBuilder {
                 if (cuboidIterator.hasNext()) {
                     CuboidResult cuboid = cuboidIterator.next();
                     currentCuboidId = cuboid.cuboidId;
-                    scanner = cuboid.table.scan(new GTScanRequestBuilder().setInfo(cuboid.table.getInfo())
-                            .setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
+                    scanner = cuboid.table.scan(new GTScanRequestBuilder().setInfo(cuboid.table.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
                     recordIterator = scanner.iterator();
                 } else {
                     return false;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
index 5233def..a26e948 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
@@ -33,8 +33,8 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.MemoryBudgetController;
-import org.apache.kylin.common.util.MemoryBudgetController.MemoryWaterLevel;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.MemoryBudgetController.MemoryWaterLevel;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.cuboid.CuboidScheduler;
 import org.apache.kylin.cube.gridtable.CubeGridTable;
@@ -90,8 +90,7 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
     private Object[] totalSumForSanityCheck;
     private ICuboidCollector resultCollector;
 
-    public InMemCubeBuilder(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public InMemCubeBuilder(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         super(cubeDesc, flatDesc, dictionaryMap);
         this.cuboidScheduler = new CuboidScheduler(cubeDesc);
         this.baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
@@ -110,8 +109,10 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
     }
 
     private GridTable newGridTableByCuboidID(long cuboidID) throws IOException {
-        GTInfo info = CubeGridTable.newGTInfo(Cuboid.findById(cubeDesc, cuboidID),
-                new CubeDimEncMap(cubeDesc, dictionaryMap));
+        GTInfo info = CubeGridTable.newGTInfo(
+                Cuboid.findById(cubeDesc, cuboidID),
+                new CubeDimEncMap(cubeDesc, dictionaryMap)
+        );
 
         // Below several store implementation are very similar in performance. The ConcurrentDiskStore is the simplest.
         // MemDiskStore store = new MemDiskStore(info, memBudget == null ? MemoryBudgetController.ZERO_BUDGET : memBudget);
@@ -231,9 +232,7 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
         } else {
             for (Throwable t : errors)
                 logger.error("Exception during in-mem cube build", t);
-            throw new IOException(
-                    errors.size() + " exceptions during in-mem cube build, cause set to the first, check log for more",
-                    errors.get(0));
+            throw new IOException(errors.size() + " exceptions during in-mem cube build, cause set to the first, check log for more", errors.get(0));
         }
     }
 
@@ -323,9 +322,7 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
         if (budget < baseResult.aggrCacheMB) {
             // make sure we have base aggr cache as minimal
             budget = baseResult.aggrCacheMB;
-            logger.warn("System avail memory (" + systemAvailMB + " MB) is less than base aggr cache ("
-                    + baseResult.aggrCacheMB + " MB) + minimal reservation (" + reserve
-                    + " MB), consider increase JVM heap -Xmx");
+            logger.warn("System avail memory (" + systemAvailMB + " MB) is less than base aggr cache (" + baseResult.aggrCacheMB + " MB) + minimal reservation (" + reserve + " MB), consider increase JVM heap -Xmx");
         }
 
         logger.debug("Memory Budget is " + budget + " MB");
@@ -340,11 +337,8 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
         GTBuilder baseBuilder = baseCuboid.rebuild();
         IGTScanner baseInput = new InputConverter(baseCuboid.getInfo(), input);
 
-        Pair<ImmutableBitSet, ImmutableBitSet> dimensionMetricsBitSet = InMemCubeBuilderUtils
-                .getDimensionAndMetricColumnBitSet(baseCuboidId, measureCount);
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(baseCuboid.getInfo()).setRanges(null).setDimensions(null)
-                .setAggrGroupBy(dimensionMetricsBitSet.getFirst()).setAggrMetrics(dimensionMetricsBitSet.getSecond())
-                .setAggrMetricsFuncs(metricsAggrFuncs).setFilterPushDown(null).createGTScanRequest();
+        Pair<ImmutableBitSet, ImmutableBitSet> dimensionMetricsBitSet = InMemCubeBuilderUtils.getDimensionAndMetricColumnBitSet(baseCuboidId, measureCount);
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(baseCuboid.getInfo()).setRanges(null).setDimensions(null).setAggrGroupBy(dimensionMetricsBitSet.getFirst()).setAggrMetrics(dimensionMetricsBitSet.getSecond()).setAggrMetricsFuncs(metricsAggrFuncs).setFilterPushDown(null).createGTScanRequest();
         GTAggregateScanner aggregationScanner = new GTAggregateScanner(baseInput, req);
         aggregationScanner.trackMemoryLevel(baseCuboidMemTracker);
 
@@ -362,15 +356,13 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
         long timeSpent = System.currentTimeMillis() - startTime;
         logger.info("Cuboid " + baseCuboidId + " has " + count + " rows, build takes " + timeSpent + "ms");
 
-        int mbEstimateBaseAggrCache = (int) (aggregationScanner.getEstimateSizeOfAggrCache()
-                / MemoryBudgetController.ONE_MB);
+        int mbEstimateBaseAggrCache = (int) (aggregationScanner.getEstimateSizeOfAggrCache() / MemoryBudgetController.ONE_MB);
         logger.info("Wild estimate of base aggr cache is " + mbEstimateBaseAggrCache + " MB");
 
         return updateCuboidResult(baseCuboidId, baseCuboid, count, timeSpent, 0);
     }
 
-    private CuboidResult updateCuboidResult(long cuboidId, GridTable table, int nRows, long timeSpent,
-            int aggrCacheMB) {
+    private CuboidResult updateCuboidResult(long cuboidId, GridTable table, int nRows, long timeSpent, int aggrCacheMB) {
         if (aggrCacheMB <= 0 && baseResult != null) {
             aggrCacheMB = (int) Math.round(//
                     (DERIVE_AGGR_CACHE_CONSTANT_FACTOR + DERIVE_AGGR_CACHE_VARIABLE_FACTOR * nRows / baseResult.nRows) //
@@ -408,18 +400,13 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
     }
 
     private CuboidResult aggregateCuboid(CuboidResult parent, long cuboidId) throws IOException {
-        final Pair<ImmutableBitSet, ImmutableBitSet> allNeededColumns = InMemCubeBuilderUtils
-                .getDimensionAndMetricColumnBitSet(parent.cuboidId, cuboidId, measureCount);
-        return scanAndAggregateGridTable(parent.table, parent.cuboidId, cuboidId, allNeededColumns.getFirst(),
-                allNeededColumns.getSecond());
+        final Pair<ImmutableBitSet, ImmutableBitSet> allNeededColumns = InMemCubeBuilderUtils.getDimensionAndMetricColumnBitSet(parent.cuboidId, cuboidId, measureCount);
+        return scanAndAggregateGridTable(parent.table, parent.cuboidId, cuboidId, allNeededColumns.getFirst(), allNeededColumns.getSecond());
     }
 
-    private GTAggregateScanner prepareGTAggregationScanner(GridTable gridTable, long parentId, long cuboidId,
-            ImmutableBitSet aggregationColumns, ImmutableBitSet measureColumns) throws IOException {
+    private GTAggregateScanner prepareGTAggregationScanner(GridTable gridTable, long parentId, long cuboidId, ImmutableBitSet aggregationColumns, ImmutableBitSet measureColumns) throws IOException {
         GTInfo info = gridTable.getInfo();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setAggrGroupBy(aggregationColumns).setAggrMetrics(measureColumns).setAggrMetricsFuncs(metricsAggrFuncs)
-                .setFilterPushDown(null).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(aggregationColumns).setAggrMetrics(measureColumns).setAggrMetricsFuncs(metricsAggrFuncs).setFilterPushDown(null).createGTScanRequest();
         GTAggregateScanner scanner = (GTAggregateScanner) gridTable.scan(req);
 
         // for child cuboid, some measures don't need aggregation.
@@ -438,13 +425,11 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
         return scanner;
     }
 
-    private CuboidResult scanAndAggregateGridTable(GridTable gridTable, long parentId, long cuboidId,
-            ImmutableBitSet aggregationColumns, ImmutableBitSet measureColumns) throws IOException {
+    private CuboidResult scanAndAggregateGridTable(GridTable gridTable, long parentId, long cuboidId, ImmutableBitSet aggregationColumns, ImmutableBitSet measureColumns) throws IOException {
         long startTime = System.currentTimeMillis();
         logger.info("Calculating cuboid " + cuboidId);
 
-        GTAggregateScanner scanner = prepareGTAggregationScanner(gridTable, parentId, cuboidId, aggregationColumns,
-                measureColumns);
+        GTAggregateScanner scanner = prepareGTAggregationScanner(gridTable, parentId, cuboidId, aggregationColumns, measureColumns);
         GridTable newGridTable = newGridTableByCuboidID(cuboidId);
         GTBuilder builder = newGridTable.rebuild();
 
@@ -539,8 +524,7 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
             this.info = info;
             this.input = input;
             this.record = new GTRecord(info);
-            this.inMemCubeBuilderInputConverter = new InMemCubeBuilderInputConverter(cubeDesc, flatDesc, dictionaryMap,
-                    info);
+            this.inMemCubeBuilderInputConverter = new InMemCubeBuilderInputConverter(cubeDesc, flatDesc, dictionaryMap, info);
         }
 
         @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderInputConverter.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderInputConverter.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderInputConverter.java
index 6a2e40f..ab44f63 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderInputConverter.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderInputConverter.java
@@ -49,8 +49,7 @@ public class InMemCubeBuilderInputConverter {
     private final GTInfo gtInfo;
     protected List<byte[]> nullBytes;
 
-    public InMemCubeBuilderInputConverter(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc,
-            Map<TblColRef, Dictionary<String>> dictionaryMap, GTInfo gtInfo) {
+    public InMemCubeBuilderInputConverter(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc, Map<TblColRef, Dictionary<String>> dictionaryMap, GTInfo gtInfo) {
         this.gtInfo = gtInfo;
         this.flatDesc = new CubeJoinedFlatTableEnrich(flatDesc, cubeDesc);
         this.measureCount = cubeDesc.getMeasures().size();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderUtils.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderUtils.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderUtils.java
index a0661f0..8557acf 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderUtils.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilderUtils.java
@@ -26,8 +26,7 @@ import org.apache.kylin.common.util.Pair;
  */
 public final class InMemCubeBuilderUtils {
 
-    public static final Pair<ImmutableBitSet, ImmutableBitSet> getDimensionAndMetricColumnBitSet(final long cuboidId,
-            final int measureCount) {
+    public static final Pair<ImmutableBitSet, ImmutableBitSet> getDimensionAndMetricColumnBitSet(final long cuboidId, final int measureCount) {
         int cardinality = Long.bitCount(cuboidId);
         BitSet dimension = new BitSet();
         dimension.set(0, cardinality);
@@ -36,15 +35,13 @@ public final class InMemCubeBuilderUtils {
         return Pair.newPair(new ImmutableBitSet(dimension), new ImmutableBitSet(metrics));
     }
 
-    public static final Pair<ImmutableBitSet, ImmutableBitSet> getDimensionAndMetricColumnBitSet(
-            final long baseCuboidId, final long childCuboidId, final int measureCount) {
-        final Pair<ImmutableBitSet, ImmutableBitSet> parentDimensionAndMetricColumnBitSet = getDimensionAndMetricColumnBitSet(
-                baseCuboidId, measureCount);
+    public static final Pair<ImmutableBitSet, ImmutableBitSet> getDimensionAndMetricColumnBitSet(final long baseCuboidId, final long childCuboidId, final int measureCount) {
+        final Pair<ImmutableBitSet, ImmutableBitSet> parentDimensionAndMetricColumnBitSet = getDimensionAndMetricColumnBitSet(baseCuboidId, measureCount);
         ImmutableBitSet parentDimensions = parentDimensionAndMetricColumnBitSet.getFirst();
         ImmutableBitSet measureColumns = parentDimensionAndMetricColumnBitSet.getSecond();
         ImmutableBitSet childDimensions = parentDimensions;
         long mask = Long.highestOneBit(baseCuboidId);
-        long parentCuboidIdActualLength = (long) Long.SIZE - Long.numberOfLeadingZeros(baseCuboidId);
+        long parentCuboidIdActualLength = (long)Long.SIZE - Long.numberOfLeadingZeros(baseCuboidId);
         int index = 0;
         for (int i = 0; i < parentCuboidIdActualLength; i++) {
             if ((mask & baseCuboidId) > 0) {
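
The bitset helper changed above, InMemCubeBuilderUtils.getDimensionAndMetricColumnBitSet, lays a cuboid out over grid-table columns: the Long.bitCount(cuboidId) dimension columns come first and the measureCount measure columns follow. A standalone sketch of that layout using plain java.util.BitSet (Kylin's ImmutableBitSet and Pair wrappers dropped for brevity, class name invented for illustration):

    import java.util.BitSet;

    // Sketch of the column layout computed by getDimensionAndMetricColumnBitSet:
    // dimension columns occupy positions [0, cardinality), measure columns follow.
    final class CuboidColumnLayout {
        final BitSet dimensions = new BitSet();
        final BitSet metrics = new BitSet();

        CuboidColumnLayout(long cuboidId, int measureCount) {
            int cardinality = Long.bitCount(cuboidId); // one column per dimension present in the cuboid
            dimensions.set(0, cardinality);
            metrics.set(cardinality, cardinality + measureCount);
        }
    }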

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
index 9d4602e..a5471df 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
@@ -71,8 +71,7 @@ public class MemDiskStore implements IGTStore, Closeable {
         this(info, budgetCtrl, diskFile, false);
     }
 
-    private MemDiskStore(GTInfo info, MemoryBudgetController budgetCtrl, File diskFile, boolean delOnClose)
-            throws IOException {
+    private MemDiskStore(GTInfo info, MemoryBudgetController budgetCtrl, File diskFile, boolean delOnClose) throws IOException {
         this.info = info;
         this.lock = this;
         this.memPart = new MemPart(budgetCtrl);
@@ -168,8 +167,7 @@ public class MemDiskStore implements IGTStore, Closeable {
                         if (available() <= 0)
                             return -1;
 
-                        if (memChunk == null && memPart.headOffset() <= readOffset
-                                && readOffset < memPart.tailOffset()) {
+                        if (memChunk == null && memPart.headOffset() <= readOffset && readOffset < memPart.tailOffset()) {
                             memChunk = memPart.seekMemChunk(readOffset);
                         }
 
@@ -221,8 +219,7 @@ public class MemDiskStore implements IGTStore, Closeable {
                 din.close();
                 diskPart.closeRead();
                 if (debug)
-                    logger.debug(MemDiskStore.this + " read end @ " + readOffset + ", " + (memRead) + " from mem, "
-                            + (diskRead) + " from disk, " + nReadCalls + " read() calls");
+                    logger.debug(MemDiskStore.this + " read end @ " + readOffset + ", " + (memRead) + " from mem, " + (diskRead) + " from disk, " + nReadCalls + " read() calls");
             }
         }
 
@@ -361,8 +358,7 @@ public class MemDiskStore implements IGTStore, Closeable {
                     diskPart.closeWrite();
                     ongoingWriter = null;
                     if (debug)
-                        logger.debug(MemDiskStore.this + " write end @ " + writeOffset + ", " + (memWrite) + " to mem, "
-                                + (diskWrite) + " to disk, " + nWriteCalls + " write() calls");
+                        logger.debug(MemDiskStore.this + " write end @ " + writeOffset + ", " + (memWrite) + " to mem, " + (diskWrite) + " to disk, " + nWriteCalls + " write() calls");
                 } else {
                     // the asyncFlusher will call this close() again later
                 }
@@ -667,13 +663,11 @@ public class MemDiskStore implements IGTStore, Closeable {
 
         public void openWrite(boolean append) throws IOException {
             if (append) {
-                writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.APPEND,
-                        StandardOpenOption.WRITE);
+                writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
                 tailOffset = diskFile.length();
             } else {
                 diskFile.delete();
-                writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE_NEW,
-                        StandardOpenOption.WRITE);
+                writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
                 tailOffset = 0;
             }
         }
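
MemDiskStore's openWrite, shown in the last hunk above, opens the backing disk file either in append mode (keeping the existing file and tail offset) or from scratch after deleting any previous file. The same two modes in a minimal NIO sketch outside the Kylin class; the helper class and method names are invented for illustration:

    import java.io.File;
    import java.io.IOException;
    import java.nio.channels.FileChannel;
    import java.nio.file.StandardOpenOption;

    // Sketch of MemDiskStore's openWrite(boolean append): append keeps the
    // existing file contents, otherwise the file is deleted and recreated.
    final class DiskWriteHelper {
        static FileChannel openForWrite(File diskFile, boolean append) throws IOException {
            if (append) {
                return FileChannel.open(diskFile.toPath(),
                        StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
            }
            diskFile.delete(); // start over with an empty file
            return FileChannel.open(diskFile.toPath(),
                    StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
        }
    }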

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/kv/AbstractRowKeyEncoder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/kv/AbstractRowKeyEncoder.java b/core-cube/src/main/java/org/apache/kylin/cube/kv/AbstractRowKeyEncoder.java
index 91a3bd4..2becde4 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/kv/AbstractRowKeyEncoder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/kv/AbstractRowKeyEncoder.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.cube.kv;
 
-import java.util.Map;
-
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.cube.CubeSegment;
@@ -30,6 +28,8 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Map;
+
 /**
  * 
  * @author xjiang

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/kv/CubeDimEncMap.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/kv/CubeDimEncMap.java b/core-cube/src/main/java/org/apache/kylin/cube/kv/CubeDimEncMap.java
index 2a17af4..bd9554a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/kv/CubeDimEncMap.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/kv/CubeDimEncMap.java
@@ -18,8 +18,7 @@
 
 package org.apache.kylin.cube.kv;
 
-import java.util.Map;
-
+import com.google.common.collect.Maps;
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.model.CubeDesc;
@@ -33,7 +32,7 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Maps;
+import java.util.Map;
 
 public class CubeDimEncMap implements IDimensionEncodingMap, java.io.Serializable {
 
@@ -72,8 +71,7 @@ public class CubeDimEncMap implements IDimensionEncodingMap, java.io.Serializabl
                 }
             } else {
                 // normal case
-                result = DimensionEncodingFactory.create(colDesc.getEncodingName(), colDesc.getEncodingArgs(),
-                        colDesc.getEncodingVersion());
+                result = DimensionEncodingFactory.create(colDesc.getEncodingName(), colDesc.getEncodingArgs(), colDesc.getEncodingVersion());
             }
             encMap.put(col, result);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/kv/FuzzyMaskEncoder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/kv/FuzzyMaskEncoder.java b/core-cube/src/main/java/org/apache/kylin/cube/kv/FuzzyMaskEncoder.java
index c2db871..0cbb7d2 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/kv/FuzzyMaskEncoder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/kv/FuzzyMaskEncoder.java
@@ -77,8 +77,7 @@ public class FuzzyMaskEncoder extends RowKeyEncoder {
     }
 
     @Override
-    protected void fillColumnValue(TblColRef column, int columnLen, String valueStr, byte[] outputValue,
-            int outputValueOffset) {
+    protected void fillColumnValue(TblColRef column, int columnLen, String valueStr, byte[] outputValue, int outputValueOffset) {
         if (valueStr == null) {
             Arrays.fill(outputValue, outputValueOffset, outputValueOffset + columnLen, RowConstants.BYTE_ONE);
         } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyColumnIO.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyColumnIO.java b/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyColumnIO.java
index 30c885c..65911a0 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyColumnIO.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyColumnIO.java
@@ -47,8 +47,7 @@ public class RowKeyColumnIO implements java.io.Serializable {
         return dimEncMap.getDictionary(col);
     }
 
-    public void writeColumn(TblColRef col, String value, int roundingFlag, byte defaultValue, byte[] output,
-            int outputOffset) {
+    public void writeColumn(TblColRef col, String value, int roundingFlag, byte defaultValue, byte[] output, int outputOffset) {
         DimensionEncoding dimEnc = dimEncMap.get(col);
         if (dimEnc instanceof DictionaryDimEnc)
             dimEnc = ((DictionaryDimEnc) dimEnc).copy(roundingFlag, defaultValue);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyEncoder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyEncoder.java b/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyEncoder.java
index d11dcb0..a669fb1 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyEncoder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/kv/RowKeyEncoder.java
@@ -18,11 +18,7 @@
 
 package org.apache.kylin.cube.kv;
 
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.base.Preconditions;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ImmutableBitSet;
@@ -32,7 +28,10 @@ import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.gridtable.GTRecord;
 import org.apache.kylin.metadata.model.TblColRef;
 
-import com.google.common.base.Preconditions;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 public class RowKeyEncoder extends AbstractRowKeyEncoder implements java.io.Serializable {
 
@@ -75,10 +74,8 @@ public class RowKeyEncoder extends AbstractRowKeyEncoder implements java.io.Seri
             int shardSeedOffset = uhcOffset == -1 ? 0 : uhcOffset;
             int shardSeedLength = uhcLength == -1 ? bodyLength : uhcLength;
             short cuboidShardNum = cubeSeg.getCuboidShardNum(cuboid.getId());
-            short shardOffset = ShardingHash.getShard(key, RowConstants.ROWKEY_SHARD_AND_CUBOID_LEN + shardSeedOffset,
-                    shardSeedLength, cuboidShardNum);
-            return ShardingHash.normalize(cubeSeg.getCuboidBaseShard(cuboid.getId()), shardOffset,
-                    cubeSeg.getTotalShards(cuboid.getId()));
+            short shardOffset = ShardingHash.getShard(key, RowConstants.ROWKEY_SHARD_AND_CUBOID_LEN + shardSeedOffset, shardSeedLength, cuboidShardNum);
+            return ShardingHash.normalize(cubeSeg.getCuboidBaseShard(cuboid.getId()), shardOffset, cubeSeg.getTotalShards(cuboid.getId()));
         } else {
             throw new RuntimeException("If enableSharding false, you should never calculate shard");
         }
@@ -110,8 +107,7 @@ public class RowKeyEncoder extends AbstractRowKeyEncoder implements java.io.Seri
             int c = selectedCols.trueBitAt(i);
             ByteArray columnC = record.get(c);
             if (columnC.array() != null) {
-                System.arraycopy(record.get(c).array(), columnC.offset(), buf.array(), buf.offset() + pos,
-                        columnC.length());
+                System.arraycopy(record.get(c).array(), columnC.offset(), buf.array(), buf.offset() + pos, columnC.length());
                 pos += columnC.length();
             } else {
                 int maxLength = record.getInfo().getCodeSystem().maxCodeLength(c);
@@ -126,8 +122,7 @@ public class RowKeyEncoder extends AbstractRowKeyEncoder implements java.io.Seri
     public void encode(ByteArray bodyBytes, ByteArray outputBuf) {
         Preconditions.checkState(bodyBytes.length() == bodyLength);
         Preconditions.checkState(bodyBytes.length() + getHeaderLength() == outputBuf.length(), //
-                "bodybytes length: " + bodyBytes.length() + " outputBuf length: " + outputBuf.length()
-                        + " header length: " + getHeaderLength());
+                "bodybytes length: " + bodyBytes.length() + " outputBuf length: " + outputBuf.length() + " header length: " + getHeaderLength());
         System.arraycopy(bodyBytes.array(), bodyBytes.offset(), outputBuf.array(), getHeaderLength(), bodyLength);
 
         //fill shard and cuboid
@@ -176,8 +171,7 @@ public class RowKeyEncoder extends AbstractRowKeyEncoder implements java.io.Seri
         //return offset;
     }
 
-    protected void fillColumnValue(TblColRef column, int columnLen, String valueStr, byte[] outputValue,
-            int outputValueOffset) {
+    protected void fillColumnValue(TblColRef column, int columnLen, String valueStr, byte[] outputValue, int outputValueOffset) {
         // special null value case
         if (valueStr == null) {
             Arrays.fill(outputValue, outputValueOffset, outputValueOffset + columnLen, defaultValue());
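
Both RowKeyEncoder and FuzzyMaskEncoder above write a fixed-width slot per rowkey column, and a null value is represented by padding the entire slot with a marker byte (FuzzyMaskEncoder uses RowConstants.BYTE_ONE; RowKeyEncoder uses its own defaultValue()). A stripped-down sketch of that padding step, with the class name invented for illustration:

    import java.util.Arrays;

    // Sketch of the null branch of fillColumnValue: a missing value is encoded
    // by filling the whole fixed-length column slot with a marker byte.
    final class ColumnSlotFiller {
        private final byte nullMarker; // e.g. RowConstants.BYTE_ONE in FuzzyMaskEncoder above

        ColumnSlotFiller(byte nullMarker) {
            this.nullMarker = nullMarker;
        }

        void fillNull(byte[] output, int offset, int columnLen) {
            Arrays.fill(output, offset, offset + columnLen, nullMarker);
        }
    }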

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
index 8e8faf1..5a32a92 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
@@ -118,8 +118,7 @@ public class AggregationGroup implements Serializable {
         // check no dup
         Set<String> set = new HashSet<>(Arrays.asList(names));
         if (set.size() < names.length)
-            throw new IllegalStateException(
-                    "Columns in aggrgroup must not contain duplication: " + Arrays.asList(names));
+            throw new IllegalStateException("Columns in aggrgroup must not contain duplication: " + Arrays.asList(names));
     }
 
     private void buildPartialCubeFullMask(RowKeyDesc rowKeyDesc) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index d362616..82d0b1b 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -114,8 +114,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
 
         @Override
         public String toString() {
-            return "DeriveInfo [type=" + type + ", join=" + join + ", columns=" + Arrays.toString(columns)
-                    + ", isOneToOne=" + isOneToOne + "]";
+            return "DeriveInfo [type=" + type + ", join=" + join + ", columns=" + Arrays.toString(columns) + ", isOneToOne=" + isOneToOne + "]";
         }
 
     }
@@ -289,8 +288,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         throw new RuntimeException("Cannot get host info for " + derived);
     }
 
-    public Map<Array<TblColRef>, List<DeriveInfo>> getHostToDerivedInfo(List<TblColRef> rowCols,
-            Collection<TblColRef> wantedCols) {
+    public Map<Array<TblColRef>, List<DeriveInfo>> getHostToDerivedInfo(List<TblColRef> rowCols, Collection<TblColRef> wantedCols) {
         Map<Array<TblColRef>, List<DeriveInfo>> result = new HashMap<Array<TblColRef>, List<DeriveInfo>>();
         for (Entry<Array<TblColRef>, List<DeriveInfo>> entry : hostToDerivedMap.entrySet()) {
             Array<TblColRef> hostCols = entry.getKey();
@@ -500,15 +498,12 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         KylinVersion cubeVersion = new KylinVersion(getVersion());
         KylinVersion kylinVersion = KylinVersion.getCurrentVersion();
         if (!kylinVersion.isCompatibleWith(cubeVersion)) {
-            logger.info("checkSignature on {} is skipped as the its version {} is different from kylin version {}",
-                    getName(), cubeVersion, kylinVersion);
+            logger.info("checkSignature on {} is skipped as the its version {} is different from kylin version {}", getName(), cubeVersion, kylinVersion);
             return true;
         }
 
         if (kylinVersion.isCompatibleWith(cubeVersion) && !kylinVersion.isSignatureCompatibleWith(cubeVersion)) {
-            logger.info(
-                    "checkSignature on {} is skipped as the its version is {} (not signature compatible but compatible) ",
-                    getName(), cubeVersion);
+            logger.info("checkSignature on {} is skipped as the its version is {} (not signature compatible but compatible) ", getName(), cubeVersion);
             return true;
         }
 
@@ -616,9 +611,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
 
         // check all dimension columns are presented on rowkey
         List<TblColRef> dimCols = listDimensionColumnsExcludingDerived(true);
-        checkState(rowkey.getRowKeyColumns().length == dimCols.size(),
-                "RowKey columns count (%s) doesn't match dimensions columns count (%s)",
-                rowkey.getRowKeyColumns().length, dimCols.size());
+        checkState(rowkey.getRowKeyColumns().length == dimCols.size(), "RowKey columns count (%s) doesn't match dimensions columns count (%s)", rowkey.getRowKeyColumns().length, dimCols.size());
 
         initDictionaryDesc();
         amendAllColumns();
@@ -672,8 +665,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 combination = config.getCubeAggrGroupMaxCombination() + 1;
             } finally {
                 if (combination > config.getCubeAggrGroupMaxCombination()) {
-                    String msg = "Aggregation group " + index
-                            + " has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
+                    String msg = "Aggregation group " + index + " has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
                     logger.error("Aggregation group " + index + " has " + combination + " combinations;");
                     logger.error(msg);
                     throw new IllegalStateException(msg);
@@ -712,64 +704,50 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
             Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
             getDims(jointDimsList, jointDims, agg.getSelectRule().jointDims);
 
-            if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
-                    || !includeDims.containsAll(jointDims)) {
+            if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims) || !includeDims.containsAll(jointDims)) {
                 List<String> notIncluded = Lists.newArrayList();
-                final Iterable<String> all = Iterables
-                        .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
+                final Iterable<String> all = Iterables.unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
                 for (String dim : all) {
                     if (includeDims.contains(dim) == false) {
                         notIncluded.add(dim);
                     }
                 }
                 Collections.sort(notIncluded);
-                logger.error(
-                        "Aggregation group " + index + " Include dimensions not containing all the used dimensions");
-                throw new IllegalStateException("Aggregation group " + index
-                        + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
+                logger.error("Aggregation group " + index + " Include dimensions not containing all the used dimensions");
+                throw new IllegalStateException("Aggregation group " + index + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
             }
 
             if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
-                logger.warn("Aggregation group " + index + " mandatory dimensions overlap with hierarchy dimensions: "
-                        + ensureOrder(CollectionUtils.intersection(mandatoryDims, hierarchyDims)));
+                logger.warn("Aggregation group " + index + " mandatory dimensions overlap with hierarchy dimensions: " + ensureOrder(CollectionUtils.intersection(mandatoryDims, hierarchyDims)));
             }
             if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
-                logger.warn("Aggregation group " + index + " mandatory dimensions overlap with joint dimensions: "
-                        + ensureOrder(CollectionUtils.intersection(mandatoryDims, jointDims)));
+                logger.warn("Aggregation group " + index + " mandatory dimensions overlap with joint dimensions: " + ensureOrder(CollectionUtils.intersection(mandatoryDims, jointDims)));
             }
 
             if (CollectionUtils.containsAny(hierarchyDims, jointDims)) {
                 logger.error("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions");
-                throw new IllegalStateException(
-                        "Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions: "
-                                + ensureOrder(CollectionUtils.intersection(hierarchyDims, jointDims)));
+                throw new IllegalStateException("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions: " + ensureOrder(CollectionUtils.intersection(hierarchyDims, jointDims)));
             }
 
             if (hasSingle(hierarchyDimsList)) {
                 logger.error("Aggregation group " + index + " require at least 2 dimensions in a hierarchy");
-                throw new IllegalStateException(
-                        "Aggregation group " + index + " require at least 2 dimensions in a hierarchy.");
+                throw new IllegalStateException("Aggregation group " + index + " require at least 2 dimensions in a hierarchy.");
             }
             if (hasSingle(jointDimsList)) {
                 logger.error("Aggregation group " + index + " require at least 2 dimensions in a joint");
-                throw new IllegalStateException(
-                        "Aggregation group " + index + " require at least 2 dimensions in a joint");
+                throw new IllegalStateException("Aggregation group " + index + " require at least 2 dimensions in a joint");
             }
 
             Pair<Boolean, Set<String>> overlap = hasOverlap(hierarchyDimsList, hierarchyDims);
             if (overlap.getFirst() == true) {
-                logger.error("Aggregation group " + index + " a dimension exist in more than one hierarchy: "
-                        + ensureOrder(overlap.getSecond()));
-                throw new IllegalStateException("Aggregation group " + index
-                        + " a dimension exist in more than one hierarchy: " + ensureOrder(overlap.getSecond()));
+                logger.error("Aggregation group " + index + " a dimension exist in more than one hierarchy: " + ensureOrder(overlap.getSecond()));
+                throw new IllegalStateException("Aggregation group " + index + " a dimension exist in more than one hierarchy: " + ensureOrder(overlap.getSecond()));
             }
 
             overlap = hasOverlap(jointDimsList, jointDims);
             if (overlap.getFirst() == true) {
-                logger.error("Aggregation group " + index + " a dimension exist in more than one joint: "
-                        + ensureOrder(overlap.getSecond()));
-                throw new IllegalStateException("Aggregation group " + index
-                        + " a dimension exist in more than one joint: " + ensureOrder(overlap.getSecond()));
+                logger.error("Aggregation group " + index + " a dimension exist in more than one joint: " + ensureOrder(overlap.getSecond()));
+                throw new IllegalStateException("Aggregation group " + index + " a dimension exist in more than one joint: " + ensureOrder(overlap.getSecond()));
             }
 
             index++;
@@ -897,8 +875,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         return new String[][] { cols, extra };
     }
 
-    private void initDerivedMap(TblColRef[] hostCols, DeriveType type, JoinDesc join, TblColRef[] derivedCols,
-            String[] extra) {
+    private void initDerivedMap(TblColRef[] hostCols, DeriveType type, JoinDesc join, TblColRef[] derivedCols, String[] extra) {
         if (hostCols.length == 0 || derivedCols.length == 0)
             throw new IllegalStateException("host/derived columns must not be empty");
 
@@ -919,8 +896,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
 
         for (int i = 0; i < derivedCols.length; i++) {
             TblColRef derivedCol = derivedCols[i];
-            boolean isOneToOne = type == DeriveType.PK_FK || ArrayUtils.contains(hostCols, derivedCol)
-                    || (extra != null && extra[i].contains("1-1"));
+            boolean isOneToOne = type == DeriveType.PK_FK || ArrayUtils.contains(hostCols, derivedCol) || (extra != null && extra[i].contains("1-1"));
             derivedToHostMap.put(derivedCol, new DeriveInfo(type, join, hostCols, isOneToOne));
         }
 
@@ -952,8 +928,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 whatsLeft.add(derCol);
         }
         if (whatsLeft.size() > 0) {
-            infoList.add(new DeriveInfo(type, join, (TblColRef[]) whatsLeft.toArray(new TblColRef[whatsLeft.size()]),
-                    false));
+            infoList.add(new DeriveInfo(type, join, (TblColRef[]) whatsLeft.toArray(new TblColRef[whatsLeft.size()]), false));
         }
     }
 
@@ -1044,8 +1019,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         }
 
         for (int i = 0; i < measures.size(); i++) {
-            checkState(checkEachMeasureExist.get(i),
-                    "measure (%s) does not exist in column family,or measure duplicates", measures.get(i));
+            checkState(checkEachMeasureExist.get(i), "measure (%s) does not exist in column family,or measure duplicates", measures.get(i));
         }
     }
 

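The aggregation-group validation reverted above enforces that a group's 'includes' list covers every dimension referenced by its mandatory, hierarchy and joint rules, and that hierarchy and joint dimensions do not overlap. The core of the 'includes' check reduced to a sketch (class and method names invented for illustration; the real code uses case-insensitive sets and reports exactly which dimensions are missing):

    import java.util.Set;

    // Sketch of the "includes must cover all used dimensions" rule seen in
    // CubeDesc and AggregationGroupRule above.
    final class AggGroupIncludeCheck {
        static boolean includesCoverAll(Set<String> includeDims,
                                        Set<String> mandatoryDims,
                                        Set<String> hierarchyDims,
                                        Set<String> jointDims) {
            return includeDims.containsAll(mandatoryDims)
                    && includeDims.containsAll(hierarchyDims)
                    && includeDims.containsAll(jointDims);
        }
    }
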
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
index 2f8eaa1..c49d37a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
@@ -65,8 +65,7 @@ public class CubeJoinedFlatTableDesc implements IJoinedFlatTableDesc, java.io.Se
         if (cubeSegment == null) {
             return "kylin_intermediate_" + cubeDesc.getName().toLowerCase();
         } else {
-            return "kylin_intermediate_" + cubeDesc.getName().toLowerCase() + "_"
-                    + cubeSegment.getUuid().replaceAll("-", "_");
+            return "kylin_intermediate_" + cubeDesc.getName().toLowerCase() + "_" + cubeSegment.getUuid().replaceAll("-", "_");
         }
     }
 
@@ -116,8 +115,7 @@ public class CubeJoinedFlatTableDesc implements IJoinedFlatTableDesc, java.io.Se
     // sanity check the input record (in bytes) matches what's expected
     public void sanityCheck(BytesSplitter bytesSplitter) {
         if (columnCount != bytesSplitter.getBufferSize()) {
-            throw new IllegalArgumentException("Expect " + columnCount + " columns, but see "
-                    + bytesSplitter.getBufferSize() + " -- " + bytesSplitter);
+            throw new IllegalArgumentException("Expect " + columnCount + " columns, but see " + bytesSplitter.getBufferSize() + " -- " + bytesSplitter);
         }
 
         // TODO: check data types here

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
index e590d07..e829aeb 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.cube.model;
 
-import java.util.List;
-
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.FunctionDesc;
@@ -28,6 +26,8 @@ import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
+import java.util.List;
+
 /**
  * An enrich of IJoinedFlatTableDesc for cubes
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
index 1762b26..578831f 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
@@ -81,8 +81,7 @@ public class DimensionDesc implements java.io.Serializable {
             }
         }
         if (derived != null && join == null) {
-            throw new IllegalStateException(
-                    "Derived can only be defined on lookup table, cube " + cubeDesc + ", " + this);
+            throw new IllegalStateException("Derived can only be defined on lookup table, cube " + cubeDesc + ", " + this);
         }
 
     }
@@ -141,8 +140,7 @@ public class DimensionDesc implements java.io.Serializable {
 
     @Override
     public String toString() {
-        return Objects.toStringHelper(this).add("name", name).add("table", table).add("column", column)
-                .add("derived", Arrays.toString(derived)).add("join", join).toString();
+        return Objects.toStringHelper(this).add("name", name).add("table", table).add("column", column).add("derived", Arrays.toString(derived)).add("join", join).toString();
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnDesc.java
index 2a57a34..7007342 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnDesc.java
@@ -18,14 +18,13 @@
 
 package org.apache.kylin.cube.model;
 
-import java.util.Arrays;
-
-import org.apache.kylin.metadata.model.FunctionDesc;
-import org.apache.kylin.metadata.model.MeasureDesc;
-
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.kylin.metadata.model.FunctionDesc;
+import org.apache.kylin.metadata.model.MeasureDesc;
+
+import java.util.Arrays;
 
 /**
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnFamilyDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnFamilyDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnFamilyDesc.java
index 1d88549..85c2c17 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnFamilyDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseColumnFamilyDesc.java
@@ -18,13 +18,12 @@
 
 package org.apache.kylin.cube.model;
 
-import java.util.Arrays;
-
-import org.apache.kylin.metadata.model.MeasureDesc;
-
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.kylin.metadata.model.MeasureDesc;
+
+import java.util.Arrays;
 
 /**
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
index 7ae932a..d1e5829 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
@@ -22,13 +22,12 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedList;
 
-import org.apache.kylin.common.util.StringUtil;
-import org.apache.kylin.metadata.model.FunctionDesc;
-import org.apache.kylin.metadata.model.MeasureDesc;
-
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.kylin.common.util.StringUtil;
+import org.apache.kylin.metadata.model.FunctionDesc;
+import org.apache.kylin.metadata.model.MeasureDesc;
 
 /**
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/RowKeyColDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/RowKeyColDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/RowKeyColDesc.java
index c62055d..b6f0a27 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/RowKeyColDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/RowKeyColDesc.java
@@ -88,7 +88,7 @@ public class RowKeyColDesc implements java.io.Serializable {
         }
 
         encodingArgs = DateDimEnc.replaceEncodingArgs(encoding, encodingArgs, encodingName, type);
-
+        
         if (encodingName.startsWith(FixedLenDimEnc.ENCODING_NAME) && (type.isIntegerFamily() || type.isNumberFamily()))
             throw new IllegalArgumentException(colRef + " type is " + type + " and cannot apply fixed_length encoding");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/CubeDesc.java
index d2f8253..4019e4f 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/CubeDesc.java
@@ -85,8 +85,7 @@ public class CubeDesc extends RootPersistentEntity {
 
         @Override
         public String toString() {
-            return "DeriveInfo [type=" + type + ", dimension=" + dimension + ", columns=" + Arrays.toString(columns)
-                    + ", isOneToOne=" + isOneToOne + "]";
+            return "DeriveInfo [type=" + type + ", dimension=" + dimension + ", columns=" + Arrays.toString(columns) + ", isOneToOne=" + isOneToOne + "]";
         }
 
     }
@@ -243,8 +242,7 @@ public class CubeDesc extends RootPersistentEntity {
         return derivedToHostMap.get(derived);
     }
 
-    public Map<Array<TblColRef>, List<DeriveInfo>> getHostToDerivedInfo(List<TblColRef> rowCols,
-            Collection<TblColRef> wantedCols) {
+    public Map<Array<TblColRef>, List<DeriveInfo>> getHostToDerivedInfo(List<TblColRef> rowCols, Collection<TblColRef> wantedCols) {
         Map<Array<TblColRef>, List<DeriveInfo>> result = new HashMap<Array<TblColRef>, List<DeriveInfo>>();
         for (Entry<Array<TblColRef>, List<DeriveInfo>> entry : hostToDerivedMap.entrySet()) {
             Array<TblColRef> hostCols = entry.getKey();
@@ -431,12 +429,7 @@ public class CubeDesc extends RootPersistentEntity {
         try {
             md = MessageDigest.getInstance("MD5");
             StringBuilder sigString = new StringBuilder();
-            sigString.append(this.name).append("|").append(this.getFactTable()).append("|")
-                    .append(JsonUtil.writeValueAsString(this.model.getPartitionDesc())).append("|")
-                    .append(JsonUtil.writeValueAsString(this.dimensions)).append("|")
-                    .append(JsonUtil.writeValueAsString(this.measures)).append("|")
-                    .append(JsonUtil.writeValueAsString(this.rowkey)).append("|")
-                    .append(JsonUtil.writeValueAsString(this.hbaseMapping));
+            sigString.append(this.name).append("|").append(this.getFactTable()).append("|").append(JsonUtil.writeValueAsString(this.model.getPartitionDesc())).append("|").append(JsonUtil.writeValueAsString(this.dimensions)).append("|").append(JsonUtil.writeValueAsString(this.measures)).append("|").append(JsonUtil.writeValueAsString(this.rowkey)).append("|").append(JsonUtil.writeValueAsString(this.hbaseMapping));
 
             byte[] signature = md.digest(sigString.toString().getBytes());
             return new String(Base64.encodeBase64(signature));
@@ -485,8 +478,7 @@ public class CubeDesc extends RootPersistentEntity {
         // check all dimension columns are presented on rowkey
         List<TblColRef> dimCols = listDimensionColumnsExcludingDerived();
         if (rowkey.getRowKeyColumns().length != dimCols.size()) {
-            addError("RowKey columns count (" + rowkey.getRowKeyColumns().length
-                    + ") does not match dimension columns count (" + dimCols.size() + "). ");
+            addError("RowKey columns count (" + rowkey.getRowKeyColumns().length + ") does not match dimension columns count (" + dimCols.size() + "). ");
         }
     }
 
@@ -584,8 +576,7 @@ public class CubeDesc extends RootPersistentEntity {
         initDerivedMap(new TblColRef[] { hostCol }, type, dimension, new TblColRef[] { derivedCol }, null);
     }
 
-    private void initDerivedMap(TblColRef[] hostCols, DeriveType type, DimensionDesc dimension, TblColRef[] derivedCols,
-            String[] extra) {
+    private void initDerivedMap(TblColRef[] hostCols, DeriveType type, DimensionDesc dimension, TblColRef[] derivedCols, String[] extra) {
         if (hostCols.length == 0 || derivedCols.length == 0)
             throw new IllegalStateException("host/derived columns must not be empty");
 
@@ -609,8 +600,7 @@ public class CubeDesc extends RootPersistentEntity {
 
         for (int i = 0; i < derivedCols.length; i++) {
             TblColRef derivedCol = derivedCols[i];
-            boolean isOneToOne = type == DeriveType.PK_FK || ArrayUtils.contains(hostCols, derivedCol)
-                    || (extra != null && extra[i].contains("1-1"));
+            boolean isOneToOne = type == DeriveType.PK_FK || ArrayUtils.contains(hostCols, derivedCol) || (extra != null && extra[i].contains("1-1"));
             derivedToHostMap.put(derivedCol, new DeriveInfo(type, dimension, hostCols, isOneToOne));
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/DimensionDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/DimensionDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/DimensionDesc.java
index e63a5b0..3903b2b 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/DimensionDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/DimensionDesc.java
@@ -108,8 +108,7 @@ public class DimensionDesc {
         }
 
         if (derived != null && join == null) {
-            throw new IllegalStateException(
-                    "Derived can only be defined on lookup table, cube " + cubeDesc + ", " + this);
+            throw new IllegalStateException("Derived can only be defined on lookup table, cube " + cubeDesc + ", " + this);
         }
     }
 
@@ -234,9 +233,7 @@ public class DimensionDesc {
 
     @Override
     public String toString() {
-        return "DimensionDesc [name=" + name + ", join=" + join + ", hierarchy=" + Arrays.toString(hierarchy)
-                + ", table=" + table + ", column=" + Arrays.toString(column) + ", derived=" + Arrays.toString(derived)
-                + "]";
+        return "DimensionDesc [name=" + name + ", join=" + join + ", hierarchy=" + Arrays.toString(hierarchy) + ", table=" + table + ", column=" + Arrays.toString(column) + ", derived=" + Arrays.toString(derived) + "]";
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyColDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyColDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyColDesc.java
index 3151513..8ef3698 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyColDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyColDesc.java
@@ -86,8 +86,7 @@ public class RowKeyColDesc {
 
     @Override
     public String toString() {
-        return "RowKeyColDesc [column=" + column + ", length=" + length + ", dictionary=" + dictionary + ", mandatory="
-                + mandatory + "]";
+        return "RowKeyColDesc [column=" + column + ", length=" + length + ", dictionary=" + dictionary + ", mandatory=" + mandatory + "]";
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyDesc.java
index 2b77c6b..7d37a76 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/v1_4_0/RowKeyDesc.java
@@ -188,8 +188,7 @@ public class RowKeyDesc {
 
     @Override
     public String toString() {
-        return "RowKeyDesc [rowkeyColumns=" + Arrays.toString(rowkeyColumns) + ", aggregationGroups="
-                + Arrays.toString(aggregationGroups) + "]";
+        return "RowKeyDesc [rowkeyColumns=" + Arrays.toString(rowkeyColumns) + ", aggregationGroups=" + Arrays.toString(aggregationGroups) + "]";
     }
 
     private void buildRowKey(Map<String, TblColRef> colNameAbbr) {
@@ -236,8 +235,7 @@ public class RowKeyDesc {
             for (int j = 0; j < aggGrp.length; j++) {
                 TblColRef aggCol = colNameAbbr.get(aggGrp[j].toUpperCase());
                 if (aggCol == null) {
-                    throw new IllegalArgumentException(
-                            "Can't find aggregation column " + aggGrp[j] + " in  cube " + this.cubeDesc.getName());
+                    throw new IllegalArgumentException("Can't find aggregation column " + aggGrp[j] + " in  cube " + this.cubeDesc.getName());
                 }
                 Integer index = getColumnBitIndex(aggCol);
                 mask.groupMask |= 1L << index;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
index 3c9da15..c2c5f89 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
@@ -33,8 +33,7 @@ import org.apache.kylin.cube.model.validation.rule.StreamingCubeRule;
  */
 public class CubeMetadataValidator {
     @SuppressWarnings("unchecked")
-    private IValidatorRule<CubeDesc>[] rules = new IValidatorRule[] { new FunctionRule(), new AggregationGroupRule(),
-            new RowKeyAttrRule(), new DictionaryRule(), new StreamingCubeRule() };
+    private IValidatorRule<CubeDesc>[] rules = new IValidatorRule[] { new FunctionRule(), new AggregationGroupRule(), new RowKeyAttrRule(), new DictionaryRule(), new StreamingCubeRule() };
 
     public ValidateContext validate(CubeDesc cube) {
         ValidateContext context = new ValidateContext();
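
(Editor's note: the hunk above only shows the rule array and the start of validate(); the dispatch loop itself is not in this diff. As a minimal, standalone sketch of that rule-dispatch pattern, with a toy rule and stand-in types rather than Kylin's CubeDesc/ValidateContext:)

// Editor's illustrative sketch of the rule-dispatch pattern used by CubeMetadataValidator.
// The generic entity type and the message list are stand-ins, not Kylin's classes.
import java.util.ArrayList;
import java.util.List;

interface ValidatorRule<T> {
    void validate(T entity, List<String> messages);
}

public class RuleDispatchSketch {
    public static void main(String[] args) {
        List<ValidatorRule<String>> rules = new ArrayList<>();
        rules.add((cubeName, messages) -> {
            if (cubeName.isEmpty()) {
                messages.add("cube name must not be empty");
            }
        });

        List<String> messages = new ArrayList<>();
        for (ValidatorRule<String> rule : rules) {
            rule.validate("sample_cube", messages);   // each rule appends its ERROR/WARN results
        }
        System.out.println(messages.isEmpty() ? "valid" : messages.toString());
    }
}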

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
index e939b38..33fc390 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
@@ -48,7 +48,6 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
     public AggregationGroupRule() {
     }
 
-    @SuppressWarnings("checkstyle:methodlength")
     private void inner(CubeDesc cube, ValidateContext context) {
 
         if (cube.getAggregationGroups() == null || cube.getAggregationGroups().size() == 0) {
@@ -100,33 +99,28 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
                 }
             }
 
-            if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
-                    || !includeDims.containsAll(jointDims)) {
+            if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims) || !includeDims.containsAll(jointDims)) {
                 List<String> notIncluded = Lists.newArrayList();
-                final Iterable<String> all = Iterables
-                        .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
+                final Iterable<String> all = Iterables.unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
                 for (String dim : all) {
                     if (includeDims.contains(dim) == false) {
                         notIncluded.add(dim);
                     }
                 }
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index
-                        + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
                 continue;
             }
 
             if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
                 Set<String> intersection = new HashSet<>(mandatoryDims);
                 intersection.retainAll(hierarchyDims);
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index
-                        + " mandatory dimension has overlap with hierarchy dimension: " + intersection.toString());
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " mandatory dimension has overlap with hierarchy dimension: " + intersection.toString());
                 continue;
             }
             if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
                 Set<String> intersection = new HashSet<>(mandatoryDims);
                 intersection.retainAll(jointDims);
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index
-                        + " mandatory dimension has overlap with joint dimension: " + intersection.toString());
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " mandatory dimension has overlap with joint dimension: " + intersection.toString());
                 continue;
             }
 
@@ -140,8 +134,7 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
                     }
 
                     if (oneJoint.size() < 2) {
-                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index
-                                + " require at least 2 dimensions in a joint: " + oneJoint.toString());
+                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " require at least 2 dimensions in a joint: " + oneJoint.toString());
                         continue;
                     }
                     jointDimNum += oneJoint.size();
@@ -156,17 +149,13 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
                                 overlapHierarchies++;
                             }
                             if (share.size() > 1) {
-                                context.addResult(ResultLevel.ERROR,
-                                        "Aggregation group " + index
-                                                + " joint dimensions has overlap with more than 1 dimensions in same hierarchy: "
-                                                + share.toString());
+                                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " joint dimensions has overlap with more than 1 dimensions in same hierarchy: " + share.toString());
                                 continue;
                             }
                         }
 
                         if (overlapHierarchies > 1) {
-                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
-                                    + " joint dimensions has overlap with more than 1 hierarchies");
+                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " joint dimensions has overlap with more than 1 hierarchies");
                             continue;
                         }
                     }
@@ -186,8 +175,7 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
                         }
                         existing.addAll(oneJoint);
                     }
-                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index
-                            + " a dimension exists in more than one joint: " + overlap.toString());
+                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " a dimension exists in more than one joint: " + overlap.toString());
                     continue;
                 }
             }
@@ -198,9 +186,7 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
                 combination = getMaxCombinations(cube) + 1;
             } finally {
                 if (combination > getMaxCombinations(cube)) {
-                    String msg = "Aggregation group " + index + " has too many combinations, current combination is "
-                            + combination + ", max allowed combination is " + getMaxCombinations(cube)
-                            + "; use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
+                    String msg = "Aggregation group " + index + " has too many combinations, current combination is " + combination + ", max allowed combination is " + getMaxCombinations(cube) + "; use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
                     context.addResult(ResultLevel.ERROR, msg);
                     continue;
                 }
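
(Editor's note: the final hunk above is the guard against cuboid explosion. As a rough illustration of why 'mandatory'/'hierarchy'/'joint' shrink the combination count, here is a small sketch; the arithmetic is an assumption for illustration, not necessarily Kylin's exact formula, and the dimension counts and the 4096 cap are invented:)

// Editor's sketch: rough cuboid-combination estimate for one aggregation group.
// The formula is an assumption for illustration, not necessarily Kylin's exact one.
import java.util.Arrays;
import java.util.List;

public class AggGroupCombinationSketch {

    static long estimate(int normalDims, List<Integer> hierarchySizes, int jointGroups) {
        long combinations = 1L << normalDims;      // every free dimension doubles the count
        for (int levels : hierarchySizes) {
            combinations *= (levels + 1);          // an n-level hierarchy allows n+1 prefixes
        }
        combinations <<= jointGroups;              // a joint group behaves like a single dimension
        return combinations;                       // mandatory dimensions contribute a factor of 1
    }

    public static void main(String[] args) {
        long combination = estimate(10, Arrays.asList(3), 2);   // 2^10 * 4 * 2^2 = 16384
        long maxCombinations = 4096;                            // cf. kylin.cube.aggrgroup.max-combination
        if (combination > maxCombinations) {
            System.out.println("too many combinations: " + combination + " > " + maxCombinations);
        }
    }
}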

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/DictionaryRule.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/DictionaryRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/DictionaryRule.java
index 449cafc..8da3ca0 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/DictionaryRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/DictionaryRule.java
@@ -82,9 +82,7 @@ public class DictionaryRule implements IValidatorRule<CubeDesc> {
                 return;
             }
 
-            if (StringUtils.isNotEmpty(builderClass)
-                    && builderClass.equalsIgnoreCase(GlobalDictionaryBuilder.class.getName())
-                    && dimensionColumns.contains(dictCol)) {
+            if (StringUtils.isNotEmpty(builderClass) && builderClass.equalsIgnoreCase(GlobalDictionaryBuilder.class.getName()) && dimensionColumns.contains(dictCol)) {
                 context.addResult(ResultLevel.ERROR, ERROR_GLOBAL_DICTIONNARY_ONLY_MEASURE + dictCol);
                 return;
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/FunctionRule.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/FunctionRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/FunctionRule.java
index 4d295b0..36631ce 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/FunctionRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/FunctionRule.java
@@ -72,26 +72,22 @@ public class FunctionRule implements IValidatorRule<CubeDesc> {
             FunctionDesc func = measure.getFunction();
             ParameterDesc parameter = func.getParameter();
             if (parameter == null) {
-                context.addResult(ResultLevel.ERROR,
-                        "Must define parameter for function " + func.getExpression() + " in " + measure.getName());
+                context.addResult(ResultLevel.ERROR, "Must define parameter for function " + func.getExpression() + " in " + measure.getName());
                 return;
             }
 
             String type = func.getParameter().getType();
             String value = func.getParameter().getValue();
             if (StringUtils.isEmpty(type)) {
-                context.addResult(ResultLevel.ERROR,
-                        "Must define type for parameter type " + func.getExpression() + " in " + measure.getName());
+                context.addResult(ResultLevel.ERROR, "Must define type for parameter type " + func.getExpression() + " in " + measure.getName());
                 return;
             }
             if (StringUtils.isEmpty(value)) {
-                context.addResult(ResultLevel.ERROR,
-                        "Must define type for parameter value " + func.getExpression() + " in " + measure.getName());
+                context.addResult(ResultLevel.ERROR, "Must define type for parameter value " + func.getExpression() + " in " + measure.getName());
                 return;
             }
             if (StringUtils.isEmpty(func.getReturnType())) {
-                context.addResult(ResultLevel.ERROR,
-                        "Must define return type for function " + func.getExpression() + " in " + measure.getName());
+                context.addResult(ResultLevel.ERROR, "Must define return type for function " + func.getExpression() + " in " + measure.getName());
                 return;
             }
 
@@ -112,8 +108,7 @@ public class FunctionRule implements IValidatorRule<CubeDesc> {
 
             if (TopNMeasureType.FUNC_TOP_N.equalsIgnoreCase(func.getExpression())) {
                 if (parameter.getNextParameter() == null) {
-                    context.addResult(ResultLevel.ERROR, "Must define at least 2 parameters for function "
-                            + func.getExpression() + " in " + measure.getName());
+                    context.addResult(ResultLevel.ERROR, "Must define at least 2 parameters for function " + func.getExpression() + " in " + measure.getName());
                     return;
                 }
 
@@ -122,8 +117,7 @@ public class FunctionRule implements IValidatorRule<CubeDesc> {
                 while (groupByCol != null) {
                     String embeded_groupby = groupByCol.getValue();
                     for (DimensionDesc dimensionDesc : cube.getDimensions()) {
-                        if (dimensionDesc.getColumn() != null
-                                && dimensionDesc.getColumn().equalsIgnoreCase(embeded_groupby)) {
+                        if (dimensionDesc.getColumn() != null && dimensionDesc.getColumn().equalsIgnoreCase(embeded_groupby)) {
                             duplicatedCol.add(embeded_groupby);
                         }
                     }
@@ -134,8 +128,7 @@ public class FunctionRule implements IValidatorRule<CubeDesc> {
         }
 
         if (countFuncs.size() != 1) {
-            context.addResult(ResultLevel.ERROR, "Must define one and only one count(1) function, but there are "
-                    + countFuncs.size() + " -- " + countFuncs);
+            context.addResult(ResultLevel.ERROR, "Must define one and only one count(1) function, but there are " + countFuncs.size() + " -- " + countFuncs);
         }
     }
 
@@ -171,8 +164,8 @@ public class FunctionRule implements IValidatorRule<CubeDesc> {
      */
     private boolean validateMeasureNamesDuplicated(List<MeasureDesc> measures, ValidateContext context) {
         Set<String> nameSet = new HashSet<>();
-        for (MeasureDesc measure : measures) {
-            if (nameSet.contains(measure.getName())) {
+        for (MeasureDesc measure: measures){
+            if (nameSet.contains(measure.getName())){
                 context.addResult(ResultLevel.ERROR, "There is duplicated measure's name: " + measure.getName());
                 return true;
             } else {
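
(Editor's note: two of the checks reverted above are easy to see in isolation: exactly one count(1) measure, and no duplicated measure names. A self-contained sketch of both follows; the error strings are taken from the hunks above, while the measure data is invented:)

// Editor's sketch of the "single count(1)" and "duplicate measure name" checks from FunctionRule.
// The sample lists are invented; in Kylin these come from the cube's measure descriptors.
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class MeasureCheckSketch {
    public static void main(String[] args) {
        List<String> countFuncs = Arrays.asList("COUNT(1)");   // collected while scanning measures
        List<String> measureNames = Arrays.asList("GMV_SUM", "SELLER_CNT", "GMV_SUM");

        if (countFuncs.size() != 1) {
            System.out.println("Must define one and only one count(1) function, but there are "
                    + countFuncs.size() + " -- " + countFuncs);
        }

        Set<String> nameSet = new HashSet<>();
        for (String name : measureNames) {
            if (!nameSet.add(name)) {                           // add() returns false on a duplicate
                System.out.println("There is duplicated measure's name: " + name);
            }
        }
    }
}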

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
index 9e4a37a..4438706 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/StreamingCubeRule.java
@@ -26,6 +26,7 @@ import org.apache.kylin.cube.model.validation.ValidateContext;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.IEngineAware;
 import org.apache.kylin.metadata.model.ISourceAware;
+
 import org.apache.kylin.metadata.model.TblColRef;
 
 /**
@@ -43,20 +44,18 @@ public class StreamingCubeRule implements IValidatorRule<CubeDesc> {
     @Override
     public void validate(CubeDesc cube, ValidateContext context) {
         DataModelDesc model = cube.getModel();
-
+        
         if (model.getRootFactTable().getTableDesc().getSourceType() != ISourceAware.ID_STREAMING) {
             return;
         }
 
         if (model.getLookupTables().size() > 0) {
-            context.addResult(ResultLevel.ERROR,
-                    "Streaming Cube doesn't support star-schema so far; only one fact table is allowed.");
+            context.addResult(ResultLevel.ERROR, "Streaming Cube doesn't support star-schema so far; only one fact table is allowed.");
             return;
         }
 
         if (cube.getEngineType() == IEngineAware.ID_SPARK) {
-            context.addResult(ResultLevel.ERROR,
-                    "Spark engine doesn't support streaming source, select MapReduce engine instead.");
+            context.addResult(ResultLevel.ERROR, "Spark engine doesn't support streaming source, select MapReduce engine instead.");
             return;
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeDescUpgrade_v_1_5_1.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeDescUpgrade_v_1_5_1.java b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeDescUpgrade_v_1_5_1.java
index 0740fc0..ec21650 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeDescUpgrade_v_1_5_1.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeDescUpgrade_v_1_5_1.java
@@ -226,8 +226,7 @@ public class CubeDescUpgrade_v_1_5_1 {
         }
 
         org.apache.kylin.cube.model.RowKeyDesc newRowKey = new org.apache.kylin.cube.model.RowKeyDesc();
-        org.apache.kylin.cube.model.RowKeyColDesc[] cols = new org.apache.kylin.cube.model.RowKeyColDesc[oldRowKey
-                .getRowKeyColumns().length];
+        org.apache.kylin.cube.model.RowKeyColDesc[] cols = new org.apache.kylin.cube.model.RowKeyColDesc[oldRowKey.getRowKeyColumns().length];
         int index = 0;
         for (RowKeyColDesc oldRowKeyCol : oldRowKey.getRowKeyColumns()) {
             org.apache.kylin.cube.model.RowKeyColDesc newRowKeyCol = new org.apache.kylin.cube.model.RowKeyColDesc();
@@ -243,8 +242,7 @@ public class CubeDescUpgrade_v_1_5_1 {
             } else if (oldRowKeyCol.getLength() > 0) {
                 newRowKeyCol.setEncoding("fixed_length:" + oldRowKeyCol.getLength());
             } else {
-                throw new IllegalArgumentException("Unknow encoding: Dictionary " + oldRowKeyCol.getDictionary()
-                        + ", length: " + oldRowKeyCol.getLength());
+                throw new IllegalArgumentException("Unknow encoding: Dictionary " + oldRowKeyCol.getDictionary() + ", length: " + oldRowKeyCol.getLength());
             }
             cols[index++] = newRowKeyCol;
         }
@@ -262,8 +260,7 @@ public class CubeDescUpgrade_v_1_5_1 {
             JsonUtil.writeValueIndent(os, hbaseMappingDesc);
             byte[] blob = os.toByteArray();
             ByteArrayInputStream is = new ByteArrayInputStream(blob);
-            org.apache.kylin.cube.model.HBaseMappingDesc newHBaseMappingDesc = JsonUtil.readValue(is,
-                    org.apache.kylin.cube.model.HBaseMappingDesc.class);
+            org.apache.kylin.cube.model.HBaseMappingDesc newHBaseMappingDesc = JsonUtil.readValue(is, org.apache.kylin.cube.model.HBaseMappingDesc.class);
             newModel.setHbaseMapping(newHBaseMappingDesc);
 
         } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeMetadataUpgrade_v_1_5_1.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeMetadataUpgrade_v_1_5_1.java b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeMetadataUpgrade_v_1_5_1.java
index 668f24f..8a85629 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeMetadataUpgrade_v_1_5_1.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/V1_5_1/CubeMetadataUpgrade_v_1_5_1.java
@@ -90,8 +90,7 @@ public class CubeMetadataUpgrade_v_1_5_1 extends CubeMetadataUpgrade {
                 CubeDescUpgrade_v_1_5_1 upgrade = new CubeDescUpgrade_v_1_5_1(path, store);
                 CubeDesc ndesc = upgrade.upgrade();
 
-                ResourceStore.getStore(config).putResource(ndesc.getResourcePath(), ndesc,
-                        CubeDescManager.CUBE_DESC_SERIALIZER);
+                ResourceStore.getStore(config).putResource(ndesc.getResourcePath(), ndesc, CubeDescManager.CUBE_DESC_SERIALIZER);
                 updatedResources.add(ndesc.getResourcePath());
             } catch (Exception e) {
                 logger.error("error", e);
@@ -130,11 +129,8 @@ public class CubeMetadataUpgrade_v_1_5_1 extends CubeMetadataUpgrade {
         for (CubeInstance cube : cubes) {
             try {
                 org.apache.kylin.cube.model.CubeDesc cubeDesc = cube.getDescriptor();
-                if (cube.getFirstSegment() == null && cubeDesc != null
-                        && cubeDesc.getStorageType() == IStorageAware.ID_HBASE
-                        && cubeDesc.getEngineType() == IEngineAware.ID_MR_V1) {
-                    logger.info("CubeMetadataUpgrade_v_1_5_1 handling in upgradeEngineTypeStorageType {}",
-                            cube.getName());
+                if (cube.getFirstSegment() == null && cubeDesc != null && cubeDesc.getStorageType() == IStorageAware.ID_HBASE && cubeDesc.getEngineType() == IEngineAware.ID_MR_V1) {
+                    logger.info("CubeMetadataUpgrade_v_1_5_1 handling in upgradeEngineTypeStorageType {}", cube.getName());
 
                     cubeDesc.setEngineType(IEngineAware.ID_MR_V2);
                     cubeDesc.setStorageType(IStorageAware.ID_SHARDED_HBASE);
@@ -142,14 +138,11 @@ public class CubeMetadataUpgrade_v_1_5_1 extends CubeMetadataUpgrade {
                     store.putResource(cubeDesc.getResourcePath(), cubeDesc, CubeDescManager.CUBE_DESC_SERIALIZER);
                     updatedResources.add(cubeDesc.getResourcePath());
                 } else {
-                    logger.info(
-                            "CubeDesc {}'s storage type and engine type will not be upgraded because they're not empty",
-                            cubeDesc.getName());
+                    logger.info("CubeDesc {}'s storage type and engine type will not be upgraded because they're not empty", cubeDesc.getName());
                 }
             } catch (Exception e) {
                 logger.error("error", e);
-                errorMsgs.add(
-                        "upgradeEngineTypeStorageType [" + cube.getName() + "] failed: " + e.getLocalizedMessage());
+                errorMsgs.add("upgradeEngineTypeStorageType [" + cube.getName() + "] failed: " + e.getLocalizedMessage());
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/CubeMetadataUpgrade.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/CubeMetadataUpgrade.java b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/CubeMetadataUpgrade.java
index d796288..f70286d 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/CubeMetadataUpgrade.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/CubeMetadataUpgrade.java
@@ -71,8 +71,7 @@ public abstract class CubeMetadataUpgrade {
 
     public void verify() {
         logger.info("=================================================================");
-        logger.info(
-                "The changes are applied, now it's time to verify the new metadata store by reloading all metadata:");
+        logger.info("The changes are applied, now it's time to verify the new metadata store by reloading all metadata:");
         logger.info("=================================================================");
         MetadataManager.clearCache();
         MetadataManager.getInstance(config);
@@ -89,14 +88,11 @@ public abstract class CubeMetadataUpgrade {
 
     public abstract void upgradeCompatibleMeta();
 
-    public static void upgradeOrVerify(Class upgradeClass, String[] args, boolean firstStepInChain,
-            boolean lastStepInChain)
-            throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
+    public static void upgradeOrVerify(Class upgradeClass, String[] args, boolean firstStepInChain, boolean lastStepInChain) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
 
         if (!(args != null && (args.length == 1))) {
             System.out.println("Usage: java CubeMetadataUpgrade <metadata_export_folder>");
-            System.out.println(
-                    ", where metadata_export_folder is the folder containing your current metadata's dump (Upgrade program will not modify it directly, relax.");
+            System.out.println(", where metadata_export_folder is the folder containing your current metadata's dump (Upgrade program will not modify it directly, relax.");
             return;
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/MetadataVersionRefresher.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/MetadataVersionRefresher.java b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/MetadataVersionRefresher.java
index f252123..16987c2 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/MetadataVersionRefresher.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/common/MetadataVersionRefresher.java
@@ -59,8 +59,7 @@ public class MetadataVersionRefresher {
         collectFiles(this.store, "/", all);
 
         for (String path : all) {
-            if (path.endsWith(MetadataConstants.FILE_SURFIX) && !(path.startsWith(ResourceStore.DICT_RESOURCE_ROOT)
-                    || path.startsWith(ResourceStore.SNAPSHOT_RESOURCE_ROOT))) {
+            if (path.endsWith(MetadataConstants.FILE_SURFIX) && !(path.startsWith(ResourceStore.DICT_RESOURCE_ROOT) || path.startsWith(ResourceStore.SNAPSHOT_RESOURCE_ROOT))) {
                 logger.info("Updating metadata version of path {}", path);
                 ObjectNode objectNode = (ObjectNode) mapper.readTree(this.store.getResource(path).inputStream);
                 objectNode.put("version", version);
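
(Editor's note: the refresher above rewrites the "version" field of every qualifying metadata JSON resource. A minimal Jackson sketch of the same idea against a plain file; the path and version string are invented, and Kylin's ResourceStore is replaced by direct file I/O:)

// Editor's sketch: stamp a new "version" into a metadata JSON file with Jackson,
// mirroring objectNode.put("version", version) above. The path and version are invented.
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

import java.io.File;
import java.io.IOException;

public class VersionStampSketch {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        File resource = new File("/tmp/meta_dump/cube_desc/sample_cube.json");   // hypothetical dump path

        ObjectNode objectNode = (ObjectNode) mapper.readTree(resource);
        objectNode.put("version", "1.5.1");                                      // new metadata version
        mapper.writerWithDefaultPrettyPrinter().writeValue(resource, objectNode);
    }
}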


http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
index fe3c55e..8085a70 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
@@ -17,26 +17,6 @@
  */
 package org.apache.calcite.sql2rel;
 
-import static org.apache.calcite.sql.SqlUtil.stripAs;
-import static org.apache.calcite.util.Static.RESOURCE;
-
-import java.lang.reflect.Type;
-import java.math.BigDecimal;
-import java.util.AbstractList;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Deque;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
 import org.apache.calcite.avatica.util.Spaces;
 import org.apache.calcite.linq4j.Ord;
 import org.apache.calcite.plan.Convention;
@@ -68,6 +48,7 @@ import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rel.core.RelFactories;
 import org.apache.calcite.rel.core.Sample;
 import org.apache.calcite.rel.core.Sort;
+import org.apache.calcite.rel.core.TableScan;
 import org.apache.calcite.rel.core.Uncollect;
 import org.apache.calcite.rel.logical.LogicalAggregate;
 import org.apache.calcite.rel.logical.LogicalCorrelate;
@@ -185,7 +166,6 @@ import org.apache.calcite.util.NumberUtil;
 import org.apache.calcite.util.Pair;
 import org.apache.calcite.util.Util;
 import org.apache.calcite.util.trace.CalciteTrace;
-import org.slf4j.Logger;
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
@@ -197,6 +177,28 @@ import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import org.slf4j.Logger;
+
+import java.lang.reflect.Type;
+import java.math.BigDecimal;
+import java.util.AbstractList;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+import static org.apache.calcite.sql.SqlUtil.stripAs;
+import static org.apache.calcite.util.Static.RESOURCE;
+
 /*
  * The code has synced with calcite. Hope one day, we could remove the hardcode override point.
  * OVERRIDE POINT:
@@ -217,7 +219,8 @@ import com.google.common.collect.Maps;
 public class SqlToRelConverter {
     //~ Static fields/initializers ---------------------------------------------
 
-    protected static final Logger SQL2REL_LOGGER = CalciteTrace.getSqlToRelTracer();
+    protected static final Logger SQL2REL_LOGGER =
+        CalciteTrace.getSqlToRelTracer();
 
     private static final BigDecimal TWO = BigDecimal.valueOf(2L);
 
@@ -227,7 +230,8 @@ public class SqlToRelConverter {
     public static final int DEFAULT_IN_SUB_QUERY_THRESHOLD = Integer.MAX_VALUE;
 
     @Deprecated // to be removed before 2.0
-    public static final int DEFAULT_IN_SUBQUERY_THRESHOLD = DEFAULT_IN_SUB_QUERY_THRESHOLD;
+    public static final int DEFAULT_IN_SUBQUERY_THRESHOLD =
+        DEFAULT_IN_SUB_QUERY_THRESHOLD;
 
     //~ Instance fields --------------------------------------------------------
 
@@ -247,7 +251,8 @@ public class SqlToRelConverter {
     /**
      * Fields used in name resolution for correlated sub-queries.
      */
-    private final Map<CorrelationId, DeferredLookup> mapCorrelToDeferred = new HashMap<>();
+    private final Map<CorrelationId, DeferredLookup> mapCorrelToDeferred =
+        new HashMap<>();
 
     /**
      * Stack of names of datasets requested by the <code>
@@ -260,7 +265,8 @@ public class SqlToRelConverter {
      * equivalent constants. Used to avoid re-evaluating the sub-query if it's
      * already been evaluated.
      */
-    private final Map<SqlNode, RexNode> mapConvertedNonCorrSubqs = new HashMap<>();
+    private final Map<SqlNode, RexNode> mapConvertedNonCorrSubqs =
+        new HashMap<>();
 
     public final RelOptTable.ViewExpander viewExpander;
 
@@ -276,25 +282,42 @@ public class SqlToRelConverter {
      * @param convertletTable Expression converter
      */
     @Deprecated // to be removed before 2.0
-    public SqlToRelConverter(RelOptTable.ViewExpander viewExpander, SqlValidator validator,
-            Prepare.CatalogReader catalogReader, RelOptPlanner planner, RexBuilder rexBuilder,
-            SqlRexConvertletTable convertletTable) {
-        this(viewExpander, validator, catalogReader, RelOptCluster.create(planner, rexBuilder), convertletTable,
-                Config.DEFAULT);
+    public SqlToRelConverter(
+        RelOptTable.ViewExpander viewExpander,
+        SqlValidator validator,
+        Prepare.CatalogReader catalogReader,
+        RelOptPlanner planner,
+        RexBuilder rexBuilder,
+        SqlRexConvertletTable convertletTable) {
+        this(viewExpander, validator, catalogReader,
+            RelOptCluster.create(planner, rexBuilder), convertletTable,
+            Config.DEFAULT);
     }
 
     @Deprecated // to be removed before 2.0
-    public SqlToRelConverter(RelOptTable.ViewExpander viewExpander, SqlValidator validator,
-            Prepare.CatalogReader catalogReader, RelOptCluster cluster, SqlRexConvertletTable convertletTable) {
-        this(viewExpander, validator, catalogReader, cluster, convertletTable, Config.DEFAULT);
+    public SqlToRelConverter(
+        RelOptTable.ViewExpander viewExpander,
+        SqlValidator validator,
+        Prepare.CatalogReader catalogReader,
+        RelOptCluster cluster,
+        SqlRexConvertletTable convertletTable) {
+        this(viewExpander, validator, catalogReader, cluster, convertletTable,
+            Config.DEFAULT);
     }
 
     /* Creates a converter. */
-    public SqlToRelConverter(RelOptTable.ViewExpander viewExpander, SqlValidator validator,
-            Prepare.CatalogReader catalogReader, RelOptCluster cluster, SqlRexConvertletTable convertletTable,
-            Config config) {
+    public SqlToRelConverter(
+        RelOptTable.ViewExpander viewExpander,
+        SqlValidator validator,
+        Prepare.CatalogReader catalogReader,
+        RelOptCluster cluster,
+        SqlRexConvertletTable convertletTable,
+        Config config) {
         this.viewExpander = viewExpander;
-        this.opTab = (validator == null) ? SqlStdOperatorTable.instance() : validator.getOperatorTable();
+        this.opTab =
+            (validator
+                == null) ? SqlStdOperatorTable.instance()
+                : validator.getOperatorTable();
         this.validator = validator;
         this.catalogReader = catalogReader;
         this.subQueryConverter = new NoOpSubQueryConverter();
@@ -376,7 +399,8 @@ public class SqlToRelConverter {
      *
      * @param alreadyConvertedNonCorrSubqs the other map
      */
-    public void addConvertedNonCorrSubqs(Map<SqlNode, RexNode> alreadyConvertedNonCorrSubqs) {
+    public void addConvertedNonCorrSubqs(
+        Map<SqlNode, RexNode> alreadyConvertedNonCorrSubqs) {
         mapConvertedNonCorrSubqs.putAll(alreadyConvertedNonCorrSubqs);
     }
 
@@ -410,25 +434,37 @@ public class SqlToRelConverter {
         // SQL statement is something like an INSERT which has no
         // validator type information associated with its result,
         // hence the namespace check above.)
-        final List<RelDataTypeField> validatedFields = validator.getValidatedNodeType(query).getFieldList();
-        final RelDataType validatedRowType = validator.getTypeFactory().createStructType(Pair.right(validatedFields),
-                SqlValidatorUtil.uniquify(Pair.left(validatedFields), catalogReader.nameMatcher().isCaseSensitive()));
-
-        final List<RelDataTypeField> convertedFields = result.getRowType().getFieldList().subList(0,
-                validatedFields.size());
-        final RelDataType convertedRowType = validator.getTypeFactory().createStructType(convertedFields);
-
-        if (!RelOptUtil.equal("validated row type", validatedRowType, "converted row type", convertedRowType,
-                Litmus.IGNORE)) {
-            throw new AssertionError("Conversion to relational algebra failed to " + "preserve datatypes:\n"
-                    + "validated type:\n" + validatedRowType.getFullTypeString() + "\nconverted type:\n"
-                    + convertedRowType.getFullTypeString() + "\nrel:\n" + RelOptUtil.toString(result));
+        final List<RelDataTypeField> validatedFields =
+            validator.getValidatedNodeType(query).getFieldList();
+        final RelDataType validatedRowType =
+            validator.getTypeFactory().createStructType(
+                Pair.right(validatedFields),
+                SqlValidatorUtil.uniquify(Pair.left(validatedFields),
+                    catalogReader.nameMatcher().isCaseSensitive()));
+
+        final List<RelDataTypeField> convertedFields =
+            result.getRowType().getFieldList().subList(0, validatedFields.size());
+        final RelDataType convertedRowType =
+            validator.getTypeFactory().createStructType(convertedFields);
+
+        if (!RelOptUtil.equal("validated row type", validatedRowType,
+            "converted row type", convertedRowType, Litmus.IGNORE)) {
+            throw new AssertionError("Conversion to relational algebra failed to "
+                + "preserve datatypes:\n"
+                + "validated type:\n"
+                + validatedRowType.getFullTypeString()
+                + "\nconverted type:\n"
+                + convertedRowType.getFullTypeString()
+                + "\nrel:\n"
+                + RelOptUtil.toString(result));
         }
     }
 
-    public RelNode flattenTypes(RelNode rootRel, boolean restructure) {
-        RelStructuredTypeFlattener typeFlattener = new RelStructuredTypeFlattener(rexBuilder, createToRelContext(),
-                restructure);
+    public RelNode flattenTypes(
+        RelNode rootRel,
+        boolean restructure) {
+        RelStructuredTypeFlattener typeFlattener =
+            new RelStructuredTypeFlattener(rexBuilder, createToRelContext(), restructure);
         return typeFlattener.rewrite(rootRel);
     }
 
@@ -474,15 +510,20 @@ public class SqlToRelConverter {
         // Trim fields that are not used by their consumer.
         if (isTrimUnusedFields()) {
             final RelFieldTrimmer trimmer = newFieldTrimmer();
-            final List<RelCollation> collations = rootRel.getTraitSet().getTraits(RelCollationTraitDef.INSTANCE);
+            final List<RelCollation> collations =
+                rootRel.getTraitSet().getTraits(RelCollationTraitDef.INSTANCE);
             rootRel = trimmer.trim(rootRel);
-            if (!ordered && collations != null && !collations.isEmpty()
-                    && !collations.equals(ImmutableList.of(RelCollations.EMPTY))) {
-                final RelTraitSet traitSet = rootRel.getTraitSet().replace(RelCollationTraitDef.INSTANCE, collations);
+            if (!ordered
+                && collations != null
+                && !collations.isEmpty()
+                && !collations.equals(ImmutableList.of(RelCollations.EMPTY))) {
+                final RelTraitSet traitSet = rootRel.getTraitSet()
+                    .replace(RelCollationTraitDef.INSTANCE, collations);
                 rootRel = rootRel.copy(traitSet, rootRel.getInputs());
             }
             if (SQL2REL_LOGGER.isDebugEnabled()) {
-                SQL2REL_LOGGER.debug(RelOptUtil.dumpPlan("Plan after trimming unused fields", rootRel,
+                SQL2REL_LOGGER.debug(
+                    RelOptUtil.dumpPlan("Plan after trimming unused fields", rootRel,
                         SqlExplainFormat.TEXT, SqlExplainLevel.EXPPLAN_ATTRIBUTES));
             }
         }
@@ -495,7 +536,8 @@ public class SqlToRelConverter {
      * @return Field trimmer
      */
     protected RelFieldTrimmer newFieldTrimmer() {
-        final RelBuilder relBuilder = RelFactories.LOGICAL_BUILDER.create(cluster, null);
+        final RelBuilder relBuilder =
+            RelFactories.LOGICAL_BUILDER.create(cluster, null);
         return new RelFieldTrimmer(validator, relBuilder);
     }
 
@@ -510,15 +552,19 @@ public class SqlToRelConverter {
      *                        will become a JDBC result set; <code>false</code> if
      *                        the query will be part of a view.
      */
-    public RelRoot convertQuery(SqlNode query, final boolean needsValidation, final boolean top) {
+    public RelRoot convertQuery(
+        SqlNode query,
+        final boolean needsValidation,
+        final boolean top) {
 
         SqlNode origQuery = query; /* OVERRIDE POINT */
-
+        
         if (needsValidation) {
             query = validator.validate(query);
         }
 
-        RelMetadataQuery.THREAD_PROVIDERS.set(JaninoRelMetadataProvider.of(cluster.getMetadataProvider()));
+        RelMetadataQuery.THREAD_PROVIDERS.set(
+            JaninoRelMetadataProvider.of(cluster.getMetadataProvider()));
         RelNode result = convertQueryRecursive(query, top, null).rel;
         if (top) {
             if (isStream(query)) {
@@ -534,15 +580,19 @@ public class SqlToRelConverter {
         checkConvertedType(query, result);
 
         if (SQL2REL_LOGGER.isDebugEnabled()) {
-            SQL2REL_LOGGER.debug(RelOptUtil.dumpPlan("Plan after converting SqlNode to RelNode", result,
-                    SqlExplainFormat.TEXT, SqlExplainLevel.EXPPLAN_ATTRIBUTES));
+            SQL2REL_LOGGER.debug(
+                RelOptUtil.dumpPlan("Plan after converting SqlNode to RelNode",
+                    result, SqlExplainFormat.TEXT,
+                    SqlExplainLevel.EXPPLAN_ATTRIBUTES));
         }
 
         final RelDataType validatedRowType = validator.getValidatedNodeType(query);
-        RelRoot origResult = RelRoot.of(result, validatedRowType, query.getKind()).withCollation(collation);
+        RelRoot origResult = RelRoot.of(result, validatedRowType, query.getKind())
+            .withCollation(collation);
         return hackSelectStar(origQuery, origResult);
     }
 
+
     /* OVERRIDE POINT */
     private RelRoot hackSelectStar(SqlNode query, RelRoot root) {
         //        /*
@@ -579,26 +629,26 @@ public class SqlToRelConverter {
         List<String> inFields = inType.getFieldNames();
         List<RexNode> projExp = new ArrayList<>();
         List<Pair<Integer, String>> projFields = new ArrayList<>();
-        Map<Integer, Integer> projFieldMapping = new HashMap<>();
+        Map<Integer,Integer> projFieldMapping = new HashMap<>();
         RelDataTypeFactory.FieldInfoBuilder projTypeBuilder = getCluster().getTypeFactory().builder();
         RelDataTypeFactory.FieldInfoBuilder validTypeBuilder = getCluster().getTypeFactory().builder();
-
+        
         boolean hiddenColumnExists = false;
         for (int i = 0; i < root.validatedRowType.getFieldList().size(); i++) {
-            if (root.validatedRowType.getFieldNames().get(i).startsWith("_KY_"))
+            if (root.validatedRowType.getFieldNames().get(i).startsWith("_KY_")) 
                 hiddenColumnExists = true;
         }
-        if (!hiddenColumnExists) {
+        if(!hiddenColumnExists) {
             return root;
         }
-
+        
         for (int i = 0; i < inFields.size(); i++) {
             if (!inFields.get(i).startsWith("_KY_")) {
                 projExp.add(rootPrj.getProjects().get(i));
                 projFieldMapping.put(i, projFields.size());
                 projFields.add(Pair.of(projFields.size(), inFields.get(i)));
                 projTypeBuilder.add(inType.getFieldList().get(i));
-
+                
                 if (i < root.validatedRowType.getFieldList().size()) //for cases like kylin-it/src/test/resources/query/sql_verifyCount/query10.sql
                     validTypeBuilder.add(root.validatedRowType.getFieldList().get(i));
             }
@@ -613,7 +663,7 @@ public class SqlToRelConverter {
             List<RelFieldCollation> fieldCollations = originalCollation.getFieldCollations();
             ImmutableList.Builder<RelFieldCollation> newFieldCollations = ImmutableList.builder();
             for (RelFieldCollation fieldCollation : fieldCollations) {
-                if (projFieldMapping.containsKey(fieldCollation.getFieldIndex())) {
+                if(projFieldMapping.containsKey(fieldCollation.getFieldIndex())) { 
                     newFieldCollations.add(fieldCollation.copy(projFieldMapping.get(fieldCollation.getFieldIndex())));
                 } else {
                     newFieldCollations.add(fieldCollation);
@@ -624,28 +674,30 @@ public class SqlToRelConverter {
         }
 
         RelDataType validRowType = getCluster().getTypeFactory().createStructType(validTypeBuilder);
-        root = new RelRoot(rootSort == null ? rootPrj : rootSort, validRowType, root.kind, projFields,
-                rootSort == null ? root.collation : rootSort.getCollation());
+        root = new RelRoot(rootSort == null ? rootPrj : rootSort, validRowType, root.kind, projFields, rootSort == null ? root.collation : rootSort.getCollation());
 
         validator.setValidatedNodeType(query, validRowType);
 
         return root;
     }
 
+
     private static boolean isStream(SqlNode query) {
-        return query instanceof SqlSelect && ((SqlSelect) query).isKeywordPresent(SqlSelectKeyword.STREAM);
+        return query instanceof SqlSelect
+            && ((SqlSelect) query).isKeywordPresent(SqlSelectKeyword.STREAM);
     }
 
     public static boolean isOrdered(SqlNode query) {
         switch (query.getKind()) {
-        case SELECT:
-            return ((SqlSelect) query).getOrderList() != null && ((SqlSelect) query).getOrderList().size() > 0;
-        case WITH:
-            return isOrdered(((SqlWith) query).body);
-        case ORDER_BY:
-            return ((SqlOrderBy) query).orderList.size() > 0;
-        default:
-            return false;
+            case SELECT:
+                return ((SqlSelect) query).getOrderList() != null
+                    && ((SqlSelect) query).getOrderList().size() > 0;
+            case WITH:
+                return isOrdered(((SqlWith) query).body);
+            case ORDER_BY:
+                return ((SqlOrderBy) query).orderList.size() > 0;
+            default:
+                return false;
         }
     }
 
@@ -675,7 +727,8 @@ public class SqlToRelConverter {
     /**
      * Factory method for creating translation workspace.
      */
-    protected Blackboard createBlackboard(SqlValidatorScope scope, Map<String, RexNode> nameToNodeMap, boolean top) {
+    protected Blackboard createBlackboard(SqlValidatorScope scope,
+        Map<String, RexNode> nameToNodeMap, boolean top) {
         return new Blackboard(scope, nameToNodeMap, top);
     }
 
@@ -683,25 +736,45 @@ public class SqlToRelConverter {
      * Implementation of {@link #convertSelect(SqlSelect, boolean)};
      * derived class may override.
      */
-    protected void convertSelectImpl(final Blackboard bb, SqlSelect select) {
-        convertFrom(bb, select.getFrom());
-        convertWhere(bb, select.getWhere());
+    protected void convertSelectImpl(
+        final Blackboard bb,
+        SqlSelect select) {
+        convertFrom(
+            bb,
+            select.getFrom());
+        convertWhere(
+            bb,
+            select.getWhere());
 
         final List<SqlNode> orderExprList = new ArrayList<>();
         final List<RelFieldCollation> collationList = new ArrayList<>();
-        gatherOrderExprs(bb, select, select.getOrderList(), orderExprList, collationList);
-        final RelCollation collation = cluster.traitSet().canonize(RelCollations.of(collationList));
+        gatherOrderExprs(
+            bb,
+            select,
+            select.getOrderList(),
+            orderExprList,
+            collationList);
+        final RelCollation collation =
+            cluster.traitSet().canonize(RelCollations.of(collationList));
 
         if (validator.isAggregate(select)) {
-            convertAgg(bb, select, orderExprList);
+            convertAgg(
+                bb,
+                select,
+                orderExprList);
         } else {
-            convertSelectList(bb, select, orderExprList);
+            convertSelectList(
+                bb,
+                select,
+                orderExprList);
         }
 
         if (select.isDistinct()) {
             distinctify(bb, true);
         }
-        convertOrder(select, bb, collation, orderExprList, select.getOffset(), select.getFetch());
+        convertOrder(
+            select, bb, collation, orderExprList, select.getOffset(),
+            select.getFetch());
         bb.setRoot(bb.root, true);
     }
 
@@ -717,7 +790,9 @@ public class SqlToRelConverter {
      * @param bb               Blackboard
      * @param checkForDupExprs Check for duplicate expressions
      */
-    private void distinctify(Blackboard bb, boolean checkForDupExprs) {
+    private void distinctify(
+        Blackboard bb,
+        boolean checkForDupExprs) {
         // Look for duplicate expressions in the project.
         // Say we have 'select x, y, x, z'.
         // Then dups will be {[2, 0]}
@@ -751,7 +826,9 @@ public class SqlToRelConverter {
                     newProjects.add(RexInputRef.of2(i, fields));
                 }
             }
-            rel = LogicalProject.create(rel, Pair.left(newProjects), Pair.right(newProjects));
+            rel =
+                LogicalProject.create(rel, Pair.left(newProjects),
+                    Pair.right(newProjects));
             bb.root = rel;
             distinctify(bb, false);
             rel = bb.root;
@@ -763,21 +840,33 @@ public class SqlToRelConverter {
                 final int origin = origins.get(i);
                 RelDataTypeField field = fields.get(i);
                 undoProjects.add(
-                        Pair.of((RexNode) new RexInputRef(squished.get(origin), field.getType()), field.getName()));
+                    Pair.of(
+                        (RexNode) new RexInputRef(
+                            squished.get(origin), field.getType()),
+                        field.getName()));
             }
 
-            rel = LogicalProject.create(rel, Pair.left(undoProjects), Pair.right(undoProjects));
-            bb.setRoot(rel, false);
+            rel =
+                LogicalProject.create(rel, Pair.left(undoProjects),
+                    Pair.right(undoProjects));
+            bb.setRoot(
+                rel,
+                false);
 
             return;
         }
 
         // Usual case: all of the expressions in the SELECT clause are
         // different.
-        final ImmutableBitSet groupSet = ImmutableBitSet.range(rel.getRowType().getFieldCount());
-        rel = createAggregate(bb, false, groupSet, ImmutableList.of(groupSet), ImmutableList.<AggregateCall> of());
-
-        bb.setRoot(rel, false);
+        final ImmutableBitSet groupSet =
+            ImmutableBitSet.range(rel.getRowType().getFieldCount());
+        rel =
+            createAggregate(bb, false, groupSet, ImmutableList.of(groupSet),
+                ImmutableList.<AggregateCall>of());
+
+        bb.setRoot(
+            rel,
+            false);
     }
 
     private int findExpr(RexNode seek, List<RexNode> exprs, int count) {
@@ -802,18 +891,29 @@ public class SqlToRelConverter {
      *                      returning first row
      * @param fetch         Expression for number of rows to fetch
      */
-    protected void convertOrder(SqlSelect select, Blackboard bb, RelCollation collation, List<SqlNode> orderExprList,
-            SqlNode offset, SqlNode fetch) {
-        if (select.getOrderList() == null || select.getOrderList().getList().isEmpty()) {
+    protected void convertOrder(
+        SqlSelect select,
+        Blackboard bb,
+        RelCollation collation,
+        List<SqlNode> orderExprList,
+        SqlNode offset,
+        SqlNode fetch) {
+        if (select.getOrderList() == null
+            || select.getOrderList().getList().isEmpty()) {
             assert collation.getFieldCollations().isEmpty();
-            if ((offset == null || ((SqlLiteral) offset).bigDecimalValue().equals(BigDecimal.ZERO)) && fetch == null) {
+            if ((offset == null
+                || ((SqlLiteral) offset).bigDecimalValue().equals(BigDecimal.ZERO))
+                && fetch == null) {
                 return;
             }
         }
 
         // Create a sorter using the previously constructed collations.
-        bb.setRoot(LogicalSort.create(bb.root, collation, offset == null ? null : convertExpression(offset),
-                fetch == null ? null : convertExpression(fetch)), false);
+        bb.setRoot(
+            LogicalSort.create(bb.root, collation,
+                offset == null ? null : convertExpression(offset),
+                fetch == null ? null : convertExpression(fetch)),
+            false);
 
         // If extra expressions were added to the project list for sorting,
         // add another project to remove them. But make the collation empty, because
@@ -823,11 +923,15 @@ public class SqlToRelConverter {
         if (orderExprList.size() > 0 && !bb.top) {
             final List<RexNode> exprs = new ArrayList<>();
             final RelDataType rowType = bb.root.getRowType();
-            final int fieldCount = rowType.getFieldCount() - orderExprList.size();
+            final int fieldCount =
+                rowType.getFieldCount() - orderExprList.size();
             for (int i = 0; i < fieldCount; i++) {
                 exprs.add(rexBuilder.makeInputRef(bb.root, i));
             }
-            bb.setRoot(LogicalProject.create(bb.root, exprs, rowType.getFieldNames().subList(0, fieldCount)), false);
+            bb.setRoot(
+                LogicalProject.create(bb.root, exprs,
+                    rowType.getFieldNames().subList(0, fieldCount)),
+                false);
         }
     }
 
@@ -836,16 +940,18 @@ public class SqlToRelConverter {
      *
      * @param node a RexNode tree
      */
-    private static boolean containsInOperator(SqlNode node) {
+    private static boolean containsInOperator(
+        SqlNode node) {
         try {
-            SqlVisitor<Void> visitor = new SqlBasicVisitor<Void>() {
-                public Void visit(SqlCall call) {
-                    if (call.getOperator() instanceof SqlInOperator) {
-                        throw new Util.FoundOne(call);
+            SqlVisitor<Void> visitor =
+                new SqlBasicVisitor<Void>() {
+                    public Void visit(SqlCall call) {
+                        if (call.getOperator() instanceof SqlInOperator) {
+                            throw new Util.FoundOne(call);
+                        }
+                        return super.visit(call);
                     }
-                    return super.visit(call);
-                }
-            };
+                };
             node.accept(visitor);
             return false;
         } catch (Util.FoundOne e) {
@@ -861,11 +967,12 @@ public class SqlToRelConverter {
      * @param sqlNode the root node from which to look for NOT operators
      * @return the transformed SqlNode representation with NOT pushed down.
      */
-    private static SqlNode pushDownNotForIn(SqlValidatorScope scope, SqlNode sqlNode) {
+    private static SqlNode pushDownNotForIn(SqlValidatorScope scope,
+        SqlNode sqlNode) {
         if ((sqlNode instanceof SqlCall) && containsInOperator(sqlNode)) {
             SqlCall sqlCall = (SqlCall) sqlNode;
             if ((sqlCall.getOperator() == SqlStdOperatorTable.AND)
-                    || (sqlCall.getOperator() == SqlStdOperatorTable.OR)) {
+                || (sqlCall.getOperator() == SqlStdOperatorTable.OR)) {
                 SqlNode[] sqlOperands = ((SqlBasicCall) sqlCall).operands;
                 for (int i = 0; i < sqlOperands.length; i++) {
                     sqlOperands[i] = pushDownNotForIn(scope, sqlOperands[i]);
@@ -880,38 +987,45 @@ public class SqlToRelConverter {
                     SqlNode[] orOperands = new SqlNode[andOperands.length];
                     for (int i = 0; i < orOperands.length; i++) {
                         orOperands[i] = reg(scope,
-                                SqlStdOperatorTable.NOT.createCall(SqlParserPos.ZERO, andOperands[i]));
+                            SqlStdOperatorTable.NOT.createCall(SqlParserPos.ZERO,
+                                andOperands[i]));
                     }
                     for (int i = 0; i < orOperands.length; i++) {
                         orOperands[i] = pushDownNotForIn(scope, orOperands[i]);
                     }
                     return reg(scope,
-                            SqlStdOperatorTable.OR.createCall(SqlParserPos.ZERO, orOperands[0], orOperands[1]));
+                        SqlStdOperatorTable.OR.createCall(SqlParserPos.ZERO,
+                            orOperands[0], orOperands[1]));
                 } else if (childSqlCall.getOperator() == SqlStdOperatorTable.OR) {
                     SqlNode[] orOperands = childSqlCall.getOperands();
                     SqlNode[] andOperands = new SqlNode[orOperands.length];
                     for (int i = 0; i < andOperands.length; i++) {
                         andOperands[i] = reg(scope,
-                                SqlStdOperatorTable.NOT.createCall(SqlParserPos.ZERO, orOperands[i]));
+                            SqlStdOperatorTable.NOT.createCall(SqlParserPos.ZERO,
+                                orOperands[i]));
                     }
                     for (int i = 0; i < andOperands.length; i++) {
                         andOperands[i] = pushDownNotForIn(scope, andOperands[i]);
                     }
                     return reg(scope,
-                            SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO, andOperands[0], andOperands[1]));
+                        SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO,
+                            andOperands[0], andOperands[1]));
                 } else if (childSqlCall.getOperator() == SqlStdOperatorTable.NOT) {
                     SqlNode[] notOperands = childSqlCall.getOperands();
                     assert notOperands.length == 1;
                     return pushDownNotForIn(scope, notOperands[0]);
                 } else if (childSqlCall.getOperator() instanceof SqlInOperator) {
                     SqlNode[] inOperands = childSqlCall.getOperands();
-                    SqlInOperator inOp = (SqlInOperator) childSqlCall.getOperator();
+                    SqlInOperator inOp =
+                        (SqlInOperator) childSqlCall.getOperator();
                     if (inOp.isNotIn()) {
                         return reg(scope,
-                                SqlStdOperatorTable.IN.createCall(SqlParserPos.ZERO, inOperands[0], inOperands[1]));
+                            SqlStdOperatorTable.IN.createCall(SqlParserPos.ZERO,
+                                inOperands[0], inOperands[1]));
                     } else {
                         return reg(scope,
-                                SqlStdOperatorTable.NOT_IN.createCall(SqlParserPos.ZERO, inOperands[0], inOperands[1]));
+                            SqlStdOperatorTable.NOT_IN.createCall(SqlParserPos.ZERO,
+                                inOperands[0], inOperands[1]));
                     }
                 } else {
                     // childSqlCall is "leaf" node in a logical expression tree
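The rewrite visible in this hunk is a De Morgan style push-down applied before sub-query conversion: NOT over AND becomes OR of negated operands, NOT over OR becomes AND of negated operands, a double NOT cancels, and NOT over IN flips between IN and NOT IN. A minimal sketch of that rewrite on a toy expression tree follows; the Expr classes are hypothetical stand-ins, not Calcite's SqlNode API.

    // Hypothetical illustration of the NOT push-down performed by pushDownNotForIn.
    // The Expr hierarchy below is a stand-in for Calcite's SqlNode tree.
    final class NotPushDownSketch {
        interface Expr { }
        static final class Leaf implements Expr {        // e.g. "deptno IN (...)" or "a = b"
            final String text; final boolean isIn; final boolean negated;
            Leaf(String text, boolean isIn, boolean negated) {
                this.text = text; this.isIn = isIn; this.negated = negated;
            }
        }
        static final class Not implements Expr { final Expr e; Not(Expr e) { this.e = e; } }
        static final class And implements Expr { final Expr l, r; And(Expr l, Expr r) { this.l = l; this.r = r; } }
        static final class Or  implements Expr { final Expr l, r; Or(Expr l, Expr r)  { this.l = l; this.r = r; } }

        static Expr pushDownNot(Expr e) {
            if (e instanceof And) {
                And a = (And) e;
                return new And(pushDownNot(a.l), pushDownNot(a.r));
            }
            if (e instanceof Or) {
                Or o = (Or) e;
                return new Or(pushDownNot(o.l), pushDownNot(o.r));
            }
            if (e instanceof Not) {
                Expr child = ((Not) e).e;
                if (child instanceof And) {              // NOT(a AND b) -> NOT a OR NOT b
                    And a = (And) child;
                    return new Or(pushDownNot(new Not(a.l)), pushDownNot(new Not(a.r)));
                }
                if (child instanceof Or) {               // NOT(a OR b) -> NOT a AND NOT b
                    Or o = (Or) child;
                    return new And(pushDownNot(new Not(o.l)), pushDownNot(new Not(o.r)));
                }
                if (child instanceof Not) {              // NOT(NOT a) -> a
                    return pushDownNot(((Not) child).e);
                }
                if (child instanceof Leaf && ((Leaf) child).isIn) {
                    Leaf in = (Leaf) child;              // NOT(x IN s) -> x NOT IN s, and back
                    return new Leaf(in.text, true, !in.negated);
                }
                return e;                                // any other leaf: leave the NOT in place
            }
            return e;                                    // plain leaf
        }
    }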
@@ -942,7 +1056,9 @@ public class SqlToRelConverter {
      * @param bb    Blackboard
      * @param where WHERE clause, may be null
      */
-    private void convertWhere(final Blackboard bb, final SqlNode where) {
+    private void convertWhere(
+        final Blackboard bb,
+        final SqlNode where) {
         if (where == null) {
             return;
         }
@@ -955,14 +1071,16 @@ public class SqlToRelConverter {
             return;
         }
 
-        final RelFactories.FilterFactory factory = RelFactories.DEFAULT_FILTER_FACTORY;
+        final RelFactories.FilterFactory factory =
+            RelFactories.DEFAULT_FILTER_FACTORY;
         final RelNode filter = factory.createFilter(bb.root, convertedWhere);
         final RelNode r;
         final CorrelationUse p = getCorrelationUse(bb, filter);
         if (p != null) {
             assert p.r instanceof Filter;
             Filter f = (Filter) p.r;
-            r = LogicalFilter.create(f.getInput(), f.getCondition(), ImmutableSet.of(p.id));
+            r = LogicalFilter.create(f.getInput(), f.getCondition(),
+                ImmutableSet.of(p.id));
         } else {
             r = filter;
         }
@@ -970,7 +1088,10 @@ public class SqlToRelConverter {
         bb.setRoot(r, false);
     }
 
-    private void replaceSubQueries(final Blackboard bb, final SqlNode expr, RelOptUtil.Logic logic) {
+    private void replaceSubQueries(
+        final Blackboard bb,
+        final SqlNode expr,
+        RelOptUtil.Logic logic) {
         findSubQueries(bb, expr, logic, false);
         for (SubQuery node : bb.subQueryList) {
             substituteSubQuery(bb, node);
@@ -989,234 +1110,269 @@ public class SqlToRelConverter {
         final SqlNode query;
         final RelOptUtil.Exists converted;
         switch (subQuery.node.getKind()) {
-        case CURSOR:
-            convertCursor(bb, subQuery);
-            return;
-
-        case MULTISET_QUERY_CONSTRUCTOR:
-        case MULTISET_VALUE_CONSTRUCTOR:
-        case ARRAY_QUERY_CONSTRUCTOR:
-            rel = convertMultisets(ImmutableList.of(subQuery.node), bb);
-            subQuery.expr = bb.register(rel, JoinRelType.INNER);
-            return;
+            case CURSOR:
+                convertCursor(bb, subQuery);
+                return;
 
-        case IN:
-            call = (SqlBasicCall) subQuery.node;
-            query = call.operand(1);
-            if (!config.isExpand() && !(query instanceof SqlNodeList)) {
+            case MULTISET_QUERY_CONSTRUCTOR:
+            case MULTISET_VALUE_CONSTRUCTOR:
+            case ARRAY_QUERY_CONSTRUCTOR:
+                rel = convertMultisets(ImmutableList.of(subQuery.node), bb);
+                subQuery.expr = bb.register(rel, JoinRelType.INNER);
                 return;
-            }
-            final SqlNode leftKeyNode = call.operand(0);
-
-            final List<RexNode> leftKeys;
-            switch (leftKeyNode.getKind()) {
-            case ROW:
-                leftKeys = Lists.newArrayList();
-                for (SqlNode sqlExpr : ((SqlBasicCall) leftKeyNode).getOperandList()) {
-                    leftKeys.add(bb.convertExpression(sqlExpr));
-                }
-                break;
-            default:
-                leftKeys = ImmutableList.of(bb.convertExpression(leftKeyNode));
-            }
 
-            final boolean notIn = ((SqlInOperator) call.getOperator()).isNotIn();
-            if (query instanceof SqlNodeList) {
-                SqlNodeList valueList = (SqlNodeList) query;
-                if (!containsNullLiteral(valueList) && valueList.size() < config.getInSubQueryThreshold()) {
-                    // We're under the threshold, so convert to OR.
-                    subQuery.expr = convertInToOr(bb, leftKeys, valueList, notIn);
+            case IN:
+                call = (SqlBasicCall) subQuery.node;
+                query = call.operand(1);
+                if (!config.isExpand() && !(query instanceof SqlNodeList)) {
                     return;
                 }
+                final SqlNode leftKeyNode = call.operand(0);
+
+                final List<RexNode> leftKeys;
+                switch (leftKeyNode.getKind()) {
+                    case ROW:
+                        leftKeys = Lists.newArrayList();
+                        for (SqlNode sqlExpr : ((SqlBasicCall) leftKeyNode).getOperandList()) {
+                            leftKeys.add(bb.convertExpression(sqlExpr));
+                        }
+                        break;
+                    default:
+                        leftKeys = ImmutableList.of(bb.convertExpression(leftKeyNode));
+                }
 
-                // Otherwise, let convertExists translate
-                // values list into an inline table for the
-                // reference to Q below.
-            }
-
-            // Project out the search columns from the left side
-
-            // Q1:
-            // "select from emp where emp.deptno in (select col1 from T)"
-            //
-            // is converted to
-            //
-            // "select from
-            //   emp inner join (select distinct col1 from T)) q
-            //   on emp.deptno = q.col1
-            //
-            // Q2:
-            // "select from emp where emp.deptno not in (Q)"
-            //
-            // is converted to
-            //
-            // "select from
-            //   emp left outer join (select distinct col1, TRUE from T) q
-            //   on emp.deptno = q.col1
-            //   where emp.deptno <> null
-            //         and q.indicator <> TRUE"
-            //
-            final RelDataType targetRowType = SqlTypeUtil.promoteToRowType(typeFactory,
-                    validator.getValidatedNodeType(leftKeyNode), null);
-            converted = convertExists(query, RelOptUtil.SubQueryType.IN, subQuery.logic, notIn, targetRowType);
-            if (converted.indicator) {
-                // Generate
-                //    emp CROSS JOIN (SELECT COUNT(*) AS c,
-                //                       COUNT(deptno) AS ck FROM dept)
-                final RelDataType longType = typeFactory.createSqlType(SqlTypeName.BIGINT);
-                final RelNode seek = converted.r.getInput(0); // fragile
-                final int keyCount = leftKeys.size();
-                final List<Integer> args = ImmutableIntList.range(0, keyCount);
-                LogicalAggregate aggregate = LogicalAggregate.create(seek, false, ImmutableBitSet.of(), null,
-                        ImmutableList.of(
-                                AggregateCall.create(SqlStdOperatorTable.COUNT, false, ImmutableList.<Integer> of(), -1,
-                                        longType, null),
-                                AggregateCall.create(SqlStdOperatorTable.COUNT, false, args, -1, longType, null)));
-                LogicalJoin join = LogicalJoin.create(bb.root, aggregate, rexBuilder.makeLiteral(true),
-                        ImmutableSet.<CorrelationId> of(), JoinRelType.INNER);
-                bb.setRoot(join, false);
-            }
-            final RexNode rex = bb.register(converted.r, converted.outerJoin ? JoinRelType.LEFT : JoinRelType.INNER,
-                    leftKeys);
+                final boolean notIn = ((SqlInOperator) call.getOperator()).isNotIn();
+                if (query instanceof SqlNodeList) {
+                    SqlNodeList valueList = (SqlNodeList) query;
+                    if (!containsNullLiteral(valueList)
+                        && valueList.size() < config.getInSubQueryThreshold()) {
+                        // We're under the threshold, so convert to OR.
+                        subQuery.expr =
+                            convertInToOr(
+                                bb,
+                                leftKeys,
+                                valueList,
+                                notIn);
+                        return;
+                    }
 
-            RelOptUtil.Logic logic = subQuery.logic;
-            switch (logic) {
-            case TRUE_FALSE_UNKNOWN:
-            case UNKNOWN_AS_TRUE:
-                if (!converted.indicator) {
-                    logic = RelOptUtil.Logic.TRUE_FALSE;
+                    // Otherwise, let convertExists translate
+                    // values list into an inline table for the
+                    // reference to Q below.
                 }
-            }
-            subQuery.expr = translateIn(logic, bb.root, rex);
-            if (notIn) {
-                subQuery.expr = rexBuilder.makeCall(SqlStdOperatorTable.NOT, subQuery.expr);
-            }
-            return;
 
-        case EXISTS:
-            // "select from emp where exists (select a from T)"
-            //
-            // is converted to the following if the sub-query is correlated:
-            //
-            // "select from emp left outer join (select AGG_TRUE() as indicator
-            // from T group by corr_var) q where q.indicator is true"
-            //
-            // If there is no correlation, the expression is replaced with a
-            // boolean indicating whether the sub-query returned 0 or >= 1 row.
-            call = (SqlBasicCall) subQuery.node;
-            query = call.operand(0);
-            if (!config.isExpand()) {
-                return;
-            }
-            converted = convertExists(query, RelOptUtil.SubQueryType.EXISTS, subQuery.logic, true, null);
-            assert !converted.indicator;
-            if (convertNonCorrelatedSubQuery(subQuery, bb, converted.r, true)) {
+                // Project out the search columns from the left side
+
+                // Q1:
+                // "select from emp where emp.deptno in (select col1 from T)"
+                //
+                // is converted to
+                //
+                // "select from
+                //   emp inner join (select distinct col1 from T)) q
+                //   on emp.deptno = q.col1
+                //
+                // Q2:
+                // "select from emp where emp.deptno not in (Q)"
+                //
+                // is converted to
+                //
+                // "select from
+                //   emp left outer join (select distinct col1, TRUE from T) q
+                //   on emp.deptno = q.col1
+                //   where emp.deptno <> null
+                //         and q.indicator <> TRUE"
+                //
+                final RelDataType targetRowType =
+                    SqlTypeUtil.promoteToRowType(typeFactory,
+                        validator.getValidatedNodeType(leftKeyNode), null);
+                converted =
+                    convertExists(query, RelOptUtil.SubQueryType.IN, subQuery.logic,
+                        notIn, targetRowType);
+                if (converted.indicator) {
+                    // Generate
+                    //    emp CROSS JOIN (SELECT COUNT(*) AS c,
+                    //                       COUNT(deptno) AS ck FROM dept)
+                    final RelDataType longType =
+                        typeFactory.createSqlType(SqlTypeName.BIGINT);
+                    final RelNode seek = converted.r.getInput(0); // fragile
+                    final int keyCount = leftKeys.size();
+                    final List<Integer> args = ImmutableIntList.range(0, keyCount);
+                    LogicalAggregate aggregate =
+                        LogicalAggregate.create(seek, false, ImmutableBitSet.of(), null,
+                            ImmutableList.of(
+                                AggregateCall.create(SqlStdOperatorTable.COUNT, false,
+                                    ImmutableList.<Integer>of(), -1, longType, null),
+                                AggregateCall.create(SqlStdOperatorTable.COUNT, false,
+                                    args, -1, longType, null)));
+                    LogicalJoin join =
+                        LogicalJoin.create(bb.root, aggregate, rexBuilder.makeLiteral(true),
+                            ImmutableSet.<CorrelationId>of(), JoinRelType.INNER);
+                    bb.setRoot(join, false);
+                }
+                final RexNode rex =
+                    bb.register(converted.r,
+                        converted.outerJoin ? JoinRelType.LEFT : JoinRelType.INNER,
+                        leftKeys);
+
+                RelOptUtil.Logic logic = subQuery.logic;
+                switch (logic) {
+                    case TRUE_FALSE_UNKNOWN:
+                    case UNKNOWN_AS_TRUE:
+                        if (!converted.indicator) {
+                            logic = RelOptUtil.Logic.TRUE_FALSE;
+                        }
+                }
+                subQuery.expr = translateIn(logic, bb.root, rex);
+                if (notIn) {
+                    subQuery.expr =
+                        rexBuilder.makeCall(SqlStdOperatorTable.NOT, subQuery.expr);
+                }
                 return;
-            }
-            subQuery.expr = bb.register(converted.r, JoinRelType.LEFT);
-            return;
 
-        case SCALAR_QUERY:
-            // Convert the sub-query.  If it's non-correlated, convert it
-            // to a constant expression.
-            if (!config.isExpand()) {
+            case EXISTS:
+                // "select from emp where exists (select a from T)"
+                //
+                // is converted to the following if the sub-query is correlated:
+                //
+                // "select from emp left outer join (select AGG_TRUE() as indicator
+                // from T group by corr_var) q where q.indicator is true"
+                //
+                // If there is no correlation, the expression is replaced with a
+                // boolean indicating whether the sub-query returned 0 or >= 1 row.
+                call = (SqlBasicCall) subQuery.node;
+                query = call.operand(0);
+                if (!config.isExpand()) {
+                    return;
+                }
+                converted = convertExists(query, RelOptUtil.SubQueryType.EXISTS,
+                    subQuery.logic, true, null);
+                assert !converted.indicator;
+                if (convertNonCorrelatedSubQuery(subQuery, bb, converted.r, true)) {
+                    return;
+                }
+                subQuery.expr = bb.register(converted.r, JoinRelType.LEFT);
                 return;
-            }
-            call = (SqlBasicCall) subQuery.node;
-            query = call.operand(0);
-            converted = convertExists(query, RelOptUtil.SubQueryType.SCALAR, subQuery.logic, true, null);
-            assert !converted.indicator;
-            if (convertNonCorrelatedSubQuery(subQuery, bb, converted.r, false)) {
+
+            case SCALAR_QUERY:
+                // Convert the sub-query.  If it's non-correlated, convert it
+                // to a constant expression.
+                if (!config.isExpand()) {
+                    return;
+                }
+                call = (SqlBasicCall) subQuery.node;
+                query = call.operand(0);
+                converted = convertExists(query, RelOptUtil.SubQueryType.SCALAR,
+                    subQuery.logic, true, null);
+                assert !converted.indicator;
+                if (convertNonCorrelatedSubQuery(subQuery, bb, converted.r, false)) {
+                    return;
+                }
+                rel = convertToSingleValueSubq(query, converted.r);
+                subQuery.expr = bb.register(rel, JoinRelType.LEFT);
                 return;
-            }
-            rel = convertToSingleValueSubq(query, converted.r);
-            subQuery.expr = bb.register(rel, JoinRelType.LEFT);
-            return;
 
-        case SELECT:
-            // This is used when converting multiset queries:
-            //
-            // select * from unnest(select multiset[deptno] from emps);
-            //
-            converted = convertExists(subQuery.node, RelOptUtil.SubQueryType.SCALAR, subQuery.logic, true, null);
-            assert !converted.indicator;
-            subQuery.expr = bb.register(converted.r, JoinRelType.LEFT);
-            return;
+            case SELECT:
+                // This is used when converting multiset queries:
+                //
+                // select * from unnest(select multiset[deptno] from emps);
+                //
+                converted = convertExists(subQuery.node, RelOptUtil.SubQueryType.SCALAR,
+                    subQuery.logic, true, null);
+                assert !converted.indicator;
+                subQuery.expr = bb.register(converted.r, JoinRelType.LEFT);
+                return;
 
-        default:
-            throw new AssertionError("unexpected kind of sub-query: " + subQuery.node);
+            default:
+                throw new AssertionError("unexpected kind of sub-query: "
+                    + subQuery.node);
         }
     }
 
-    private RexNode translateIn(RelOptUtil.Logic logic, RelNode root, final RexNode rex) {
+    private RexNode translateIn(RelOptUtil.Logic logic, RelNode root,
+        final RexNode rex) {
         switch (logic) {
-        case TRUE:
-            return rexBuilder.makeLiteral(true);
+            case TRUE:
+                return rexBuilder.makeLiteral(true);
 
-        case TRUE_FALSE:
-        case UNKNOWN_AS_FALSE:
-            assert rex instanceof RexRangeRef;
-            final int fieldCount = rex.getType().getFieldCount();
-            RexNode rexNode = rexBuilder.makeFieldAccess(rex, fieldCount - 1);
-            rexNode = rexBuilder.makeCall(SqlStdOperatorTable.IS_TRUE, rexNode);
-
-            // Then append the IS NOT NULL(leftKeysForIn).
-            //
-            // RexRangeRef contains the following fields:
-            //   leftKeysForIn,
-            //   rightKeysForIn (the original sub-query select list),
-            //   nullIndicator
-            //
-            // The first two lists contain the same number of fields.
-            final int k = (fieldCount - 1) / 2;
-            for (int i = 0; i < k; i++) {
-                rexNode = rexBuilder.makeCall(SqlStdOperatorTable.AND, rexNode,
-                        rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, rexBuilder.makeFieldAccess(rex, i)));
-            }
-            return rexNode;
-
-        case TRUE_FALSE_UNKNOWN:
-        case UNKNOWN_AS_TRUE:
-            // select e.deptno,
-            //   case
-            //   when ct.c = 0 then false
-            //   when dt.i is not null then true
-            //   when e.deptno is null then null
-            //   when ct.ck < ct.c then null
-            //   else false
-            //   end
-            // from e
-            // cross join (select count(*) as c, count(deptno) as ck from v) as ct
-            // left join (select distinct deptno, true as i from v) as dt
-            //   on e.deptno = dt.deptno
-            final Join join = (Join) root;
-            final Project left = (Project) join.getLeft();
-            final RelNode leftLeft = ((Join) left.getInput()).getLeft();
-            final int leftLeftCount = leftLeft.getRowType().getFieldCount();
-            final RelDataType longType = typeFactory.createSqlType(SqlTypeName.BIGINT);
-            final RexNode cRef = rexBuilder.makeInputRef(root, leftLeftCount);
-            final RexNode ckRef = rexBuilder.makeInputRef(root, leftLeftCount + 1);
-            final RexNode iRef = rexBuilder.makeInputRef(root, root.getRowType().getFieldCount() - 1);
-
-            final RexLiteral zero = rexBuilder.makeExactLiteral(BigDecimal.ZERO, longType);
-            final RexLiteral trueLiteral = rexBuilder.makeLiteral(true);
-            final RexLiteral falseLiteral = rexBuilder.makeLiteral(false);
-            final RexNode unknownLiteral = rexBuilder.makeNullLiteral(trueLiteral.getType());
-
-            final ImmutableList.Builder<RexNode> args = ImmutableList.builder();
-            args.add(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, cRef, zero), falseLiteral,
-                    rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, iRef), trueLiteral);
-            final JoinInfo joinInfo = join.analyzeCondition();
-            for (int leftKey : joinInfo.leftKeys) {
-                final RexNode kRef = rexBuilder.makeInputRef(root, leftKey);
-                args.add(rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, kRef), unknownLiteral);
-            }
-            args.add(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, ckRef, cRef), unknownLiteral, falseLiteral);
+            case TRUE_FALSE:
+            case UNKNOWN_AS_FALSE:
+                assert rex instanceof RexRangeRef;
+                final int fieldCount = rex.getType().getFieldCount();
+                RexNode rexNode = rexBuilder.makeFieldAccess(rex, fieldCount - 1);
+                rexNode = rexBuilder.makeCall(SqlStdOperatorTable.IS_TRUE, rexNode);
+
+                // Then append the IS NOT NULL(leftKeysForIn).
+                //
+                // RexRangeRef contains the following fields:
+                //   leftKeysForIn,
+                //   rightKeysForIn (the original sub-query select list),
+                //   nullIndicator
+                //
+                // The first two lists contain the same number of fields.
+                final int k = (fieldCount - 1) / 2;
+                for (int i = 0; i < k; i++) {
+                    rexNode =
+                        rexBuilder.makeCall(
+                            SqlStdOperatorTable.AND,
+                            rexNode,
+                            rexBuilder.makeCall(
+                                SqlStdOperatorTable.IS_NOT_NULL,
+                                rexBuilder.makeFieldAccess(rex, i)));
+                }
+                return rexNode;
 
-            return rexBuilder.makeCall(SqlStdOperatorTable.CASE, args.build());
+            case TRUE_FALSE_UNKNOWN:
+            case UNKNOWN_AS_TRUE:
+                // select e.deptno,
+                //   case
+                //   when ct.c = 0 then false
+                //   when dt.i is not null then true
+                //   when e.deptno is null then null
+                //   when ct.ck < ct.c then null
+                //   else false
+                //   end
+                // from e
+                // cross join (select count(*) as c, count(deptno) as ck from v) as ct
+                // left join (select distinct deptno, true as i from v) as dt
+                //   on e.deptno = dt.deptno
+                final Join join = (Join) root;
+                final Project left = (Project) join.getLeft();
+                final RelNode leftLeft = ((Join) left.getInput()).getLeft();
+                final int leftLeftCount = leftLeft.getRowType().getFieldCount();
+                final RelDataType longType =
+                    typeFactory.createSqlType(SqlTypeName.BIGINT);
+                final RexNode cRef = rexBuilder.makeInputRef(root, leftLeftCount);
+                final RexNode ckRef = rexBuilder.makeInputRef(root, leftLeftCount + 1);
+                final RexNode iRef =
+                    rexBuilder.makeInputRef(root, root.getRowType().getFieldCount() - 1);
+
+                final RexLiteral zero =
+                    rexBuilder.makeExactLiteral(BigDecimal.ZERO, longType);
+                final RexLiteral trueLiteral = rexBuilder.makeLiteral(true);
+                final RexLiteral falseLiteral = rexBuilder.makeLiteral(false);
+                final RexNode unknownLiteral =
+                    rexBuilder.makeNullLiteral(trueLiteral.getType());
+
+                final ImmutableList.Builder<RexNode> args = ImmutableList.builder();
+                args.add(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, cRef, zero),
+                    falseLiteral,
+                    rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, iRef),
+                    trueLiteral);
+                final JoinInfo joinInfo = join.analyzeCondition();
+                for (int leftKey : joinInfo.leftKeys) {
+                    final RexNode kRef = rexBuilder.makeInputRef(root, leftKey);
+                    args.add(rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, kRef),
+                        unknownLiteral);
+                }
+                args.add(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, ckRef, cRef),
+                    unknownLiteral,
+                    falseLiteral);
 
-        default:
-            throw new AssertionError(logic);
+                return rexBuilder.makeCall(SqlStdOperatorTable.CASE, args.build());
+
+            default:
+                throw new AssertionError(logic);
         }
     }
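The CASE expression sketched in the comment above encodes SQL's three-valued semantics for "key IN (sub-query)": an empty sub-query yields FALSE, a found match yields TRUE, a NULL probe key or a sub-query value list that contained NULLs yields UNKNOWN, and anything else yields FALSE. A compact decision-table sketch of the same rules (a hypothetical helper; Java's Boolean null stands in for UNKNOWN):

    // Hypothetical condensation of the CASE generated for TRUE_FALSE_UNKNOWN / UNKNOWN_AS_TRUE.
    // c = COUNT(*) over the sub-query, ck = COUNT(key column) over the sub-query,
    // matched = a join row with a non-null indicator was found.
    final class InSemanticsSketch {
        static Boolean evaluate(long c, long ck, boolean matched, Object probeKey) {
            if (c == 0) return Boolean.FALSE;      // when ct.c = 0 then false
            if (matched) return Boolean.TRUE;      // when dt.i is not null then true
            if (probeKey == null) return null;     // when e.deptno is null then null
            if (ck < c) return null;               // when ct.ck < ct.c then null
            return Boolean.FALSE;                  // else false
        }
    }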
 
@@ -1242,16 +1398,25 @@ public class SqlToRelConverter {
      * @param isExists  true if the sub-query is part of an EXISTS expression
      * @return Whether the sub-query can be converted to a constant
      */
-    private boolean convertNonCorrelatedSubQuery(SubQuery subQuery, Blackboard bb, RelNode converted,
-            boolean isExists) {
+    private boolean convertNonCorrelatedSubQuery(
+        SubQuery subQuery,
+        Blackboard bb,
+        RelNode converted,
+        boolean isExists) {
         SqlCall call = (SqlBasicCall) subQuery.node;
-        if (subQueryConverter.canConvertSubQuery() && isSubQueryNonCorrelated(converted, bb)) {
+        if (subQueryConverter.canConvertSubQuery()
+            && isSubQueryNonCorrelated(converted, bb)) {
             // First check if the sub-query has already been converted
             // because it's a nested sub-query.  If so, don't re-evaluate
             // it again.
             RexNode constExpr = mapConvertedNonCorrSubqs.get(call);
             if (constExpr == null) {
-                constExpr = subQueryConverter.convertSubQuery(call, this, isExists, config.isExplain());
+                constExpr =
+                    subQueryConverter.convertSubQuery(
+                        call,
+                        this,
+                        isExists,
+                        config.isExplain());
             }
             if (constExpr != null) {
                 subQuery.expr = constExpr;
@@ -1270,14 +1435,17 @@ public class SqlToRelConverter {
      * @param plan   the original RelNode tree corresponding to the statement
      * @return the converted RelNode tree
      */
-    public RelNode convertToSingleValueSubq(SqlNode query, RelNode plan) {
+    public RelNode convertToSingleValueSubq(
+        SqlNode query,
+        RelNode plan) {
         // Check whether query is guaranteed to produce a single value.
         if (query instanceof SqlSelect) {
             SqlSelect select = (SqlSelect) query;
             SqlNodeList selectList = select.getSelectList();
             SqlNodeList groupList = select.getGroup();
 
-            if ((selectList.size() == 1) && ((groupList == null) || (groupList.size() == 0))) {
+            if ((selectList.size() == 1)
+                && ((groupList == null) || (groupList.size() == 0))) {
                 SqlNode selectExpr = selectList.get(0);
                 if (selectExpr instanceof SqlCall) {
                     SqlCall selectExprCall = (SqlCall) selectExpr;
@@ -1288,7 +1456,8 @@ public class SqlToRelConverter {
 
                 // If there is a limit with 0 or 1,
                 // it is ensured to produce a single value
-                if (select.getFetch() != null && select.getFetch() instanceof SqlNumericLiteral) {
+                if (select.getFetch() != null
+                    && select.getFetch() instanceof SqlNumericLiteral) {
                     SqlNumericLiteral limitNum = (SqlNumericLiteral) select.getFetch();
                     if (((BigDecimal) limitNum.getValue()).intValue() < 2) {
                         return plan;
@@ -1300,13 +1469,17 @@ public class SqlToRelConverter {
             // it is necessary to look into the operands to determine
             // whether SingleValueAgg is necessary
             SqlCall exprCall = (SqlCall) query;
-            if (exprCall.getOperator() instanceof SqlValuesOperator && Util.isSingleValue(exprCall)) {
+            if (exprCall.getOperator()
+                instanceof SqlValuesOperator
+                && Util.isSingleValue(exprCall)) {
                 return plan;
             }
         }
 
         // If not, project SingleValueAgg
-        return RelOptUtil.createSingleValueAggRel(cluster, plan);
+        return RelOptUtil.createSingleValueAggRel(
+            cluster,
+            plan);
     }
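convertToSingleValueSubq only adds the SINGLE_VALUE guard when the scalar sub-query is not already guaranteed to return at most one row; the checks above cover a single ungrouped select item, a FETCH of 0 or 1, and a single-value VALUES expression. A rough sketch of that decision, with hypothetical boolean inputs standing in for the real SqlSelect inspection:

    // Hypothetical summary of when the SINGLE_VALUE wrapper can be skipped
    // (mirrors the structure of convertToSingleValueSubq; inputs are assumptions,
    // not the real SqlNode checks).
    final class SingleValueGuardSketch {
        static boolean needsSingleValueAgg(boolean singleUngroupedSelectItem,
                                           boolean selectItemGuaranteesOneValue,
                                           Integer fetchLimit,
                                           boolean isSingleValueValues) {
            if (singleUngroupedSelectItem
                    && (selectItemGuaranteesOneValue
                        || (fetchLimit != null && fetchLimit < 2))) {
                return false;                      // already at most one row
            }
            if (isSingleValueValues) {
                return false;                      // VALUES known to be single-valued
            }
            return true;                           // otherwise project SINGLE_VALUE
        }
    }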
 
     /**
@@ -1317,34 +1490,52 @@ public class SqlToRelConverter {
      * @param isNotIn    is this a NOT IN operator
      * @return converted expression
      */
-    private RexNode convertInToOr(final Blackboard bb, final List<RexNode> leftKeys, SqlNodeList valuesList,
-            boolean isNotIn) {
+    private RexNode convertInToOr(
+        final Blackboard bb,
+        final List<RexNode> leftKeys,
+        SqlNodeList valuesList,
+        boolean isNotIn) {
         final List<RexNode> comparisons = new ArrayList<>();
         for (SqlNode rightVals : valuesList) {
             RexNode rexComparison;
             if (leftKeys.size() == 1) {
-                rexComparison = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, leftKeys.get(0),
-                        ensureSqlType(leftKeys.get(0).getType(), bb.convertExpression(rightVals)));
+                rexComparison =
+                    rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
+                        leftKeys.get(0),
+                        ensureSqlType(leftKeys.get(0).getType(),
+                            bb.convertExpression(rightVals)));
             } else {
                 assert rightVals instanceof SqlCall;
                 final SqlBasicCall call = (SqlBasicCall) rightVals;
-                assert (call.getOperator() instanceof SqlRowOperator) && call.operandCount() == leftKeys.size();
-                rexComparison = RexUtil.composeConjunction(rexBuilder, Iterables.transform(
-                        Pair.zip(leftKeys, call.getOperandList()), new Function<Pair<RexNode, SqlNode>, RexNode>() {
-                            public RexNode apply(Pair<RexNode, SqlNode> pair) {
-                                return rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, pair.left,
-                                        ensureSqlType(pair.left.getType(), bb.convertExpression(pair.right)));
-                            }
-                        }), false);
+                assert (call.getOperator() instanceof SqlRowOperator)
+                    && call.operandCount() == leftKeys.size();
+                rexComparison =
+                    RexUtil.composeConjunction(
+                        rexBuilder,
+                        Iterables.transform(
+                            Pair.zip(leftKeys, call.getOperandList()),
+                            new Function<Pair<RexNode, SqlNode>, RexNode>() {
+                                public RexNode apply(Pair<RexNode, SqlNode> pair) {
+                                    return rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
+                                        pair.left,
+                                        ensureSqlType(pair.left.getType(),
+                                            bb.convertExpression(pair.right)));
+                                }
+                            }),
+                        false);
             }
             comparisons.add(rexComparison);
         }
 
-        RexNode result = RexUtil.composeDisjunction(rexBuilder, comparisons, true);
+        RexNode result =
+            RexUtil.composeDisjunction(rexBuilder, comparisons, true);
         assert result != null;
 
         if (isNotIn) {
-            result = rexBuilder.makeCall(SqlStdOperatorTable.NOT, result);
+            result =
+                rexBuilder.makeCall(
+                    SqlStdOperatorTable.NOT,
+                    result);
         }
 
         return result;
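Below the configured inSubQueryThreshold, convertInToOr turns a literal IN list into a disjunction of equality comparisons, one per value (a conjunction of per-column equalities when the key is a ROW of several columns), and wraps the result in NOT for NOT IN. A minimal sketch of the structural rewrite over plain strings rather than RexNodes (names are hypothetical):

    // Hypothetical illustration of the IN-to-OR expansion:
    // key IN (v1, ..., vn) becomes key = v1 OR ... OR key = vn, and NOT IN wraps
    // the whole disjunction in NOT. Strings stand in for RexNode comparisons.
    import java.util.List;
    import java.util.stream.Collectors;

    final class InToOrSketch {
        static String expand(String key, List<String> literals, boolean notIn) {
            String disjunction = literals.stream()
                .map(v -> key + " = " + v)
                .collect(Collectors.joining(" OR "));
            return notIn ? "NOT (" + disjunction + ")" : disjunction;
        }
        // expand("deptno", Arrays.asList("10", "20", "30"), false)
        //   -> "deptno = 10 OR deptno = 20 OR deptno = 30"
    }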
@@ -1354,8 +1545,9 @@ public class SqlToRelConverter {
      * cast if necessary. If the expression already has the right type family,
      * returns the expression unchanged. */
     private RexNode ensureSqlType(RelDataType type, RexNode node) {
-        if (type.getSqlTypeName() == node.getType().getSqlTypeName() || (type.getSqlTypeName() == SqlTypeName.VARCHAR
-                && node.getType().getSqlTypeName() == SqlTypeName.CHAR)) {
+        if (type.getSqlTypeName() == node.getType().getSqlTypeName()
+            || (type.getSqlTypeName() == SqlTypeName.VARCHAR
+            && node.getType().getSqlTypeName() == SqlTypeName.CHAR)) {
             return node;
         }
         return rexBuilder.ensureType(type, node, true);
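ensureSqlType skips the cast when the two types already share a SQL type name, or when the only mismatch is a CHAR value used where VARCHAR is expected; everything else goes through rexBuilder.ensureType. The guard, restated on its own with type names as plain strings (a hypothetical simplification):

    // Hypothetical restatement of the ensureSqlType guard.
    final class EnsureSqlTypeSketch {
        static boolean castNeeded(String targetTypeName, String nodeTypeName) {
            boolean sameName = targetTypeName.equals(nodeTypeName);
            boolean charWidensToVarchar =
                "VARCHAR".equals(targetTypeName) && "CHAR".equals(nodeTypeName);
            return !(sameName || charWidensToVarchar);
        }
    }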
@@ -1392,9 +1584,15 @@ public class SqlToRelConverter {
      * @param notIn Whether the operation is NOT IN
      * @return join expression
      */
-    private RelOptUtil.Exists convertExists(SqlNode seek, RelOptUtil.SubQueryType subQueryType, RelOptUtil.Logic logic,
-            boolean notIn, RelDataType targetDataType) {
-        final SqlValidatorScope seekScope = (seek instanceof SqlSelect) ? validator.getSelectScope((SqlSelect) seek)
+    private RelOptUtil.Exists convertExists(
+        SqlNode seek,
+        RelOptUtil.SubQueryType subQueryType,
+        RelOptUtil.Logic logic,
+        boolean notIn,
+        RelDataType targetDataType) {
+        final SqlValidatorScope seekScope =
+            (seek instanceof SqlSelect)
+                ? validator.getSelectScope((SqlSelect) seek)
                 : null;
         final Blackboard seekBb = createBlackboard(seekScope, null, false);
         RelNode seekRel = convertQueryOrInList(seekBb, seek, targetDataType);
@@ -1402,7 +1600,10 @@ public class SqlToRelConverter {
         return RelOptUtil.createExistsPlan(seekRel, subQueryType, logic, notIn);
     }
 
-    private RelNode convertQueryOrInList(Blackboard bb, SqlNode seek, RelDataType targetRowType) {
+    private RelNode convertQueryOrInList(
+        Blackboard bb,
+        SqlNode seek,
+        RelDataType targetRowType) {
         // NOTE: Once we start accepting single-row queries as row constructors,
         // there will be an ambiguity here for a case like X IN ((SELECT Y FROM
         // Z)).  The SQL standard resolves the ambiguity by saying that a lone
@@ -1410,26 +1611,40 @@ public class SqlToRelConverter {
         // expression.  The semantic difference is that a table expression can
         // return multiple rows.
         if (seek instanceof SqlNodeList) {
-            return convertRowValues(bb, seek, ((SqlNodeList) seek).getList(), false, targetRowType);
+            return convertRowValues(
+                bb,
+                seek,
+                ((SqlNodeList) seek).getList(),
+                false,
+                targetRowType);
         } else {
             return convertQueryRecursive(seek, false, null).project();
         }
     }
 
-    private RelNode convertRowValues(Blackboard bb, SqlNode rowList, Collection<SqlNode> rows,
-            boolean allowLiteralsOnly, RelDataType targetRowType) {
+    private RelNode convertRowValues(
+        Blackboard bb,
+        SqlNode rowList,
+        Collection<SqlNode> rows,
+        boolean allowLiteralsOnly,
+        RelDataType targetRowType) {
         // NOTE jvs 30-Apr-2006: We combine all rows consisting entirely of
         // literals into a single LogicalValues; this gives the optimizer a smaller
         // input tree.  For everything else (computed expressions, row
         // sub-queries), we union each row in as a projection on top of a
         // LogicalOneRow.
 
-        final ImmutableList.Builder<ImmutableList<RexLiteral>> tupleList = ImmutableList.builder();
+        final ImmutableList.Builder<ImmutableList<RexLiteral>> tupleList =
+            ImmutableList.builder();
         final RelDataType rowType;
         if (targetRowType != null) {
             rowType = targetRowType;
         } else {
-            rowType = SqlTypeUtil.promoteToRowType(typeFactory, validator.getValidatedNodeType(rowList), null);
+            rowType =
+                SqlTypeUtil.promoteToRowType(
+                    typeFactory,
+                    validator.getValidatedNodeType(rowList),
+                    null);
         }
 
         final List<RelNode> unionInputs = new ArrayList<>();
@@ -1439,7 +1654,12 @@ public class SqlToRelConverter {
                 call = (SqlBasicCall) node;
                 ImmutableList.Builder<RexLiteral> tuple = ImmutableList.builder();
                 for (Ord<SqlNode> operand : Ord.zip(call.operands)) {
-                    RexLiteral rexLiteral = convertLiteralInValuesList(operand.e, bb, rowType, operand.i);
+                    RexLiteral rexLiteral =
+                        convertLiteralInValuesList(
+                            operand.e,
+                            bb,
+                            rowType,
+                            operand.i);
                     if ((rexLiteral == null) && allowLiteralsOnly) {
                         return null;
                     }
@@ -1455,7 +1675,12 @@ public class SqlToRelConverter {
                     continue;
                 }
             } else {
-                RexLiteral rexLiteral = convertLiteralInValuesList(node, bb, rowType, 0);
+                RexLiteral rexLiteral =
+                    convertLiteralInValuesList(
+                        node,
+                        bb,
+                        rowType,
+                        0);
                 if ((rexLiteral != null) && config.isCreateValuesRel()) {
                     tupleList.add(ImmutableList.of(rexLiteral));
                     continue;
@@ -1466,11 +1691,15 @@ public class SqlToRelConverter {
                 }
 
                 // convert "1" to "row(1)"
-                call = (SqlBasicCall) SqlStdOperatorTable.ROW.createCall(SqlParserPos.ZERO, node);
+                call =
+                    (SqlBasicCall) SqlStdOperatorTable.ROW.createCall(
+                        SqlParserPos.ZERO,
+                        node);
             }
             unionInputs.add(convertRowConstructor(bb, call));
         }
-        LogicalValues values = LogicalValues.create(cluster, rowType, tupleList.build());
+        LogicalValues values =
+            LogicalValues.create(cluster, rowType, tupleList.build());
         RelNode resultRel;
         if (unionInputs.isEmpty()) {
             resultRel = values;
@@ -1484,7 +1713,11 @@ public class SqlToRelConverter {
         return resultRel;
     }
 
-    private RexLiteral convertLiteralInValuesList(SqlNode sqlNode, Blackboard bb, RelDataType rowType, int iField) {
+    private RexLiteral convertLiteralInValuesList(
+        SqlNode sqlNode,
+        Blackboard bb,
+        RelDataType rowType,
+        int iField) {
         if (!(sqlNode instanceof SqlLiteral)) {
             return null;
         }
@@ -1497,7 +1730,10 @@ public class SqlToRelConverter {
             return null;
         }
 
-        RexNode literalExpr = exprConverter.convertLiteral(bb, (SqlLiteral) sqlNode);
+        RexNode literalExpr =
+            exprConverter.convertLiteral(
+                bb,
+                (SqlLiteral) sqlNode);
 
         if (!(literalExpr instanceof RexLiteral)) {
             assert literalExpr.isA(SqlKind.CAST);
@@ -1514,15 +1750,24 @@ public class SqlToRelConverter {
         Comparable value = literal.getValue();
 
         if (SqlTypeUtil.isExactNumeric(type) && SqlTypeUtil.hasScale(type)) {
-            BigDecimal roundedValue = NumberUtil.rescaleBigDecimal((BigDecimal) value, type.getScale());
-            return rexBuilder.makeExactLiteral(roundedValue, type);
+            BigDecimal roundedValue =
+                NumberUtil.rescaleBigDecimal(
+                    (BigDecimal) value,
+                    type.getScale());
+            return rexBuilder.makeExactLiteral(
+                roundedValue,
+                type);
         }
 
-        if ((value instanceof NlsString) && (type.getSqlTypeName() == SqlTypeName.CHAR)) {
+        if ((value instanceof NlsString)
+            && (type.getSqlTypeName() == SqlTypeName.CHAR)) {
             // pad fixed character type
             NlsString unpadded = (NlsString) value;
-            return rexBuilder.makeCharLiteral(new NlsString(Spaces.padRight(unpadded.getValue(), type.getPrecision()),
-                    unpadded.getCharsetName(), unpadded.getCollation()));
+            return rexBuilder.makeCharLiteral(
+                new NlsString(
+                    Spaces.padRight(unpadded.getValue(), type.getPrecision()),
+                    unpadded.getCharsetName(),
+                    unpadded.getCollation()));
         }
         return literal;
     }
@@ -1549,41 +1794,53 @@ public class SqlToRelConverter {
      *                                     node, only register it if it's a scalar
      *                                     sub-query
      */
-    private void findSubQueries(Blackboard bb, SqlNode node, RelOptUtil.Logic logic,
-            boolean registerOnlyScalarSubQueries) {
+    private void findSubQueries(
+        Blackboard bb,
+        SqlNode node,
+        RelOptUtil.Logic logic,
+        boolean registerOnlyScalarSubQueries) {
         final SqlKind kind = node.getKind();
         switch (kind) {
-        case EXISTS:
-        case SELECT:
-        case MULTISET_QUERY_CONSTRUCTOR:
-        case MULTISET_VALUE_CONSTRUCTOR:
-        case ARRAY_QUERY_CONSTRUCTOR:
-        case CURSOR:
-        case SCALAR_QUERY:
-            if (!registerOnlyScalarSubQueries || (kind == SqlKind.SCALAR_QUERY)) {
-                bb.registerSubQuery(node, RelOptUtil.Logic.TRUE_FALSE);
-            }
-            return;
-        case IN:
-            if (((SqlCall) node).getOperator() == SqlStdOperatorTable.NOT_IN) {
+            case EXISTS:
+            case SELECT:
+            case MULTISET_QUERY_CONSTRUCTOR:
+            case MULTISET_VALUE_CONSTRUCTOR:
+            case ARRAY_QUERY_CONSTRUCTOR:
+            case CURSOR:
+            case SCALAR_QUERY:
+                if (!registerOnlyScalarSubQueries
+                    || (kind == SqlKind.SCALAR_QUERY)) {
+                    bb.registerSubQuery(node, RelOptUtil.Logic.TRUE_FALSE);
+                }
+                return;
+            case IN:
+                if (((SqlCall) node).getOperator() == SqlStdOperatorTable.NOT_IN) {
+                    logic = logic.negate();
+                }
+                break;
+            case NOT:
                 logic = logic.negate();
-            }
-            break;
-        case NOT:
-            logic = logic.negate();
-            break;
+                break;
         }
         if (node instanceof SqlCall) {
             for (SqlNode operand : ((SqlCall) node).getOperandList()) {
                 if (operand != null) {
                     // In the case of an IN expression, locate scalar
                     // sub-queries so we can convert them to constants
-                    findSubQueries(bb, operand, logic, kind == SqlKind.IN || registerOnlyScalarSubQueries);
+                    findSubQueries(
+                        bb,
+                        operand,
+                        logic,
+                        kind == SqlKind.IN || registerOnlyScalarSubQueries);
                 }
             }
         } else if (node instanceof SqlNodeList) {
             for (SqlNode child : (SqlNodeList) node) {
-                findSubQueries(bb, child, logic, kind == SqlKind.IN || registerOnlyScalarSubQueries);
+                findSubQueries(
+                    bb,
+                    child,
+                    logic,
+                    kind == SqlKind.IN || registerOnlyScalarSubQueries);
             }
         }
 
@@ -1593,15 +1850,15 @@ public class SqlToRelConverter {
         // before the IN expression is converted.
         if (kind == SqlKind.IN) {
             switch (logic) {
-            case TRUE_FALSE_UNKNOWN:
-                if (validator.getValidatedNodeType(node).isNullable()) {
-                    break;
-                } else if (true) {
-                    break;
-                }
-                // fall through
-            case UNKNOWN_AS_FALSE:
-                logic = RelOptUtil.Logic.TRUE;
+                case TRUE_FALSE_UNKNOWN:
+                    if (validator.getValidatedNodeType(node).isNullable()) {
+                        break;
+                    } else if (true) {
+                        break;
+                    }
+                    // fall through
+                case UNKNOWN_AS_FALSE:
+                    logic = RelOptUtil.Logic.TRUE;
             }
             bb.registerSubQuery(node, logic);
         }
@@ -1613,9 +1870,11 @@ public class SqlToRelConverter {
      * @param node Expression to translate
      * @return Converted expression
      */
-    public RexNode convertExpression(SqlNode node) {
+    public RexNode convertExpression(
+        SqlNode node) {
         Map<String, RelDataType> nameToTypeMap = Collections.emptyMap();
-        final ParameterScope scope = new ParameterScope((SqlValidatorImpl) validator, nameToTypeMap);
+        final ParameterScope scope =
+            new ParameterScope((SqlValidatorImpl) validator, nameToTypeMap);
         final Blackboard bb = createBlackboard(scope, null, false);
         return bb.convertExpression(node);
     }
@@ -1631,12 +1890,15 @@ public class SqlToRelConverter {
      *                      this map
      * @return Converted expression
      */
-    public RexNode convertExpression(SqlNode node, Map<String, RexNode> nameToNodeMap) {
+    public RexNode convertExpression(
+        SqlNode node,
+        Map<String, RexNode> nameToNodeMap) {
         final Map<String, RelDataType> nameToTypeMap = new HashMap<>();
         for (Map.Entry<String, RexNode> entry : nameToNodeMap.entrySet()) {
             nameToTypeMap.put(entry.getKey(), entry.getValue().getType());
         }
-        final ParameterScope scope = new ParameterScope((SqlValidatorImpl) validator, nameToTypeMap);
+        final ParameterScope scope =
+            new ParameterScope((SqlValidatorImpl) validator, nameToTypeMap);
         final Blackboard bb = createBlackboard(scope, nameToNodeMap, false);
         return bb.convertExpression(node);
     }
@@ -1652,7 +1914,9 @@ public class SqlToRelConverter {
      * @param bb   Blackboard
      * @return null to proceed with the usual expression translation process
      */
-    protected RexNode convertExtendedExpression(SqlNode node, Blackboard bb) {
+    protected RexNode convertExtendedExpression(
+        SqlNode node,
+        Blackboard bb) {
         return null;
     }
 
@@ -1660,7 +1924,8 @@ public class SqlToRelConverter {
         SqlCall call = (SqlCall) node;
         SqlCall aggCall = call.operand(0);
         SqlNode windowOrRef = call.operand(1);
-        final SqlWindow window = validator.resolveWindow(windowOrRef, bb.scope, true);
+        final SqlWindow window =
+            validator.resolveWindow(windowOrRef, bb.scope, true);
 
         // ROW_NUMBER() expects specific kind of framing.
         if (aggCall.getKind() == SqlKind.ROW_NUMBER) {
@@ -1669,7 +1934,8 @@ public class SqlToRelConverter {
             window.setRows(SqlLiteral.createBoolean(true, SqlParserPos.ZERO));
         }
         final SqlNodeList partitionList = window.getPartitionList();
-        final ImmutableList.Builder<RexNode> partitionKeys = ImmutableList.builder();
+        final ImmutableList.Builder<RexNode> partitionKeys =
+            ImmutableList.builder();
         for (SqlNode partition : partitionList) {
             partitionKeys.add(bb.convertExpression(partition));
         }
@@ -1682,10 +1948,12 @@ public class SqlToRelConverter {
             // have failed validation.
             orderList = bb.scope.getOrderList();
             if (orderList == null) {
-                throw new AssertionError("Relation should have sort key for implicit ORDER BY");
+                throw new AssertionError(
+                    "Relation should have sort key for implicit ORDER BY");
             }
         }
-        final ImmutableList.Builder<RexFieldCollation> orderKeys = ImmutableList.builder();
+        final ImmutableList.Builder<RexFieldCollation> orderKeys =
+            ImmutableList.builder();
         final Set<SqlKind> flags = EnumSet.noneOf(SqlKind.class);
         for (SqlNode order : orderList) {
             flags.clear();
@@ -1693,17 +1961,23 @@ public class SqlToRelConverter {
             orderKeys.add(new RexFieldCollation(e, flags));
         }
         try {
-            Preconditions.checkArgument(bb.window == null, "already in window agg mode");
+            Preconditions.checkArgument(bb.window == null,
+                "already in window agg mode");
             bb.window = window;
             RexNode rexAgg = exprConverter.convertCall(bb, aggCall);
-            rexAgg = rexBuilder.ensureType(validator.getValidatedNodeType(call), rexAgg, false);
+            rexAgg =
+                rexBuilder.ensureType(
+                    validator.getValidatedNodeType(call), rexAgg, false);
 
             // Walk over the tree and apply 'over' to all agg functions. This is
             // necessary because the returned expression is not necessarily a call
             // to an agg function. For example, AVG(x) becomes SUM(x) / COUNT(x).
-            final RexShuttle visitor = new HistogramShuttle(partitionKeys.build(), orderKeys.build(),
+            final RexShuttle visitor =
+                new HistogramShuttle(
+                    partitionKeys.build(), orderKeys.build(),
                     RexWindowBound.create(window.getLowerBound(), lowerBound),
-                    RexWindowBound.create(window.getUpperBound(), upperBound), window);
+                    RexWindowBound.create(window.getUpperBound(), upperBound),
+                    window);
             return rexAgg.accept(visitor);
         } finally {
             bb.window = null;
@@ -1727,7 +2001,9 @@ public class SqlToRelConverter {
      *             <li>or any combination of the above.
      *             </ul>
      */
-    protected void convertFrom(Blackboard bb, SqlNode from) {
+    protected void convertFrom(
+        Blackboard bb,
+        SqlNode from) {
         if (from == null) {
             bb.setRoot(LogicalValues.createOneRow(cluster), false);
             return;
@@ -1736,133 +2012,162 @@ public class SqlToRelConverter {
         final SqlCall call;
         final SqlNode[] operands;
         switch (from.getKind()) {
-        case MATCH_RECOGNIZE:
-            convertMatchRecognize(bb, (SqlCall) from);
-            return;
-
-        case AS:
-            convertFrom(bb, ((SqlCall) from).operand(0));
-            return;
+            case MATCH_RECOGNIZE:
+                convertMatchRecognize(bb, (SqlCall) from);
+                return;
 
-        case WITH_ITEM:
-            convertFrom(bb, ((SqlWithItem) from).query);
-            return;
+            case AS:
+                convertFrom(bb, ((SqlCall) from).operand(0));
+                return;
 
-        case WITH:
-            convertFrom(bb, ((SqlWith) from).body);
-            return;
+            case WITH_ITEM:
+                convertFrom(bb, ((SqlWithItem) from).query);
+                return;
 
-        case TABLESAMPLE:
-            operands = ((SqlBasicCall) from).getOperands();
-            SqlSampleSpec sampleSpec = SqlLiteral.sampleValue(operands[1]);
-            if (sampleSpec instanceof SqlSampleSpec.SqlSubstitutionSampleSpec) {
-                String sampleName = ((SqlSampleSpec.SqlSubstitutionSampleSpec) sampleSpec).getName();
-                datasetStack.push(sampleName);
-                convertFrom(bb, operands[0]);
-                datasetStack.pop();
-            } else if (sampleSpec instanceof SqlSampleSpec.SqlTableSampleSpec) {
-                SqlSampleSpec.SqlTableSampleSpec tableSampleSpec = (SqlSampleSpec.SqlTableSampleSpec) sampleSpec;
-                convertFrom(bb, operands[0]);
-                RelOptSamplingParameters params = new RelOptSamplingParameters(tableSampleSpec.isBernoulli(),
-                        tableSampleSpec.getSamplePercentage(), tableSampleSpec.isRepeatable(),
-                        tableSampleSpec.getRepeatableSeed());
-                bb.setRoot(new Sample(cluster, bb.root, params), false);
-            } else {
-                throw new AssertionError("unknown TABLESAMPLE type: " + sampleSpec);
-            }
-            return;
+            case WITH:
+                convertFrom(bb, ((SqlWith) from).body);
+                return;
 
-        case IDENTIFIER:
-            convertIdentifier(bb, (SqlIdentifier) from, null);
-            return;
+            case TABLESAMPLE:
+                operands = ((SqlBasicCall) from).getOperands();
+                SqlSampleSpec sampleSpec = SqlLiteral.sampleValue(operands[1]);
+                if (sampleSpec instanceof SqlSampleSpec.SqlSubstitutionSampleSpec) {
+                    String sampleName =
+                        ((SqlSampleSpec.SqlSubstitutionSampleSpec) sampleSpec)
+                            .getName();
+                    datasetStack.push(sampleName);
+                    convertFrom(bb, operands[0]);
+                    datasetStack.pop();
+                } else if (sampleSpec instanceof SqlSampleSpec.SqlTableSampleSpec) {
+                    SqlSampleSpec.SqlTableSampleSpec tableSampleSpec =
+                        (SqlSampleSpec.SqlTableSampleSpec) sampleSpec;
+                    convertFrom(bb, operands[0]);
+                    RelOptSamplingParameters params =
+                        new RelOptSamplingParameters(
+                            tableSampleSpec.isBernoulli(),
+                            tableSampleSpec.getSamplePercentage(),
+                            tableSampleSpec.isRepeatable(),
+                            tableSampleSpec.getRepeatableSeed());
+                    bb.setRoot(new Sample(cluster, bb.root, params), false);
+                } else {
+                    throw new AssertionError("unknown TABLESAMPLE type: " + sampleSpec);
+                }
+                return;
 
-        case EXTEND:
-            call = (SqlCall) from;
-            SqlIdentifier id = (SqlIdentifier) call.getOperandList().get(0);
-            SqlNodeList extendedColumns = (SqlNodeList) call.getOperandList().get(1);
-            convertIdentifier(bb, id, extendedColumns);
-            return;
+            case IDENTIFIER:
+                convertIdentifier(bb, (SqlIdentifier) from, null);
+                return;
 
-        case JOIN:
-            final SqlJoin join = (SqlJoin) from;
-            final SqlValidatorScope scope = validator.getJoinScope(from);
-            final Blackboard fromBlackboard = createBlackboard(scope, null, false);
-            SqlNode left = join.getLeft();
-            SqlNode right = join.getRight();
-            final boolean isNatural = join.isNatural();
-            final JoinType joinType = join.getJoinType();
-            final SqlValidatorScope leftScope = Util.first(validator.getJoinScope(left),
-                    ((DelegatingScope) bb.scope).getParent());
-            final Blackboard leftBlackboard = createBlackboard(leftScope, null, false);
-            final SqlValidatorScope rightScope = Util.first(validator.getJoinScope(right),
-                    ((DelegatingScope) bb.scope).getParent());
-            final Blackboard rightBlackboard = createBlackboard(rightScope, null, false);
-            convertFrom(leftBlackboard, left);
-            RelNode leftRel = leftBlackboard.root;
-            convertFrom(rightBlackboard, right);
-            RelNode rightRel = rightBlackboard.root;
-            JoinRelType convertedJoinType = convertJoinType(joinType);
-            RexNode conditionExp;
-            final SqlValidatorNamespace leftNamespace = validator.getNamespace(left);
-            final SqlValidatorNamespace rightNamespace = validator.getNamespace(right);
-            if (isNatural) {
-                final RelDataType leftRowType = leftNamespace.getRowType();
-                final RelDataType rightRowType = rightNamespace.getRowType();
-                final List<String> columnList = SqlValidatorUtil.deriveNaturalJoinColumnList(leftRowType, rightRowType);
-                conditionExp = convertUsing(leftNamespace, rightNamespace, columnList);
-            } else {
-                conditionExp = convertJoinCondition(fromBlackboard, leftNamespace, rightNamespace, join.getCondition(),
-                        join.getConditionType(), leftRel, rightRel);
-            }
+            case EXTEND:
+                call = (SqlCall) from;
+                SqlIdentifier id = (SqlIdentifier) call.getOperandList().get(0);
+                SqlNodeList extendedColumns = (SqlNodeList) call.getOperandList().get(1);
+                convertIdentifier(bb, id, extendedColumns);
+                return;
 
-            final RelNode joinRel = createJoin(fromB

<TRUNCATED>

[12/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
index 46ad628..8e69e68 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinMeta.java
@@ -59,29 +59,25 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public ExecuteBatchResult prepareAndExecuteBatch(StatementHandle sh, List<String> sqlCommands)
-            throws NoSuchStatementException {
-        return new ExecuteBatchResult(new long[] {});
+    public ExecuteBatchResult prepareAndExecuteBatch(StatementHandle sh, List<String> sqlCommands) throws NoSuchStatementException {
+        return new ExecuteBatchResult(new long[]{});
     }
 
     @Override
-    public ExecuteBatchResult executeBatch(StatementHandle sh, List<List<TypedValue>> parameterValues)
-            throws NoSuchStatementException {
-        return new ExecuteBatchResult(new long[] {});
+    public ExecuteBatchResult executeBatch(StatementHandle sh, List<List<TypedValue>> parameterValues) throws NoSuchStatementException {
+        return new ExecuteBatchResult(new long[]{});
     }
 
     // real execution happens in KylinResultSet.execute()
     @Override
     @Deprecated
-    public ExecuteResult execute(StatementHandle sh, List<TypedValue> parameterValues, long maxRowCount)
-            throws NoSuchStatementException {
+    public ExecuteResult execute(StatementHandle sh, List<TypedValue> parameterValues, long maxRowCount) throws NoSuchStatementException {
         final MetaResultSet metaResultSet = MetaResultSet.create(sh.connectionId, sh.id, false, sh.signature, null);
         return new ExecuteResult(Collections.singletonList(metaResultSet));
     }
 
     @Override
-    public ExecuteResult execute(StatementHandle sh, List<TypedValue> parameterValues, int maxRowsInFirstFrame)
-            throws NoSuchStatementException {
+    public ExecuteResult execute(StatementHandle sh, List<TypedValue> parameterValues, int maxRowsInFirstFrame) throws NoSuchStatementException {
         final MetaResultSet metaResultSet = MetaResultSet.create(sh.connectionId, sh.id, false, sh.signature, null);
         return new ExecuteResult(Collections.singletonList(metaResultSet));
     }
@@ -105,8 +101,7 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public ExecuteResult prepareAndExecute(StatementHandle sh, String sql, long maxRowCount, int maxRowsInFirstFrame,
-            PrepareCallback callback) throws NoSuchStatementException {
+    public ExecuteResult prepareAndExecute(StatementHandle sh, String sql, long maxRowCount, int maxRowsInFirstFrame, PrepareCallback callback) throws NoSuchStatementException {
         try {
             synchronized (callback.getMonitor()) {
                 callback.clear();
@@ -156,8 +151,7 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public MetaResultSet getTables(ConnectionHandle ch, String catalog, Pat schemaPattern, Pat tableNamePattern,
-            List<String> typeList) {
+    public MetaResultSet getTables(ConnectionHandle ch, String catalog, Pat schemaPattern, Pat tableNamePattern, List<String> typeList) {
         List<KMetaTable> tables = getMetaProject().getTables(catalog, schemaPattern, tableNamePattern, typeList);
         return createResultSet(tables, KMetaTable.class, //
                 "TABLE_CAT", //
@@ -173,10 +167,8 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public MetaResultSet getColumns(ConnectionHandle ch, String catalog, Pat schemaPattern, Pat tableNamePattern,
-            Pat columnNamePattern) {
-        List<KMetaColumn> columns = getMetaProject().getColumns(catalog, schemaPattern, tableNamePattern,
-                columnNamePattern);
+    public MetaResultSet getColumns(ConnectionHandle ch, String catalog, Pat schemaPattern, Pat tableNamePattern, Pat columnNamePattern) {
+        List<KMetaColumn> columns = getMetaProject().getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
         return createResultSet(columns, KMetaColumn.class, //
                 "TABLE_CAT", //
                 "TABLE_SCHEM", //
@@ -223,8 +215,7 @@ public class KylinMeta extends MetaImpl {
         }
 
         CursorFactory cursorFactory = CursorFactory.record(clazz, fields, fieldNames);
-        Signature signature = new Signature(columns, "", null, Collections.<String, Object> emptyMap(), cursorFactory,
-                StatementType.SELECT);
+        Signature signature = new Signature(columns, "", null, Collections.<String, Object> emptyMap(), cursorFactory, StatementType.SELECT);
         StatementHandle sh = this.createStatement(connection().handle);
         Frame frame = new Frame(0, true, iterable);
 
@@ -327,16 +318,13 @@ public class KylinMeta extends MetaImpl {
         }
 
         @SuppressWarnings("unchecked")
-        public List<KMetaTable> getTables(String catalog, Pat schemaPattern, Pat tableNamePattern,
-                List<String> typeList) {
+        public List<KMetaTable> getTables(String catalog, Pat schemaPattern, Pat tableNamePattern, List<String> typeList) {
             return (List<KMetaTable>) searchByPatterns(this, Pat.of(catalog), schemaPattern, tableNamePattern);
         }
 
         @SuppressWarnings("unchecked")
-        public List<KMetaColumn> getColumns(String catalog, Pat schemaPattern, Pat tableNamePattern,
-                Pat columnNamePattern) {
-            return (List<KMetaColumn>) searchByPatterns(this, Pat.of(catalog), schemaPattern, tableNamePattern,
-                    columnNamePattern);
+        public List<KMetaColumn> getColumns(String catalog, Pat schemaPattern, Pat tableNamePattern, Pat columnNamePattern) {
+            return (List<KMetaColumn>) searchByPatterns(this, Pat.of(catalog), schemaPattern, tableNamePattern, columnNamePattern);
         }
 
         @Override
@@ -387,8 +375,7 @@ public class KylinMeta extends MetaImpl {
     public static class KMetaTable extends MetaTable implements NamedWithChildren {
         public final List<KMetaColumn> columns;
 
-        public KMetaTable(String tableCat, String tableSchem, String tableName, String tableType,
-                List<KMetaColumn> columns) {
+        public KMetaTable(String tableCat, String tableSchem, String tableName, String tableType, List<KMetaColumn> columns) {
             super(tableCat, tableSchem, tableName, tableType);
             this.columns = columns;
         }
@@ -401,11 +388,8 @@ public class KylinMeta extends MetaImpl {
 
     public static class KMetaColumn extends MetaColumn implements NamedWithChildren {
 
-        public KMetaColumn(String tableCat, String tableSchem, String tableName, String columnName, int dataType,
-                String typeName, int columnSize, Integer decimalDigits, int numPrecRadix, int nullable,
-                int charOctetLength, int ordinalPosition, String isNullable) {
-            super(tableCat, tableSchem, tableName, columnName, dataType, typeName, columnSize, decimalDigits,
-                    numPrecRadix, nullable, charOctetLength, ordinalPosition, isNullable);
+        public KMetaColumn(String tableCat, String tableSchem, String tableName, String columnName, int dataType, String typeName, int columnSize, Integer decimalDigits, int numPrecRadix, int nullable, int charOctetLength, int ordinalPosition, String isNullable) {
+            super(tableCat, tableSchem, tableName, columnName, dataType, typeName, columnSize, decimalDigits, numPrecRadix, nullable, charOctetLength, ordinalPosition, isNullable);
         }
 
         @Override
@@ -415,8 +399,7 @@ public class KylinMeta extends MetaImpl {
     }
 
     @Override
-    public Frame fetch(StatementHandle h, long offset, int fetchMaxRowCount)
-            throws NoSuchStatementException, MissingResultsException {
+    public Frame fetch(StatementHandle h, long offset, int fetchMaxRowCount) throws NoSuchStatementException, MissingResultsException {
         // TODO Auto-generated method stub
         return null;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
index baa740d..510243b 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinPreparedStatement.java
@@ -35,8 +35,7 @@ import org.apache.calcite.avatica.remote.TypedValue;
 
 public class KylinPreparedStatement extends AvaticaPreparedStatement {
 
-    protected KylinPreparedStatement(AvaticaConnection connection, StatementHandle h, Signature signature,
-            int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+    protected KylinPreparedStatement(AvaticaConnection connection, StatementHandle h, Signature signature, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
         super(connection, h, signature, resultSetType, resultSetConcurrency, resultSetHoldability);
         if (this.handle.signature == null)
             this.handle.signature = signature;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
index b267942..1c1157a 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinResultSet.java
@@ -37,8 +37,7 @@ import org.apache.kylin.jdbc.IRemoteClient.QueryResult;
 
 public class KylinResultSet extends AvaticaResultSet {
 
-    public KylinResultSet(AvaticaStatement statement, QueryState state, Signature signature,
-            ResultSetMetaData resultSetMetaData, TimeZone timeZone, Frame firstFrame) {
+    public KylinResultSet(AvaticaStatement statement, QueryState state, Signature signature, ResultSetMetaData resultSetMetaData, TimeZone timeZone, Frame firstFrame) {
         super(statement, state, signature, resultSetMetaData, timeZone, firstFrame);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinStatement.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinStatement.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinStatement.java
index 1f3c184..6596389 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinStatement.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinStatement.java
@@ -24,8 +24,7 @@ import org.apache.calcite.avatica.Meta.StatementHandle;
 
 public class KylinStatement extends AvaticaStatement {
 
-    protected KylinStatement(AvaticaConnection connection, StatementHandle h, int resultSetType,
-            int resultSetConcurrency, int resultSetHoldability) {
+    protected KylinStatement(AvaticaConnection connection, StatementHandle h, int resultSetType, int resultSetConcurrency, int resultSetHoldability) {
         super(connection, h, resultSetType, resultSetConcurrency, resultSetHoldability);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/json/SQLResponseStub.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/json/SQLResponseStub.java b/jdbc/src/main/java/org/apache/kylin/jdbc/json/SQLResponseStub.java
index a444155..a05b6d6 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/json/SQLResponseStub.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/json/SQLResponseStub.java
@@ -154,7 +154,7 @@ public class SQLResponseStub implements Serializable {
     }
 
     @JsonIgnoreProperties(ignoreUnknown = true)
-    public static class ColumnMetaStub implements Serializable {
+    public static class ColumnMetaStub implements Serializable{
 
         private boolean isAutoIncrement;
         private boolean isCaseSensitive;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/test/java/org/apache/kylin/jdbc/DriverTest.java
----------------------------------------------------------------------
diff --git a/jdbc/src/test/java/org/apache/kylin/jdbc/DriverTest.java b/jdbc/src/test/java/org/apache/kylin/jdbc/DriverTest.java
index b4cd199..0887634 100644
--- a/jdbc/src/test/java/org/apache/kylin/jdbc/DriverTest.java
+++ b/jdbc/src/test/java/org/apache/kylin/jdbc/DriverTest.java
@@ -177,8 +177,7 @@ public class DriverTest {
         info.put("password", "KYLIN");
         Connection conn = driver.connect("jdbc:kylin://localhost:7070/default", info);
 
-        PreparedStatement state = conn
-                .prepareStatement("select cal_dt, count(*) from test_kylin_fact where seller_id=? group by cal_dt");
+        PreparedStatement state = conn.prepareStatement("select cal_dt, count(*) from test_kylin_fact where seller_id=? group by cal_dt");
         state.setLong(1, 10000001);
         ResultSet resultSet = state.executeQuery();
 
@@ -212,12 +211,7 @@ public class DriverTest {
         System.out.println("Metadata:");
 
         for (int i = 0; i < metadata.getColumnCount(); i++) {
-            String metaStr = metadata.getCatalogName(i + 1) + " " + metadata.getColumnClassName(i + 1) + " "
-                    + metadata.getColumnDisplaySize(i + 1) + " " + metadata.getColumnLabel(i + 1) + " "
-                    + metadata.getColumnName(i + 1) + " " + metadata.getColumnType(i + 1) + " "
-                    + metadata.getColumnTypeName(i + 1) + " " + metadata.getPrecision(i + 1) + " "
-                    + metadata.getScale(i + 1) + " " + metadata.getSchemaName(i + 1) + " "
-                    + metadata.getTableName(i + 1);
+            String metaStr = metadata.getCatalogName(i + 1) + " " + metadata.getColumnClassName(i + 1) + " " + metadata.getColumnDisplaySize(i + 1) + " " + metadata.getColumnLabel(i + 1) + " " + metadata.getColumnName(i + 1) + " " + metadata.getColumnType(i + 1) + " " + metadata.getColumnTypeName(i + 1) + " " + metadata.getPrecision(i + 1) + " " + metadata.getScale(i + 1) + " " + metadata.getSchemaName(i + 1) + " " + metadata.getTableName(i + 1);
             System.out.println(metaStr);
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/test/java/org/apache/kylin/jdbc/DummyClient.java
----------------------------------------------------------------------
diff --git a/jdbc/src/test/java/org/apache/kylin/jdbc/DummyClient.java b/jdbc/src/test/java/org/apache/kylin/jdbc/DummyClient.java
index bd0ce5c..6578825 100644
--- a/jdbc/src/test/java/org/apache/kylin/jdbc/DummyClient.java
+++ b/jdbc/src/test/java/org/apache/kylin/jdbc/DummyClient.java
@@ -64,8 +64,7 @@ public class DummyClient implements IRemoteClient {
     }
 
     @Override
-    public QueryResult executeQuery(String sql, List<AvaticaParameter> params, List<Object> paramValues,
-            Map<String, String> queryToggles) throws IOException {
+    public QueryResult executeQuery(String sql, List<AvaticaParameter> params, List<Object> paramValues, Map<String, String> queryToggles) throws IOException {
         List<Object> data = new ArrayList<Object>();
         Object[] row = new Object[] { "foo", "bar", "tool" };
         data.add(row);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/test/java/org/apache/kylin/jdbc/SQLResonseStubTest.java
----------------------------------------------------------------------
diff --git a/jdbc/src/test/java/org/apache/kylin/jdbc/SQLResonseStubTest.java b/jdbc/src/test/java/org/apache/kylin/jdbc/SQLResonseStubTest.java
index c2e5445..e090af6 100644
--- a/jdbc/src/test/java/org/apache/kylin/jdbc/SQLResonseStubTest.java
+++ b/jdbc/src/test/java/org/apache/kylin/jdbc/SQLResonseStubTest.java
@@ -36,8 +36,7 @@ public class SQLResonseStubTest {
 
     @Test
     public void testReadValuePartRecognizedField() throws IOException {
-        final String payload = "{ \"columnMetas\":[ { \"isNullable\":1, \"displaySize\":0, \"schemaName\":null, \"catelogName\":null, \"tableName\":null, \"precision\":0, \"scale\":0, \"columnType\":91, \"columnTypeName\":\"DATE\", \"readOnly\":true, \"writable\":false, \"caseSensitive\":true, \"searchable\":false, \"currency\":false, \"signed\":true, \"autoIncrement\":false, \"definitelyWritable\":false },"
-                + "{ \"isNullable\":1, \"displaySize\":10, \"label\":\"LEAF_CATEG_ID\", \"name\":\"LEAF_CATEG_ID\", "
+        final String payload = "{ \"columnMetas\":[ { \"isNullable\":1, \"displaySize\":0, \"schemaName\":null, \"catelogName\":null, \"tableName\":null, \"precision\":0, \"scale\":0, \"columnType\":91, \"columnTypeName\":\"DATE\", \"readOnly\":true, \"writable\":false, \"caseSensitive\":true, \"searchable\":false, \"currency\":false, \"signed\":true, \"autoIncrement\":false, \"definitelyWritable\":false }," + "{ \"isNullable\":1, \"displaySize\":10, \"label\":\"LEAF_CATEG_ID\", \"name\":\"LEAF_CATEG_ID\", "
                 + "\"schemaName\":null, \"catelogName\":null, \"tableName\":null, \"precision\":10, \"scale\":0, \"columnType\":4, \"columnTypeName\":\"INTEGER\", \"readOnly\":true, \"writable\":false, \"caseSensitive\":true, \"searchable\":false, \"currency\":false, \"signed\":true, \"autoIncrement\":false, \"definitelyWritable\":false } ], \"results\":[ [ \"2013-08-07\", \"32996\", \"15\", \"15\", \"Auction\", \"10000000\", \"49.048952730908745\", \"49.048952730908745\", \"49.048952730908745\", \"1\" ], [ \"2013-08-07\", \"43398\", \"0\", \"14\", \"ABIN\", \"10000633\", \"85.78317064220418\", \"85.78317064220418\", \"85.78317064220418\", \"1\" ] ], \"cube\":\"test_kylin_cube_with_slr_desc\", \"affectedRowCount\":0, \"isException\":false, \"exceptionMessage\":null, \"duration\":3451, \"partial\":false }";
         final SQLResponseStub stub = new ObjectMapper().readValue(payload, SQLResponseStub.class);
         assertEquals("test_kylin_cube_with_slr_desc", stub.getCube());
@@ -51,10 +50,7 @@ public class SQLResonseStubTest {
 
     @Test
     public void testReadValueWithUnrecognizedField() throws IOException {
-        final String payload = "{ \"columnMetas\":[ { \"Unrecognized\":0, \"isNullable\":1, \"displaySize\":0, "
-                + "\"label\":\"CAL_DT\", \"name\":\"CAL_DT\", \"schemaName\":null, \"catelogName\":null, "
-                + "\"tableName\":null, \"precision\":0, \"scale\":0, \"columnType\":91, \"columnTypeName\":\"DATE\", "
-                + "\"readOnly\":true, \"writable\":false, \"caseSensitive\":true, \"searchable\":false, \"currency\":false, \"signed\":true, \"autoIncrement\":false, \"definitelyWritable\":false },"
+        final String payload = "{ \"columnMetas\":[ { \"Unrecognized\":0, \"isNullable\":1, \"displaySize\":0, " + "\"label\":\"CAL_DT\", \"name\":\"CAL_DT\", \"schemaName\":null, \"catelogName\":null, " + "\"tableName\":null, \"precision\":0, \"scale\":0, \"columnType\":91, \"columnTypeName\":\"DATE\", " + "\"readOnly\":true, \"writable\":false, \"caseSensitive\":true, \"searchable\":false, \"currency\":false, \"signed\":true, \"autoIncrement\":false, \"definitelyWritable\":false },"
                 + " { \"isNullable\":1, \"displaySize\":10, \"label\":\"LEAF_CATEG_ID\", \"name\":\"LEAF_CATEG_ID\", \"schemaName\":null, \"catelogName\":null, \"tableName\":null, \"precision\":10, \"scale\":0, \"columnType\":4, \"columnTypeName\":\"INTEGER\", \"readOnly\":true, \"writable\":false, \"caseSensitive\":true, \"searchable\":false, \"currency\":false, \"signed\":true, \"autoIncrement\":false, \"definitelyWritable\":false } ], \"results\":[ [ \"2013-08-07\", \"32996\", \"15\", \"15\", \"Auction\", \"10000000\", \"49.048952730908745\", \"49.048952730908745\", \"49.048952730908745\", \"1\" ], [ \"2013-08-07\", \"43398\", \"0\", \"14\", \"ABIN\", \"10000633\", \"85.78317064220418\", \"85.78317064220418\", \"85.78317064220418\", \"1\" ] ], \"cube\":\"test_kylin_cube_with_slr_desc\", \"affectedRowCount\":0, \"isException\":false, \"exceptionMessage\":null, \"duration\":3451, \"partial\":false, \"hitCache\":false }";
         final SQLResponseStub stub = new ObjectMapper().readValue(payload, SQLResponseStub.class);
         assertEquals("test_kylin_cube_with_slr_desc", stub.getCube());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java b/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
index 833672b..4dcfdb2 100644
--- a/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
@@ -60,8 +60,7 @@ public class ITDictionaryManagerTest extends LocalFileMetadataTestCase {
     @Test
     public void basic() throws Exception {
         dictMgr = DictionaryManager.getInstance(getTestConfig());
-        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig())
-                .getCubeDesc("test_kylin_cube_without_slr_desc");
+        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_without_slr_desc");
         TblColRef col = cubeDesc.findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
 
         MockDistinctColumnValuesProvider mockupData = new MockDistinctColumnValuesProvider("A", "B", "C");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITDoggedCubeBuilderTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITDoggedCubeBuilderTest.java b/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITDoggedCubeBuilderTest.java
index fae04f3..dbd9ce2 100644
--- a/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITDoggedCubeBuilderTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITDoggedCubeBuilderTest.java
@@ -71,8 +71,7 @@ public class ITDoggedCubeBuilderTest extends LocalFileMetadataTestCase {
         CubeManager cubeManager = CubeManager.getInstance(kylinConfig);
 
         cube = cubeManager.getCube("ssb");
-        flatTable = LocalFileMetadataTestCase.LOCALMETA_TEST_DATA
-                + "/data/kylin_intermediate_ssb_19920101000000_19920201000000.csv";
+        flatTable = LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/data/kylin_intermediate_ssb_19920101000000_19920201000000.csv";
         dictionaryMap = ITInMemCubeBuilderTest.getDictionaryMap(cube, flatTable);
     }
 
@@ -162,4 +161,4 @@ public class ITDoggedCubeBuilderTest extends LocalFileMetadataTestCase {
             writer.close();
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITInMemCubeBuilderTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITInMemCubeBuilderTest.java b/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITInMemCubeBuilderTest.java
index f42a231..ea66c73 100644
--- a/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITInMemCubeBuilderTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/inmemcubing/ITInMemCubeBuilderTest.java
@@ -119,24 +119,21 @@ public class ITInMemCubeBuilderTest extends LocalFileMetadataTestCase {
         try {
             // round 1
             {
-                Future<?> future = executorService
-                        .submit(cubeBuilder.buildAsRunnable(queue, new ConsoleGTRecordWriter()));
+                Future<?> future = executorService.submit(cubeBuilder.buildAsRunnable(queue, new ConsoleGTRecordWriter()));
                 feedData(cube, flatTable, queue, nInpRows);
                 future.get();
             }
 
             // round 2, zero input
             {
-                Future<?> future = executorService
-                        .submit(cubeBuilder.buildAsRunnable(queue, new ConsoleGTRecordWriter()));
+                Future<?> future = executorService.submit(cubeBuilder.buildAsRunnable(queue, new ConsoleGTRecordWriter()));
                 feedData(cube, flatTable, queue, 0);
                 future.get();
             }
 
             // round 3
             {
-                Future<?> future = executorService
-                        .submit(cubeBuilder.buildAsRunnable(queue, new ConsoleGTRecordWriter()));
+                Future<?> future = executorService.submit(cubeBuilder.buildAsRunnable(queue, new ConsoleGTRecordWriter()));
                 feedData(cube, flatTable, queue, nInpRows);
                 future.get();
             }
@@ -147,13 +144,11 @@ public class ITInMemCubeBuilderTest extends LocalFileMetadataTestCase {
         }
     }
 
-    static void feedData(final CubeInstance cube, final String flatTable, ArrayBlockingQueue<List<String>> queue,
-            int count) throws IOException, InterruptedException {
+    static void feedData(final CubeInstance cube, final String flatTable, ArrayBlockingQueue<List<String>> queue, int count) throws IOException, InterruptedException {
         feedData(cube, flatTable, queue, count, 0);
     }
 
-    static void feedData(final CubeInstance cube, final String flatTable, ArrayBlockingQueue<List<String>> queue,
-            int count, long randSeed) throws IOException, InterruptedException {
+    static void feedData(final CubeInstance cube, final String flatTable, ArrayBlockingQueue<List<String>> queue, int count, long randSeed) throws IOException, InterruptedException {
         IJoinedFlatTableDesc flatDesc = EngineFactory.getJoinedFlatTableDesc(cube.getDescriptor());
         int nColumns = flatDesc.getAllColumns().size();
 
@@ -195,8 +190,7 @@ public class ITInMemCubeBuilderTest extends LocalFileMetadataTestCase {
     static Map<TblColRef, Dictionary<String>> getDictionaryMap(CubeInstance cube, String flatTable) throws IOException {
         Map<TblColRef, Dictionary<String>> result = Maps.newHashMap();
         CubeDesc desc = cube.getDescriptor();
-        CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(desc),
-                desc);
+        CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(desc), desc);
         int nColumns = flatDesc.getAllColumns().size();
 
         List<TblColRef> columns = Cuboid.getBaseCuboid(desc).getColumns();
@@ -205,8 +199,7 @@ public class ITInMemCubeBuilderTest extends LocalFileMetadataTestCase {
             if (desc.getRowkey().isUseDictionary(col)) {
                 logger.info("Building dictionary for " + col);
                 List<String> valueList = readValueList(flatTable, nColumns, flatDesc.getRowKeyColumnIndexes()[c]);
-                Dictionary<String> dict = DictionaryGenerator.buildDictionary(col.getType(),
-                        new IterableDictionaryValueEnumerator(valueList));
+                Dictionary<String> dict = DictionaryGenerator.buildDictionary(col.getType(), new IterableDictionaryValueEnumerator(valueList));
                 result.put(col, dict);
             }
         }
@@ -226,8 +219,7 @@ public class ITInMemCubeBuilderTest extends LocalFileMetadataTestCase {
                     int colIdxOnFlat = flatTableIdx[i];
                     logger.info("Building dictionary for " + col);
                     List<String> valueList = readValueList(flatTable, nColumns, colIdxOnFlat);
-                    Dictionary<String> dict = DictionaryGenerator.buildDictionary(col.getType(),
-                            new IterableDictionaryValueEnumerator(valueList));
+                    Dictionary<String> dict = DictionaryGenerator.buildDictionary(col.getType(), new IterableDictionaryValueEnumerator(valueList));
 
                     result.put(col, dict);
                 }
@@ -276,4 +268,4 @@ public class ITInMemCubeBuilderTest extends LocalFileMetadataTestCase {
             }
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/dict/ITGlobalDictionaryBuilderTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/dict/ITGlobalDictionaryBuilderTest.java b/kylin-it/src/test/java/org/apache/kylin/dict/ITGlobalDictionaryBuilderTest.java
index 193a86b..df2ebf7 100644
--- a/kylin-it/src/test/java/org/apache/kylin/dict/ITGlobalDictionaryBuilderTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/dict/ITGlobalDictionaryBuilderTest.java
@@ -18,12 +18,6 @@
 
 package org.apache.kylin.dict;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-
-import java.io.IOException;
-import java.util.concurrent.CountDownLatch;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Dictionary;
@@ -33,6 +27,12 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+
 public class ITGlobalDictionaryBuilderTest extends HBaseMetadataTestCase {
     private DictionaryInfo dictionaryInfo;
 
@@ -49,8 +49,7 @@ public class ITGlobalDictionaryBuilderTest extends HBaseMetadataTestCase {
     }
 
     private void cleanup() {
-        String BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict"
-                + dictionaryInfo.getResourceDir() + "/";
+        String BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + dictionaryInfo.getResourceDir() + "/";
         Path basePath = new Path(BASE_DIR);
         try {
             HadoopUtil.getFileSystem(basePath).delete(basePath, true);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/jdbc/ITJDBCDriverTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/jdbc/ITJDBCDriverTest.java b/kylin-it/src/test/java/org/apache/kylin/jdbc/ITJDBCDriverTest.java
index 765b12c..bdf61f8 100644
--- a/kylin-it/src/test/java/org/apache/kylin/jdbc/ITJDBCDriverTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/jdbc/ITJDBCDriverTest.java
@@ -222,9 +222,7 @@ public class ITJDBCDriverTest extends HBaseMetadataTestCase {
     public void testPreparedStatement() throws Exception {
         Connection conn = getConnection();
 
-        PreparedStatement statement = conn.prepareStatement(
-                "select LSTG_FORMAT_NAME, sum(price) as GMV, count(1) as TRANS_CNT from test_kylin_fact "
-                        + "where LSTG_FORMAT_NAME = ? group by LSTG_FORMAT_NAME");
+        PreparedStatement statement = conn.prepareStatement("select LSTG_FORMAT_NAME, sum(price) as GMV, count(1) as TRANS_CNT from test_kylin_fact " + "where LSTG_FORMAT_NAME = ? group by LSTG_FORMAT_NAME");
 
         statement.setString(1, "FP-GTC");
 
@@ -244,8 +242,7 @@ public class ITJDBCDriverTest extends HBaseMetadataTestCase {
 
     @Test
     public void testResultSet() throws Exception {
-        String sql = "select LSTG_FORMAT_NAME, sum(price) as GMV, count(1) as TRANS_CNT from test_kylin_fact \n"
-                + " group by LSTG_FORMAT_NAME ";
+        String sql = "select LSTG_FORMAT_NAME, sum(price) as GMV, count(1) as TRANS_CNT from test_kylin_fact \n" + " group by LSTG_FORMAT_NAME ";
 
         Connection conn = getConnection();
         Statement statement = conn.createStatement();
@@ -273,8 +270,7 @@ public class ITJDBCDriverTest extends HBaseMetadataTestCase {
 
     @Test
     public void testResultSetWithMaxRows() throws Exception {
-        String sql = "select LSTG_FORMAT_NAME, sum(price) as GMV, count(1) as TRANS_CNT from test_kylin_fact \n"
-                + " group by LSTG_FORMAT_NAME ";
+        String sql = "select LSTG_FORMAT_NAME, sum(price) as GMV, count(1) as TRANS_CNT from test_kylin_fact \n" + " group by LSTG_FORMAT_NAME ";
 
         Connection conn = getConnection();
         Statement statement = conn.createStatement();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java b/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
index 9e51306..1ea6507 100644
--- a/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
+++ b/kylin-it/src/test/java/org/apache/kylin/job/BaseTestDistributedScheduler.java
@@ -86,7 +86,7 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
         kylinConfig2 = KylinConfig.createInstanceFromUri(new File(confDstPath2).getAbsolutePath());
 
         initZk();
-
+        
         ZookeeperDistributedLock.Factory factory = new ZookeeperDistributedLock.Factory(kylinConfig1);
         jobLock1 = (ZookeeperDistributedLock) factory.lockForClient(serverName1);
         jobLock2 = (ZookeeperDistributedLock) factory.lockForClient(serverName2);
@@ -114,7 +114,7 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
     @AfterClass
     public static void after() throws Exception {
         jobLock1.purgeLocks("");
-
+        
         if (scheduler1 != null) {
             scheduler1.shutdown();
             scheduler1 = null;
@@ -138,8 +138,7 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
         while (true) {
             AbstractExecutable job = execMgr.getJob(jobId);
             final ExecutableState status = job.getStatus();
-            if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR
-                    || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
+            if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
                 break;
             } else {
                 try {
@@ -197,7 +196,6 @@ public class BaseTestDistributedScheduler extends HBaseMetadataTestCase {
     }
 
     private String getFullLockPath(String segName) {
-        return DistributedScheduler.dropDoubleSlash(
-                "/kylin/" + kylinConfig1.getMetadataUrlPrefix() + DistributedScheduler.getLockPath(segName));
+        return DistributedScheduler.dropDoubleSlash("/kylin/" + kylinConfig1.getMetadataUrlPrefix() + DistributedScheduler.getLockPath(segName));
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 4c6f387..5719523 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -55,13 +55,13 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
-import org.apache.kylin.rest.job.StorageCleanupJob;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.SourcePartition;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
+import org.apache.kylin.rest.job.StorageCleanupJob;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -125,8 +125,7 @@ public class BuildCubeWithEngine {
         System.setProperty("SPARK_HOME", "/usr/local/spark"); // need manually create and put spark to this folder on Jenkins
         System.setProperty("kylin.hadoop.conf.dir", HBaseMetadataTestCase.SANDBOX_TEST_DATA);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException(
-                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
 
         HBaseMetadataTestCase.staticCreateTestMetadata(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
@@ -141,10 +140,7 @@ public class BuildCubeWithEngine {
                 throw new IOException("mkdir fails");
             }
         } catch (IOException e) {
-            throw new RuntimeException(
-                    "failed to create kylin.env.hdfs-working-dir, Please make sure the user has right to access "
-                            + KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(),
-                    e);
+            throw new RuntimeException("failed to create kylin.env.hdfs-working-dir, Please make sure the user has right to access " + KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(), e);
         }
     }
 
@@ -344,8 +340,7 @@ public class BuildCubeWithEngine {
     private Boolean buildSegment(String cubeName, long startDate, long endDate) throws Exception {
         CubeInstance cubeInstance = cubeManager.getCube(cubeName);
         ISource source = SourceFactory.getSource(cubeInstance);
-        SourcePartition partition = source.enrichSourcePartitionBeforeBuild(cubeInstance,
-                new SourcePartition(0, endDate, 0, 0, null, null));
+        SourcePartition partition = source.enrichSourcePartitionBeforeBuild(cubeInstance, new SourcePartition(0, endDate, 0, 0, null, null));
         CubeSegment segment = cubeManager.appendSegment(cubeInstance, partition.getStartDate(), partition.getEndDate());
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 901486d..9c80413 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -60,7 +60,6 @@ import org.apache.kylin.job.streaming.Kafka10DataLoader;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.streaming.StreamingConfig;
 import org.apache.kylin.metadata.streaming.StreamingManager;
-import org.apache.kylin.rest.job.StorageCleanupJob;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.SourcePartition;
@@ -69,6 +68,7 @@ import org.apache.kylin.source.kafka.config.BrokerConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.apache.kylin.storage.hbase.util.ZookeeperUtil;
+import org.apache.kylin.rest.job.StorageCleanupJob;
 import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -235,8 +235,7 @@ public class BuildCubeWithStream {
         for (int i = 0; i < futures.size(); i++) {
             ExecutableState result = futures.get(i).get(20, TimeUnit.MINUTES);
             logger.info("Checking building task " + i + " whose state is " + result);
-            Assert.assertTrue(
-                    result == null || result == ExecutableState.SUCCEED || result == ExecutableState.DISCARDED);
+            Assert.assertTrue(result == null || result == ExecutableState.SUCCEED || result == ExecutableState.DISCARDED);
             if (result == ExecutableState.SUCCEED)
                 succeedBuild++;
         }
@@ -265,8 +264,7 @@ public class BuildCubeWithStream {
     }
 
     private ExecutableState mergeSegment(String cubeName, long startOffset, long endOffset) throws Exception {
-        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset,
-                false);
+        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
@@ -284,8 +282,7 @@ public class BuildCubeWithStream {
     protected ExecutableState buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
         CubeInstance cubeInstance = cubeManager.getCube(cubeName);
         ISource source = SourceFactory.getSource(cubeInstance);
-        SourcePartition partition = source.enrichSourcePartitionBeforeBuild(cubeInstance,
-                new SourcePartition(0, 0, startOffset, endOffset, null, null));
+        SourcePartition partition = source.enrichSourcePartitionBeforeBuild(cubeInstance, new SourcePartition(0, 0, startOffset, endOffset, null, null));
         CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), partition);
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
@@ -304,8 +301,7 @@ public class BuildCubeWithStream {
         ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
         System.setProperty(KylinConfig.KYLIN_CONF, HBaseMetadataTestCase.SANDBOX_TEST_DATA);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException(
-                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
         HBaseMetadataTestCase.staticCreateTestMetadata(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
     }
@@ -333,8 +329,7 @@ public class BuildCubeWithStream {
     protected void waitForJob(String jobId) {
         while (true) {
             AbstractExecutable job = jobService.getJob(jobId);
-            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR
-                    || job.getStatus() == ExecutableState.DISCARDED) {
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR || job.getStatus() == ExecutableState.DISCARDED) {
                 break;
             } else {
                 try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
index a57acb6..fce422a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
@@ -38,8 +38,7 @@ import kafka.server.KafkaServerStartable;
 import kafka.utils.ZkUtils;
 
 public class MockKafka {
-    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port,
-            String brokerId) {
+    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
         Properties properties = new Properties();
         properties.put("port", port);
         properties.put("broker.id", brokerId);
@@ -60,8 +59,7 @@ public class MockKafka {
     private ZkConnection zkConnection;
 
     public MockKafka(ZkConnection zkServerConnection) {
-        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + UUID.randomUUID().toString(), "9092",
-                "1");
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + UUID.randomUUID().toString(), "9092", "1");
         start();
     }
 
@@ -71,16 +69,14 @@ public class MockKafka {
     }
 
     public MockKafka(ZkConnection zkServerConnection, int port, int brokerId) {
-        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + UUID.randomUUID().toString(),
-                String.valueOf(port), String.valueOf(brokerId));
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + UUID.randomUUID().toString(), String.valueOf(port), String.valueOf(brokerId));
         //start();
     }
 
     private MockKafka(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
         this(createProperties(zkServerConnection, logDir, port, brokerId));
         this.zkConnection = zkServerConnection;
-        System.out.println(String.format("Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(),
-                kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
+        System.out.println(String.format("Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(), kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
     }
 
     public void createTopic(String topic, int partition, int replication) {
@@ -191,4 +187,4 @@ class ZKStringSerializer implements ZkSerializer {
             }
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
index ec0ba6b..78ed1b6 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
@@ -29,6 +29,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.MetadataManager;
@@ -37,8 +38,6 @@ import org.apache.kylin.metadata.model.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-
 public class H2Database {
     @SuppressWarnings("unused")
     private static final Logger logger = LoggerFactory.getLogger(H2Database.class);
@@ -139,8 +138,7 @@ public class H2Database {
             csvColumns.append(col.getName());
         }
         ddl.append(")" + "\n");
-        ddl.append("AS SELECT * FROM CSVREAD('" + csvFilePath + "', '" + csvColumns
-                + "', 'charset=UTF-8 fieldSeparator=,');");
+        ddl.append("AS SELECT * FROM CSVREAD('" + csvFilePath + "', '" + csvColumns + "', 'charset=UTF-8 fieldSeparator=,');");
 
         return ddl.toString();
     }
@@ -151,8 +149,7 @@ public class H2Database {
         for (ColumnDesc col : tableDesc.getColumns()) {
             if ("T".equalsIgnoreCase(col.getIndex())) {
                 StringBuilder ddl = new StringBuilder();
-                ddl.append("CREATE INDEX IDX_" + tableDesc.getName() + "_" + x + " ON " + tableDesc.getIdentity() + "("
-                        + col.getName() + ")");
+                ddl.append("CREATE INDEX IDX_" + tableDesc.getName() + "_" + x + " ON " + tableDesc.getIdentity() + "(" + col.getName() + ")");
                 ddl.append("\n");
                 result.add(ddl.toString());
                 x++;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/query/HackedDbUnitAssert.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/HackedDbUnitAssert.java b/kylin-it/src/test/java/org/apache/kylin/query/HackedDbUnitAssert.java
index 5b9c5d7..3a21570 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/HackedDbUnitAssert.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/HackedDbUnitAssert.java
@@ -54,8 +54,7 @@ public class HackedDbUnitAssert extends DbUnitAssert {
     }
 
     // THIS METHOD IS MOSTLY COPIED FROM DbUnitAssert. CHANGES ARE LEAD BY hackXXX CONDITION CHECKS.
-    public void assertEquals(ITable expectedTable, ITable actualTable, FailureHandler failureHandler)
-            throws DatabaseUnitException {
+    public void assertEquals(ITable expectedTable, ITable actualTable, FailureHandler failureHandler) throws DatabaseUnitException {
         logger.trace("assertEquals(expectedTable, actualTable, failureHandler) - start");
         logger.debug("assertEquals: expectedTable={}", expectedTable);
         logger.debug("assertEquals: actualTable={}", actualTable);
@@ -63,8 +62,7 @@ public class HackedDbUnitAssert extends DbUnitAssert {
 
         // Do not continue if same instance
         if (expectedTable == actualTable) {
-            logger.debug("The given tables reference the same object. Will return immediately. (Table={})",
-                    expectedTable);
+            logger.debug("The given tables reference the same object. Will return immediately. (Table={})", expectedTable);
             return;
         }
 
@@ -83,8 +81,7 @@ public class HackedDbUnitAssert extends DbUnitAssert {
         if (!hackCheckContains) {
             if (expectedRowsCount != actualRowsCount) {
                 String msg = "row count (table=" + expectedTableName + ")";
-                Error error = failureHandler.createFailure(msg, String.valueOf(expectedRowsCount),
-                        String.valueOf(actualRowsCount));
+                Error error = failureHandler.createFailure(msg, String.valueOf(expectedRowsCount), String.valueOf(actualRowsCount));
                 logger.error(error.toString());
                 throw error;
             }
@@ -105,15 +102,13 @@ public class HackedDbUnitAssert extends DbUnitAssert {
         Columns.ColumnDiff columnDiff = Columns.getColumnDiff(expectedMetaData, actualMetaData);
         if (columnDiff.hasDifference()) {
             String message = columnDiff.getMessage();
-            Error error = failureHandler.createFailure(message, Columns.getColumnNamesAsString(expectedColumns),
-                    Columns.getColumnNamesAsString(actualColumns));
+            Error error = failureHandler.createFailure(message, Columns.getColumnNamesAsString(expectedColumns), Columns.getColumnNamesAsString(actualColumns));
             logger.error(error.toString());
             throw error;
         }
 
         // Get the datatypes to be used for comparing the sorted columns
-        ComparisonColumn[] comparisonCols = getComparisonColumns(expectedTableName, expectedColumns, actualColumns,
-                failureHandler);
+        ComparisonColumn[] comparisonCols = getComparisonColumns(expectedTableName, expectedColumns, actualColumns, failureHandler);
 
         // Finally compare the data
         if (hackCheckContains)
@@ -124,8 +119,7 @@ public class HackedDbUnitAssert extends DbUnitAssert {
 
     // THIS METHOD IS COPIED FROM SUPER CLASS TO CHANGE ComparisonColumn TO OUR OWN.
     @Override
-    protected ComparisonColumn[] getComparisonColumns(String expectedTableName, Column[] expectedColumns,
-            Column[] actualColumns, FailureHandler failureHandler) {
+    protected ComparisonColumn[] getComparisonColumns(String expectedTableName, Column[] expectedColumns, Column[] actualColumns, FailureHandler failureHandler) {
         ComparisonColumn[] result = new ComparisonColumn[expectedColumns.length];
 
         for (int j = 0; j < expectedColumns.length; j++) {
@@ -141,12 +135,11 @@ public class HackedDbUnitAssert extends DbUnitAssert {
         private String columnName;
         private DataType dataType;
 
-        public HackedComparisonColumn(String tableName, Column expectedColumn, Column actualColumn,
-                FailureHandler failureHandler) {
-
+        public HackedComparisonColumn(String tableName, Column expectedColumn, Column actualColumn, FailureHandler failureHandler) {
+            
             // super class is actually useless, all public methods are overridden below
             super(tableName, expectedColumn, expectedColumn, failureHandler);
-
+            
             this.columnName = expectedColumn.getColumnName();
             this.dataType = getComparisonDataType(tableName, expectedColumn, actualColumn, failureHandler);
         }
@@ -162,12 +155,9 @@ public class HackedDbUnitAssert extends DbUnitAssert {
         }
 
         // COPIED FROM SUPER CLASS, CHANGES ARE LEAD BY hackXXX CONDITION CHECKS.
-        private DataType getComparisonDataType(String tableName, Column expectedColumn, Column actualColumn,
-                FailureHandler failureHandler) {
+        private DataType getComparisonDataType(String tableName, Column expectedColumn, Column actualColumn, FailureHandler failureHandler) {
             if (logger.isDebugEnabled())
-                logger.debug(
-                        "getComparisonDataType(tableName={}, expectedColumn={}, actualColumn={}, failureHandler={}) - start",
-                        new Object[] { tableName, expectedColumn, actualColumn, failureHandler });
+                logger.debug("getComparisonDataType(tableName={}, expectedColumn={}, actualColumn={}, failureHandler={}) - start", new Object[] { tableName, expectedColumn, actualColumn, failureHandler });
 
             DataType expectedDataType = expectedColumn.getDataType();
             DataType actualDataType = actualColumn.getDataType();
@@ -183,17 +173,15 @@ public class HackedDbUnitAssert extends DbUnitAssert {
                 if (actualDataType instanceof UnknownDataType) {
                     return expectedDataType;
                 }
-
+                
                 if (hackIgnoreIntBigIntMismatch) {
                     if (expectedDataType instanceof IntegerDataType && actualDataType instanceof BigIntegerDataType)
                         return actualDataType;
                 }
 
                 // Impossible to determine which data type to use
-                String msg = "Incompatible data types: (table=" + tableName + ", col=" + expectedColumn.getColumnName()
-                        + ")";
-                throw failureHandler.createFailure(msg, String.valueOf(expectedDataType),
-                        String.valueOf(actualDataType));
+                String msg = "Incompatible data types: (table=" + tableName + ", col=" + expectedColumn.getColumnName() + ")";
+                throw failureHandler.createFailure(msg, String.valueOf(expectedDataType), String.valueOf(actualDataType));
             }
 
             // Both columns have same data type, return any one of them
@@ -202,10 +190,8 @@ public class HackedDbUnitAssert extends DbUnitAssert {
 
     }
 
-    private void compareDataContains(ITable expectedTable, ITable actualTable, ComparisonColumn[] comparisonCols,
-            FailureHandler failureHandler) throws DataSetException {
-        logger.debug("compareData(expectedTable={}, actualTable={}, " + "comparisonCols={}, failureHandler={}) - start",
-                new Object[] { expectedTable, actualTable, comparisonCols, failureHandler });
+    private void compareDataContains(ITable expectedTable, ITable actualTable, ComparisonColumn[] comparisonCols, FailureHandler failureHandler) throws DataSetException {
+        logger.debug("compareData(expectedTable={}, actualTable={}, " + "comparisonCols={}, failureHandler={}) - start", new Object[] { expectedTable, actualTable, comparisonCols, failureHandler });
 
         if (expectedTable == null) {
             throw new NullPointerException("The parameter 'expectedTable' must not be null");
@@ -228,8 +214,7 @@ public class HackedDbUnitAssert extends DbUnitAssert {
 
     }
 
-    private boolean findRowInExpectedTable(ITable expectedTable, ITable actualTable, ComparisonColumn[] comparisonCols,
-            FailureHandler failureHandler, int index) throws DataSetException {
+    private boolean findRowInExpectedTable(ITable expectedTable, ITable actualTable, ComparisonColumn[] comparisonCols, FailureHandler failureHandler, int index) throws DataSetException {
 
         // iterate over all rows
         for (int i = 0; i < expectedTable.getRowCount(); i++) {
@@ -247,8 +232,7 @@ public class HackedDbUnitAssert extends DbUnitAssert {
                 // Compare the values
                 if (skipCompare(columnName, expectedValue, actualValue)) {
                     if (logger.isTraceEnabled()) {
-                        logger.trace("ignoring comparison " + expectedValue + "=" + actualValue + " on column "
-                                + columnName);
+                        logger.trace("ignoring comparison " + expectedValue + "=" + actualValue + " on column " + columnName);
                     }
                     continue;
                 }
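
For readers skimming the revert above: the hacked assert's hackCheckContains mode only requires every actual row to appear somewhere in the expected table, rather than demanding identical row count and order. Below is a minimal standalone sketch of that containment check on plain string rows (hypothetical data; this is not the DbUnit code itself):

import java.util.Arrays;
import java.util.List;

public class ContainsCompareSketch {

    // Returns true when the actual row matches some expected row column by column.
    static boolean containsRow(List<String[]> expectedRows, String[] actualRow) {
        for (String[] expected : expectedRows) {
            boolean allEqual = expected.length == actualRow.length;
            for (int c = 0; allEqual && c < actualRow.length; c++) {
                if (!expected[c].equals(actualRow[c])) {
                    allEqual = false;
                }
            }
            if (allEqual) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        List<String[]> expected = Arrays.asList(
                new String[] { "10000", "2013-01-01" },
                new String[] { "20000", "2013-01-02" });
        System.out.println(containsRow(expected, new String[] { "20000", "2013-01-02" })); // true
        System.out.println(containsRow(expected, new String[] { "30000", "2013-01-03" })); // false
    }
}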

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
index f5b41d6..0bed5ed 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
@@ -74,8 +74,7 @@ public class ITCombinationTest extends ITKylinQueryTest {
 
     public ITCombinationTest(String joinType, String coprocessorToggle, String queryEngine) throws Exception {
 
-        logger.info("Into combination join type: " + joinType + ", coprocessor toggle: " + coprocessorToggle
-                + ", query engine: " + queryEngine);
+        logger.info("Into combination join type: " + joinType + ", coprocessor toggle: " + coprocessorToggle + ", query engine: " + queryEngine);
 
         ITKylinQueryTest.clean();
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 55270e4..55041e3 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -83,8 +83,7 @@ public class ITKylinQueryTest extends KylinTestBase {
         try {
 
             Map<String, String> toggles = Maps.newHashMap();
-            toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR,
-                    StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
+            toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
             BackdoorToggles.setToggles(toggles);
 
             KylinConfig.getInstanceFromEnv().setProperty("kylin.storage.hbase.coprocessor-timeout-seconds", "3");
@@ -106,8 +105,7 @@ public class ITKylinQueryTest extends KylinTestBase {
     }
 
     protected void runTimeoutQueries() throws Exception {
-        List<File> sqlFiles = getFilesFromFolder(
-                new File(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout"), ".sql");
+        List<File> sqlFiles = getFilesFromFolder(new File(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout"), ".sql");
         for (File sqlFile : sqlFiles) {
             try {
                 runSQL(sqlFile, false, false);
@@ -239,8 +237,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testComputedColumnsQuery() throws Exception {
-        execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_computedcolumn", null, true,
-                CompareQueryBySuffix.INSTANCE);
+        execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_computedcolumn", null, true, CompareQueryBySuffix.INSTANCE);
     }
 
     @Test
@@ -339,8 +336,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testLimitEnabled() throws Exception {
-        List<File> sqlFiles = getFilesFromFolder(
-                new File(getQueryFolderPrefix() + "src/test/resources/query/sql_limit"), ".sql");
+        List<File> sqlFiles = getFilesFromFolder(new File(getQueryFolderPrefix() + "src/test/resources/query/sql_limit"), ".sql");
         for (File sqlFile : sqlFiles) {
             runSQL(sqlFile, false, false);
             assertTrue(checkFinalPushDownLimit());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
index c1e3ef5..cca0be6 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
@@ -142,8 +142,7 @@ public class ITMassInQueryTest extends KylinTestBase {
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, needSort);
 
             // execute H2
-            sql = sql.replace("massin(test_kylin_fact.SELLER_ID,'vip_customers')", "test_kylin_fact.SELLER_ID in ( "
-                    + org.apache.commons.lang.StringUtils.join(vipSellers, ",") + ")");
+            sql = sql.replace("massin(test_kylin_fact.SELLER_ID,'vip_customers')", "test_kylin_fact.SELLER_ID in ( " + org.apache.commons.lang.StringUtils.join(vipSellers, ",") + ")");
             logger.info("Query Result from H2 - " + queryName);
             logger.info("Query for H2 - " + sql);
             ITable h2Table = executeQuery(newH2Connection(), queryName, sql, needSort);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index f6b520a..42f3a44 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -42,14 +42,15 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.logging.LogManager;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.query.relnode.OLAPContext;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.rest.util.AdHocUtil;
 import org.dbunit.DatabaseUnitException;
 import org.dbunit.database.DatabaseConfig;
@@ -69,7 +70,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 
 /**
@@ -110,8 +110,7 @@ public class KylinTestBase {
     // h2 (BIGINT)
     public static class TestH2DataTypeFactory extends H2DataTypeFactory {
         @Override
-        public DataType createDataType(int sqlType, String sqlTypeName, String tableName, String columnName)
-                throws DataTypeException {
+        public DataType createDataType(int sqlType, String sqlTypeName, String tableName, String columnName) throws DataTypeException {
 
             if ((columnName.startsWith("COL") || columnName.startsWith("col")) && sqlType == Types.BIGINT) {
                 return DataType.INTEGER;
@@ -224,8 +223,7 @@ public class KylinTestBase {
     // ////////////////////////////////////////////////////////////////////////////////////////
     // execute
 
-    protected ITable executeQuery(IDatabaseConnection dbConn, String queryName, String sql, boolean needSort)
-            throws Exception {
+    protected ITable executeQuery(IDatabaseConnection dbConn, String queryName, String sql, boolean needSort) throws Exception {
 
         // change join type to match current setting
         sql = changeJoinType(sql, joinType);
@@ -260,7 +258,7 @@ public class KylinTestBase {
 
             return output(resultSet, needDisplay);
         } catch (SQLException sqlException) {
-            List<List<String>> results = Lists.newArrayList();
+            List<List<String>> results =  Lists.newArrayList();
             List<SelectedColumnMeta> columnMetas = Lists.newArrayList();
             AdHocUtil.doAdHocQuery(sql, results, columnMetas, sqlException);
             return results.size();
@@ -282,8 +280,7 @@ public class KylinTestBase {
         }
     }
 
-    protected ITable executeDynamicQuery(IDatabaseConnection dbConn, String queryName, String sql,
-            List<String> parameters, boolean needSort) throws Exception {
+    protected ITable executeDynamicQuery(IDatabaseConnection dbConn, String queryName, String sql, List<String> parameters, boolean needSort) throws Exception {
 
         // change join type to match current setting
         sql = changeJoinType(sql, joinType);
@@ -319,8 +316,7 @@ public class KylinTestBase {
 
         String[] tokens = StringUtils.split(sql, null);// split white spaces
         for (int i = 0; i < tokens.length - 1; ++i) {
-            if ((tokens[i].equalsIgnoreCase("inner") || tokens[i].equalsIgnoreCase("left"))
-                    && tokens[i + 1].equalsIgnoreCase("join")) {
+            if ((tokens[i].equalsIgnoreCase("inner") || tokens[i].equalsIgnoreCase("left")) && tokens[i + 1].equalsIgnoreCase("join")) {
                 tokens[i] = targetType.toLowerCase();
             }
         }
@@ -411,8 +407,7 @@ public class KylinTestBase {
         }
     }
 
-    protected void execAndCompResultSize(String queryFolder, String[] exclusiveQuerys, boolean needSort)
-            throws Exception {
+    protected void execAndCompResultSize(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
         logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
@@ -509,6 +504,7 @@ public class KylinTestBase {
         logger.info("Queries appended with limit: " + appendLimitQueries);
     }
 
+
     protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
         execAndCompQuery(queryFolder, exclusiveQuerys, needSort, new ICompareQueryTranslator() {
             @Override
@@ -522,8 +518,7 @@ public class KylinTestBase {
         });
     }
 
-    protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort,
-            ICompareQueryTranslator translator) throws Exception {
+    protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort, ICompareQueryTranslator translator) throws Exception {
         logger.info("---------- test folder: " + new File(queryFolder).getAbsolutePath());
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
@@ -562,8 +557,7 @@ public class KylinTestBase {
         }
     }
 
-    protected void execAndCompDynamicQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort)
-            throws Exception {
+    protected void execAndCompDynamicQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
         logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
@@ -689,8 +683,7 @@ public class KylinTestBase {
         cubeConnection = QueryDataSource.create(ProjectInstance.DEFAULT_PROJECT_NAME, config).getConnection();
 
         //setup h2
-        h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072", "sa",
-                "");
+        h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++) + ";CACHE_SIZE=32072", "sa", "");
         // Load H2 Tables (inner join)
         H2Database h2DB = new H2Database(h2Connection, config);
         h2DB.loadAllTables();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java b/kylin-it/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
index 9bfb643..d972eeb 100644
--- a/kylin-it/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/source/hive/ITHiveSourceTableLoaderTest.java
@@ -25,8 +25,8 @@ import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.source.ISource;
-import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -48,13 +48,13 @@ public class ITHiveSourceTableLoaderTest extends HBaseMetadataTestCase {
         ISource source = SourceFactory.getDefaultSource();
         ISourceMetadataExplorer explr = source.getSourceMetadataExplorer();
         Pair<TableDesc, TableExtDesc> pair;
-
+        
         pair = explr.loadTableMetadata("DEFAULT", "TEST_KYLIN_FACT");
         assertTrue(pair.getFirst().getIdentity().equals("DEFAULT.TEST_KYLIN_FACT"));
-
+        
         pair = explr.loadTableMetadata("EDW", "TEST_CAL_DT");
         assertTrue(pair.getFirst().getIdentity().equals("EDW.TEST_CAL_DT"));
-
+        
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
index 8540116..65d5b52 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
@@ -71,8 +71,7 @@ public class ITAclTableMigrationToolTest extends HBaseMetadataTestCase {
 
     private TableName userTable = TableName.valueOf(STORE_WITH_OLD_TABLE + AclHBaseStorage.USER_TABLE_NAME);
 
-    private Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(
-            UserGrantedAuthority[].class);
+    private Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(UserGrantedAuthority[].class);
 
     private AclTableMigrationTool aclTableMigrationJob;
 
@@ -128,8 +127,7 @@ public class ITAclTableMigrationToolTest extends HBaseMetadataTestCase {
     private void createTestHTables() throws IOException {
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
         Admin hbaseAdmin = new HBaseAdmin(conf);
-        creatTable(hbaseAdmin, conf, aclTable,
-                new String[] { AclHBaseStorage.ACL_INFO_FAMILY, AclHBaseStorage.ACL_ACES_FAMILY });
+        creatTable(hbaseAdmin, conf, aclTable, new String[] { AclHBaseStorage.ACL_INFO_FAMILY, AclHBaseStorage.ACL_ACES_FAMILY });
         creatTable(hbaseAdmin, conf, userTable, new String[] { AclHBaseStorage.USER_AUTHORITY_FAMILY });
     }
 
@@ -137,8 +135,7 @@ public class ITAclTableMigrationToolTest extends HBaseMetadataTestCase {
         Table htable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(userTable);
         Pair<byte[], byte[]> pair = getRandomUserRecord();
         Put put = new Put(pair.getKey());
-        put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY),
-                Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+        put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
         htable.put(put);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
index 6be724e..fd91397 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
@@ -18,9 +18,6 @@
 
 package org.apache.kylin.storage.hbase;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -33,6 +30,9 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class ITHBaseResourceStoreTest extends HBaseMetadataTestCase {
 
     private KylinConfig kylinConfig;
@@ -59,8 +59,7 @@ public class ITHBaseResourceStoreTest extends HBaseMetadataTestCase {
         String path = "/cube/_test_large_cell.json";
         String largeContent = "THIS_IS_A_LARGE_CELL";
         StringEntity content = new StringEntity(largeContent);
-        String oldUrl = ResourceStoreTest.replaceMetadataUrl(kylinConfig,
-                ResourceStoreTest.mockUrl("hbase", kylinConfig));
+        String oldUrl = ResourceStoreTest.replaceMetadataUrl(kylinConfig, ResourceStoreTest.mockUrl("hbase", kylinConfig));
         HBaseResourceStore store = new HBaseResourceStore(KylinConfig.getInstanceFromEnv());
         Configuration hconf = store.getConnection().getConfiguration();
         int origSize = Integer.parseInt(hconf.get("hbase.client.keyvalue.maxsize", "10485760"));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
index 112740e..24589a8 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITStorageTest.java
@@ -131,16 +131,13 @@ public class ITStorageTest extends HBaseMetadataTestCase {
             assertTrue(count > 0);
         }
     */
-    private int search(List<TblColRef> groups, List<FunctionDesc> aggregations, TupleFilter filter,
-            StorageContext context) {
+    private int search(List<TblColRef> groups, List<FunctionDesc> aggregations, TupleFilter filter, StorageContext context) {
         int count = 0;
         ITupleIterator iterator = null;
         try {
-            SQLDigest sqlDigest = new SQLDigest("default.test_kylin_fact",
-                    /*allCol*/ Collections.<TblColRef> emptySet(), /*join*/ null, //
+            SQLDigest sqlDigest = new SQLDigest("default.test_kylin_fact", /*allCol*/ Collections.<TblColRef> emptySet(), /*join*/ null, //
                     groups, /*subqueryJoinParticipants*/ Sets.<TblColRef> newHashSet(), //
-                    /*metricCol*/ Collections.<TblColRef> emptySet(), aggregations,
-                    /*aggrSqlCalls*/ Collections.<SQLCall> emptyList(), //
+                    /*metricCol*/ Collections.<TblColRef> emptySet(), aggregations, /*aggrSqlCalls*/ Collections.<SQLCall> emptyList(), //
                     /*filter col*/ Collections.<TblColRef> emptySet(), filter, null, //
                     /*sortCol*/ new ArrayList<TblColRef>(), new ArrayList<SQLDigest.OrderEnum>(), false);
             iterator = storageEngine.search(context, sqlDigest, mockup.newTupleInfo(groups, aggregations));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITZookeeperDistributedLockTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITZookeeperDistributedLockTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITZookeeperDistributedLockTest.java
index 00d57fd..48d6736 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITZookeeperDistributedLockTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITZookeeperDistributedLockTest.java
@@ -176,7 +176,7 @@ public class ITZookeeperDistributedLockTest extends HBaseMetadataTestCase {
         for (int i = 0; i < nClients; i++) {
             threads[i].join();
         }
-
+        
         Thread.sleep(3000);
 
         // verify counters

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
index a64bc7c..82e0406 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.storage.hdfs;
 
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStoreTest;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITLockManagerTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITLockManagerTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITLockManagerTest.java
index 6a0779f..5587eee 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITLockManagerTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITLockManagerTest.java
@@ -17,19 +17,6 @@
 */
 package org.apache.kylin.storage.hdfs;
 
-import static org.junit.Assert.assertEquals;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-
 import org.apache.curator.RetryPolicy;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
@@ -44,6 +31,19 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.junit.Assert.assertEquals;
+
 public class ITLockManagerTest extends HBaseMetadataTestCase {
 
     private KylinConfig kylinConfig;
@@ -110,8 +110,7 @@ public class ITLockManagerTest extends HBaseMetadataTestCase {
                     public Void call() throws Exception {
                         LockManager threadLocalLockManager = new LockManager(kylinConfig, lockRootPath);
                         try {
-                            ExampleClientThatLocks example = new ExampleClientThatLocks(threadLocalLockManager,
-                                    lockRootPath, resource, "Client " + index);
+                            ExampleClientThatLocks example = new ExampleClientThatLocks(threadLocalLockManager, lockRootPath, resource, "Client " + index);
                             for (int j = 0; j < REPETITIONS; ++j) {
                                 example.doWork(10, TimeUnit.SECONDS);
                             }
@@ -183,8 +182,7 @@ public class ITLockManagerTest extends HBaseMetadataTestCase {
 
         private String lockPath;
 
-        public ExampleClientThatLocks(LockManager lockManager, String lockPath, FakeLimitedResource resource,
-                String clientName) {
+        public ExampleClientThatLocks(LockManager lockManager, String lockPath, FakeLimitedResource resource, String clientName) {
             this.resource = resource;
             this.clientName = clientName;
             this.lockManager = lockManager;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/QueryCli.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/QueryCli.java b/query/src/main/java/org/apache/kylin/query/QueryCli.java
index 98f2b4e..5ced8e3 100644
--- a/query/src/main/java/org/apache/kylin/query/QueryCli.java
+++ b/query/src/main/java/org/apache/kylin/query/QueryCli.java
@@ -35,12 +35,10 @@ import org.apache.kylin.common.util.DBUtils;
 public class QueryCli {
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_METADATA = OptionBuilder.withArgName("metadata url").hasArg().isRequired()
-            .withDescription("Metadata URL").create("metadata");
+    private static final Option OPTION_METADATA = OptionBuilder.withArgName("metadata url").hasArg().isRequired().withDescription("Metadata URL").create("metadata");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_SQL = OptionBuilder.withArgName("input sql").hasArg().isRequired()
-            .withDescription("SQL").create("sql");
+    private static final Option OPTION_SQL = OptionBuilder.withArgName("input sql").hasArg().isRequired().withDescription("SQL").create("sql");
 
     public static void main(String[] args) throws Exception {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/QueryDataSource.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/QueryDataSource.java b/query/src/main/java/org/apache/kylin/query/QueryDataSource.java
index 78b3189..9ffdf0e 100644
--- a/query/src/main/java/org/apache/kylin/query/QueryDataSource.java
+++ b/query/src/main/java/org/apache/kylin/query/QueryDataSource.java
@@ -71,12 +71,12 @@ public class QueryDataSource {
         if (project == null) {
             throw new IllegalArgumentException("project should not be null");
         }
-
+        
         DataSource ds = olapDataSources.get(project);
         if (ds != null) {
             return ds;
         }
-
+        
         WrappedDataSource wrappedDS = getWrapped(project, config, props);
         ds = wrappedDS.getDataSource();
         olapDataSources.putIfAbsent(project, ds);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/enumerator/LookupTableEnumerator.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/enumerator/LookupTableEnumerator.java b/query/src/main/java/org/apache/kylin/query/enumerator/LookupTableEnumerator.java
index 14e3a1d..28ee623 100644
--- a/query/src/main/java/org/apache/kylin/query/enumerator/LookupTableEnumerator.java
+++ b/query/src/main/java/org/apache/kylin/query/enumerator/LookupTableEnumerator.java
@@ -64,8 +64,7 @@ public class LookupTableEnumerator implements Enumerator<Object[]> {
         String lookupTableName = olapContext.firstTableScan.getTableName();
         DimensionDesc dim = cube.getDescriptor().findDimensionByTable(lookupTableName);
         if (dim == null)
-            throw new IllegalStateException("No dimension with derived columns found for lookup table "
-                    + lookupTableName + ", cube desc " + cube.getDescriptor());
+            throw new IllegalStateException("No dimension with derived columns found for lookup table " + lookupTableName + ", cube desc " + cube.getDescriptor());
 
         CubeManager cubeMgr = CubeManager.getInstance(cube.getConfig());
         LookupStringTable table = cubeMgr.getLookupTable(cube.getLatestReadySegment(), dim.getJoin());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java b/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
index fee2877..56b82b9 100644
--- a/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
+++ b/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
@@ -117,8 +117,7 @@ public class OLAPEnumerator implements Enumerator<Object[]> {
 
         // query storage engine
         IStorageQuery storageEngine = StorageFactory.createQuery(olapContext.realization);
-        ITupleIterator iterator = storageEngine.search(olapContext.storageContext, sqlDigest,
-                olapContext.returnTupleInfo);
+        ITupleIterator iterator = storageEngine.search(olapContext.storageContext, sqlDigest, olapContext.returnTupleInfo);
         if (logger.isDebugEnabled()) {
             logger.debug("return TupleIterator...");
         }


[51/67] [abbrv] kylin git commit: minor, api to get AbstractExecutable

Posted by li...@apache.org.
minor, api to get AbstractExecutable


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/361ac006
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/361ac006
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/361ac006

Branch: refs/heads/master
Commit: 361ac0068661ad5627bc590b5174eb2b57c6ea60
Parents: afaa95a
Author: lidongsjtu <li...@apache.org>
Authored: Tue May 30 00:10:41 2017 +0800
Committer: Dong Li <li...@apache.org>
Committed: Tue May 30 14:04:27 2017 +0800

----------------------------------------------------------------------
 .../kylin/job/execution/ExecutableManager.java     | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/361ac006/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
index 2272582..170a254 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
@@ -18,6 +18,10 @@
 
 package org.apache.kylin.job.execution;
 
+import static org.apache.kylin.job.constant.ExecutableConstants.MR_JOB_ID;
+import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_ID;
+import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_URL;
+
 import java.lang.reflect.Constructor;
 import java.util.HashMap;
 import java.util.IllegalFormatException;
@@ -42,10 +46,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import static org.apache.kylin.job.constant.ExecutableConstants.MR_JOB_ID;
-import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_ID;
-import static org.apache.kylin.job.constant.ExecutableConstants.YARN_APP_URL;
-
 /**
  */
 public class ExecutableManager {
@@ -253,6 +253,15 @@ public class ExecutableManager {
         }
     }
 
+    public AbstractExecutable getAbstractExecutable(String uuid, Class<? extends AbstractExecutable> expectedClass) {
+        try {
+            return parseToAbstract(executableDao.getJob(uuid), expectedClass);
+        } catch (PersistentException e) {
+            logger.error("fail to get job:" + uuid, e);
+            throw new RuntimeException(e);
+        }
+    }
+
     public List<String> getAllJobIds() {
         try {
             return executableDao.getJobIds();
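
The new getAbstractExecutable(uuid, expectedClass) shown above parses a stored job straight into the caller's expected executable class. A minimal usage sketch follows (not part of the commit; it assumes a configured Kylin environment, and the choice of CubingJob as the concrete job type is an illustration, not something the commit prescribes):

import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.engine.mr.CubingJob;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableManager;

public class GetExecutableSketch {

    // Fetches the job with the given uuid from the job store, parsed as a CubingJob.
    public static AbstractExecutable fetch(String jobId) {
        ExecutableManager mgr = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
        return mgr.getAbstractExecutable(jobId, CubingJob.class);
    }
}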


[45/67] [abbrv] kylin git commit: minor, fix release test

Posted by li...@apache.org.
minor, fix release test

* fix release test cont.

* fix release text


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/96e8c7f7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/96e8c7f7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/96e8c7f7

Branch: refs/heads/master
Commit: 96e8c7f79a099703ff6cf670b90e63ef2b34bae1
Parents: 99f08a9
Author: hongbin ma <ma...@kyligence.io>
Authored: Mon May 29 00:17:08 2017 +0800
Committer: Dong Li <li...@apache.org>
Committed: Mon May 29 00:17:08 2017 +0800

----------------------------------------------------------------------
 build/smoke-test/sql/sql1.json | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/96e8c7f7/build/smoke-test/sql/sql1.json
----------------------------------------------------------------------
diff --git a/build/smoke-test/sql/sql1.json b/build/smoke-test/sql/sql1.json
index 21e4c01..abbd529 100644
--- a/build/smoke-test/sql/sql1.json
+++ b/build/smoke-test/sql/sql1.json
@@ -1,8 +1,9 @@
 {
-  "cube": "kylin_sales_cube",
+  "cube": "CUBE[name=kylin_sales_cube]",
   "partial": false,
   "affectedRowCount": 0,
   "isException": false,
+  "queryAdHoc": false,
   "results": [
     [
       "10000"
@@ -32,4 +33,4 @@
       "isNullable": 0
     }
   ]
-}
\ No newline at end of file
+}


[17/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/MeasureDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/MeasureDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/MeasureDesc.java
index 22ab809..deec4f2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/MeasureDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/MeasureDesc.java
@@ -84,8 +84,7 @@ public class MeasureDesc implements Serializable {
         if (!function.equals(that.getFunction()))
             return false;
 
-        if (dependentMeasureRef != null && that.getDependentMeasureRef() == null
-                || dependentMeasureRef == null && that.getDependentMeasureRef() != null)
+        if (dependentMeasureRef != null && that.getDependentMeasureRef() == null || dependentMeasureRef == null && that.getDependentMeasureRef() != null)
             return false;
 
         if (dependentMeasureRef == null && that.getDependentMeasureRef() == null)

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
index d484019..c0ddbad 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
@@ -31,7 +31,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
 public class ModelDimensionDesc implements Serializable {
     private static final long serialVersionUID = 1L;
-
+    
     @JsonProperty("table")
     private String table;
     @JsonProperty("columns")
@@ -58,7 +58,7 @@ public class ModelDimensionDesc implements Serializable {
         if (columns != null) {
             StringUtil.toUpperCaseArray(columns, columns);
         }
-
+        
         if (model != null) {
             table = model.findTable(table).getAlias();
             if (columns != null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
index 2acb12f..3c00149 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.metadata.model;
 
-import java.io.Serializable;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.DateFormat;
@@ -29,12 +27,14 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
+import java.io.Serializable;
+
 /**
  */
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
 public class PartitionDesc implements Serializable {
 
-    public static enum PartitionType implements Serializable {
+    public static enum PartitionType implements Serializable{
         APPEND, //
         UPDATE_INSERT // not used since 0.7.1
     }
@@ -80,7 +80,7 @@ public class PartitionDesc implements Serializable {
     public boolean partitionColumnIsYmdInt() {
         if (partitionDateColumnRef == null)
             return false;
-
+        
         DataType type = partitionDateColumnRef.getType();
         return (type.isInt() || type.isBigInt()) && DateFormat.isDatePattern(partitionDateFormat);
     }
@@ -88,7 +88,7 @@ public class PartitionDesc implements Serializable {
     public boolean partitionColumnIsTimeMillis() {
         if (partitionDateColumnRef == null)
             return false;
-
+        
         DataType type = partitionDateColumnRef.getType();
         return type.isBigInt() && !DateFormat.isDatePattern(partitionDateFormat);
     }
@@ -105,12 +105,12 @@ public class PartitionDesc implements Serializable {
     public void setPartitionDateColumn(String partitionDateColumn) {
         this.partitionDateColumn = partitionDateColumn;
     }
-
+    
     // for test
     void setPartitionDateColumnRef(TblColRef partitionDateColumnRef) {
         this.partitionDateColumnRef = partitionDateColumnRef;
     }
-
+    
     public String getPartitionTimeColumn() {
         return partitionTimeColumn;
     }
@@ -124,7 +124,7 @@ public class PartitionDesc implements Serializable {
     void setPartitionTimeColumnRef(TblColRef partitionTimeColumnRef) {
         this.partitionTimeColumnRef = partitionTimeColumnRef;
     }
-
+    
     @Deprecated
     public long getPartitionDateStart() {
         return partitionDateStart;
@@ -170,7 +170,7 @@ public class PartitionDesc implements Serializable {
     public TblColRef getPartitionTimeColumnRef() {
         return partitionTimeColumnRef;
     }
-
+    
     // ============================================================================
 
     public static interface IPartitionConditionBuilder {
@@ -190,21 +190,17 @@ public class PartitionDesc implements Serializable {
             } else if (partDesc.partitionColumnIsTimeMillis()) {
                 buildSingleColumnRangeCondAsTimeMillis(builder, partitionDateColumn, startInclusive, endExclusive);
             } else if (partitionDateColumn != null && partitionTimeColumn == null) {
-                buildSingleColumnRangeCondition(builder, partitionDateColumn, startInclusive, endExclusive,
-                        partDesc.getPartitionDateFormat());
+                buildSingleColumnRangeCondition(builder, partitionDateColumn, startInclusive, endExclusive, partDesc.getPartitionDateFormat());
             } else if (partitionDateColumn == null && partitionTimeColumn != null) {
-                buildSingleColumnRangeCondition(builder, partitionTimeColumn, startInclusive, endExclusive,
-                        partDesc.getPartitionTimeFormat());
+                buildSingleColumnRangeCondition(builder, partitionTimeColumn, startInclusive, endExclusive, partDesc.getPartitionTimeFormat());
             } else if (partitionDateColumn != null && partitionTimeColumn != null) {
-                buildMultipleColumnRangeCondition(builder, partitionDateColumn, partitionTimeColumn, startInclusive,
-                        endExclusive, partDesc.getPartitionDateFormat(), partDesc.getPartitionTimeFormat());
+                buildMultipleColumnRangeCondition(builder, partitionDateColumn, partitionTimeColumn, startInclusive, endExclusive, partDesc.getPartitionDateFormat(), partDesc.getPartitionTimeFormat());
             }
 
             return builder.toString();
         }
 
-        private static void buildSingleColumnRangeCondAsTimeMillis(StringBuilder builder, TblColRef partitionColumn,
-                long startInclusive, long endExclusive) {
+        private static void buildSingleColumnRangeCondAsTimeMillis(StringBuilder builder, TblColRef partitionColumn, long startInclusive, long endExclusive) {
             String partitionColumnName = partitionColumn.getIdentity();
             if (startInclusive > 0) {
                 builder.append(partitionColumnName + " >= " + startInclusive);
@@ -213,47 +209,35 @@ public class PartitionDesc implements Serializable {
             builder.append(partitionColumnName + " < " + endExclusive);
         }
 
-        private static void buildSingleColumnRangeCondAsYmdInt(StringBuilder builder, TblColRef partitionColumn,
-                long startInclusive, long endExclusive) {
+        private static void buildSingleColumnRangeCondAsYmdInt(StringBuilder builder, TblColRef partitionColumn, long startInclusive, long endExclusive) {
             String partitionColumnName = partitionColumn.getIdentity();
             if (startInclusive > 0) {
-                builder.append(partitionColumnName + " >= "
-                        + DateFormat.formatToDateStr(startInclusive, DateFormat.COMPACT_DATE_PATTERN));
+                builder.append(partitionColumnName + " >= " + DateFormat.formatToDateStr(startInclusive, DateFormat.COMPACT_DATE_PATTERN));
                 builder.append(" AND ");
             }
-            builder.append(partitionColumnName + " < "
-                    + DateFormat.formatToDateStr(endExclusive, DateFormat.COMPACT_DATE_PATTERN));
+            builder.append(partitionColumnName + " < " + DateFormat.formatToDateStr(endExclusive, DateFormat.COMPACT_DATE_PATTERN));
         }
 
-        private static void buildSingleColumnRangeCondition(StringBuilder builder, TblColRef partitionColumn,
-                long startInclusive, long endExclusive, String partitionColumnDateFormat) {
+        private static void buildSingleColumnRangeCondition(StringBuilder builder, TblColRef partitionColumn, long startInclusive, long endExclusive, String partitionColumnDateFormat) {
             String partitionColumnName = partitionColumn.getIdentity();
             if (startInclusive > 0) {
-                builder.append(partitionColumnName + " >= '"
-                        + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat) + "'");
+                builder.append(partitionColumnName + " >= '" + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat) + "'");
                 builder.append(" AND ");
             }
-            builder.append(partitionColumnName + " < '"
-                    + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat) + "'");
+            builder.append(partitionColumnName + " < '" + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat) + "'");
         }
 
-        private static void buildMultipleColumnRangeCondition(StringBuilder builder, TblColRef partitionDateColumn,
-                TblColRef partitionTimeColumn, long startInclusive, long endExclusive, String partitionColumnDateFormat,
-                String partitionColumnTimeFormat) {
+        private static void buildMultipleColumnRangeCondition(StringBuilder builder, TblColRef partitionDateColumn, TblColRef partitionTimeColumn, long startInclusive, long endExclusive, String partitionColumnDateFormat, String partitionColumnTimeFormat) {
             String partitionDateColumnName = partitionDateColumn.getIdentity();
             String partitionTimeColumnName = partitionTimeColumn.getIdentity();
             if (startInclusive > 0) {
                 builder.append("(");
                 builder.append("(");
-                builder.append(partitionDateColumnName + " = '"
-                        + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat) + "'").append(" AND ")
-                        .append(partitionTimeColumnName + " >= '"
-                                + DateFormat.formatToDateStr(startInclusive, partitionColumnTimeFormat) + "'");
+                builder.append(partitionDateColumnName + " = '" + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat) + "'").append(" AND ").append(partitionTimeColumnName + " >= '" + DateFormat.formatToDateStr(startInclusive, partitionColumnTimeFormat) + "'");
                 builder.append(")");
                 builder.append(" OR ");
                 builder.append("(");
-                builder.append(partitionDateColumnName + " > '"
-                        + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat) + "'");
+                builder.append(partitionDateColumnName + " > '" + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat) + "'");
                 builder.append(")");
                 builder.append(")");
                 builder.append(" AND ");
@@ -261,15 +245,11 @@ public class PartitionDesc implements Serializable {
 
             builder.append("(");
             builder.append("(");
-            builder.append(partitionDateColumnName + " = '"
-                    + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat) + "'").append(" AND ")
-                    .append(partitionTimeColumnName + " < '"
-                            + DateFormat.formatToDateStr(endExclusive, partitionColumnTimeFormat) + "'");
+            builder.append(partitionDateColumnName + " = '" + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat) + "'").append(" AND ").append(partitionTimeColumnName + " < '" + DateFormat.formatToDateStr(endExclusive, partitionColumnTimeFormat) + "'");
             builder.append(")");
             builder.append(" OR ");
             builder.append("(");
-            builder.append(partitionDateColumnName + " < '"
-                    + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat) + "'");
+            builder.append(partitionDateColumnName + " < '" + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat) + "'");
             builder.append(")");
             builder.append(")");
         }
@@ -287,8 +267,7 @@ public class PartitionDesc implements Serializable {
             TblColRef partitionColumn = partDesc.getPartitionDateColumnRef();
             String tableAlias = partitionColumn.getTableAlias();
 
-            String concatField = String.format("CONCAT(%s.YEAR,'-',%s.MONTH,'-',%s.DAY)", tableAlias, tableAlias,
-                    tableAlias);
+            String concatField = String.format("CONCAT(%s.YEAR,'-',%s.MONTH,'-',%s.DAY)", tableAlias, tableAlias, tableAlias);
             StringBuilder builder = new StringBuilder();
 
             if (startInclusive > 0) {
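
The PartitionDesc hunks above only fold the range-condition builders back onto single lines; the generated filter is unchanged. As a rough illustration, here is a standalone sketch of the single-column, date-formatted condition those builders emit (column name, date format and timestamps are made up for the example):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class PartitionConditionSketch {

    // Formats epoch millis with the given pattern in UTC, mirroring a date-typed partition column.
    static String format(long millis, String pattern) {
        SimpleDateFormat f = new SimpleDateFormat(pattern);
        f.setTimeZone(TimeZone.getTimeZone("UTC"));
        return f.format(new Date(millis));
    }

    public static void main(String[] args) {
        String col = "TEST_KYLIN_FACT.CAL_DT";   // hypothetical partition column
        String fmt = "yyyy-MM-dd";                // hypothetical partition date format
        long startInclusive = 1356998400000L;     // 2013-01-01 UTC
        long endExclusive = 1359676800000L;       // 2013-02-01 UTC

        StringBuilder builder = new StringBuilder();
        if (startInclusive > 0) {
            builder.append(col + " >= '" + format(startInclusive, fmt) + "'").append(" AND ");
        }
        builder.append(col + " < '" + format(endExclusive, fmt) + "'");

        // Prints: TEST_KYLIN_FACT.CAL_DT >= '2013-01-01' AND TEST_KYLIN_FACT.CAL_DT < '2013-02-01'
        System.out.println(builder.toString());
    }
}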

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/SegmentStatusEnum.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/SegmentStatusEnum.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/SegmentStatusEnum.java
index 3b30d01..689a314 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/SegmentStatusEnum.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/SegmentStatusEnum.java
@@ -24,6 +24,6 @@ import java.io.Serializable;
  * @author xduo
  * 
  */
-public enum SegmentStatusEnum implements Serializable {
+public enum SegmentStatusEnum implements Serializable{
     NEW, READY, READY_PENDING
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/Segments.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/Segments.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/Segments.java
index af2b0ea..9d22dc6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/Segments.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/Segments.java
@@ -28,7 +28,7 @@ import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class Segments<T extends ISegment> extends ArrayList<T> implements Serializable {
+public class Segments<T extends ISegment> extends ArrayList<T> implements Serializable{
 
     private static final long serialVersionUID = 1L;
 
@@ -110,8 +110,7 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
 
     public T getSegment(String name, SegmentStatusEnum status) {
         for (T segment : this) {
-            if ((null != segment.getName() && segment.getName().equals(name))
-                    && (status == null || segment.getStatus() == status)) {
+            if ((null != segment.getName() && segment.getName().equals(name)) && (status == null || segment.getStatus() == status)) {
                 return segment;
             }
         }
@@ -122,8 +121,7 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
         Segments<T> buildingSegments = new Segments();
         if (null != this) {
             for (T segment : this) {
-                if (SegmentStatusEnum.NEW == segment.getStatus()
-                        || SegmentStatusEnum.READY_PENDING == segment.getStatus()) {
+                if (SegmentStatusEnum.NEW == segment.getStatus() || SegmentStatusEnum.READY_PENDING == segment.getStatus()) {
                     buildingSegments.add(segment);
                 }
             }
@@ -146,8 +144,7 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
             if (sourceOffsetContains(mergedSegment, seg)) {
                 // make sure no holes
                 if (result.size() > 0 && result.getLast().getSourceOffsetEnd() != seg.getSourceOffsetStart())
-                    throw new IllegalStateException(
-                            "Merging segments must not have holes between " + result.getLast() + " and " + seg);
+                    throw new IllegalStateException("Merging segments must not have holes between " + result.getLast() + " and " + seg);
 
                 result.add(seg);
             }
@@ -155,8 +152,7 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
         return result;
     }
 
-    public Pair<Long, Long> autoMergeCubeSegments(boolean needAutoMerge, String cubeName, long[] timeRanges)
-            throws IOException {
+    public Pair<Long, Long> autoMergeCubeSegments(boolean needAutoMerge, String cubeName, long[] timeRanges) throws IOException {
         if (!needAutoMerge) {
             logger.debug("Cube " + cubeName + " doesn't need auto merge");
             return null;
@@ -175,8 +171,7 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
             for (ISegment building : getBuildingSegments()) {
                 // exclude those under-merging segs
                 for (ISegment ready : readySegs) {
-                    if (ready.getSourceOffsetStart() >= building.getSourceOffsetStart()
-                            && ready.getSourceOffsetEnd() <= building.getSourceOffsetEnd()) {
+                    if (ready.getSourceOffsetStart() >= building.getSourceOffsetStart() && ready.getSourceOffsetEnd() <= building.getSourceOffsetEnd()) {
                         mergingSegs.add(ready);
                     }
                 }
@@ -194,8 +189,7 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
             for (int s = 0; s < readySegs.size(); s++) {
                 ISegment seg = readySegs.get(s);
                 Pair<T, T> p = readySegs.getSubList(s, readySegs.size()) //
-                        .findMergeOffsetsByDateRange(seg.getDateRangeStart(), seg.getDateRangeStart() + toMergeRange,
-                                toMergeRange);
+                        .findMergeOffsetsByDateRange(seg.getDateRangeStart(), seg.getDateRangeStart() + toMergeRange, toMergeRange);
                 if (p != null && p.getSecond().getDateRangeEnd() - p.getFirst().getDateRangeStart() >= toMergeRange)
                     return Pair.newPair(p.getFirst().getSourceOffsetStart(), p.getSecond().getSourceOffsetEnd());
             }
@@ -339,4 +333,4 @@ public class Segments<T extends ISegment> extends ArrayList<T> implements Serial
         return result;
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
index 692f8a9..b388f11 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
@@ -180,9 +180,9 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
         if (columns == null) {
             return -1;
         }
-
+        
         int max = -1;
-
+        
         for (ColumnDesc col : columns) {
             int idx = col.getZeroBasedIndex();
             max = Math.max(max, idx);
@@ -264,9 +264,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
 
     @Override
     public String toString() {
-        return "TableDesc{" + "name='" + name + '\'' + ", columns=" + Arrays.toString(columns) + ", sourceType="
-                + sourceType + ", tableType='" + tableType + '\'' + ", database=" + database + ", identity='"
-                + getIdentity() + '\'' + '}';
+        return "TableDesc{" + "name='" + name + '\'' + ", columns=" + Arrays.toString(columns) + ", sourceType=" + sourceType + ", tableType='" + tableType + '\'' + ", database=" + database + ", identity='" + getIdentity() + '\'' + '}';
     }
 
     /** create a mockup table for unit test */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
index 14e3049..a0f67f0 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
@@ -181,8 +181,7 @@ public class TableExtDesc extends RootPersistentEntity {
     }
 
     public boolean isPartitioned() {
-        return this.dataSourceProps.get("partition_column") == null ? false
-                : !this.dataSourceProps.get("partition_column").isEmpty();
+        return this.dataSourceProps.get("partition_column") == null ? false : !this.dataSourceProps.get("partition_column").isEmpty();
     }
 
     @Override
@@ -199,8 +198,7 @@ public class TableExtDesc extends RootPersistentEntity {
 
     @Override
     public String toString() {
-        return "TableExtDesc{" + "name='" + (null == tableName ? "NULL" : tableName) + '\'' + ", columns_samples="
-                + (null == columnStats ? "null" : Arrays.toString(columnStats.toArray()));
+        return "TableExtDesc{" + "name='" + (null == tableName ? "NULL" : tableName) + '\'' + ", columns_samples=" + (null == columnStats ? "null" : Arrays.toString(columnStats.toArray()));
     }
 
     @JsonIgnoreProperties(ignoreUnknown = true)

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
index 9669922..b213eae 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
@@ -26,6 +26,7 @@ import java.util.TreeSet;
 
 import javax.annotation.Nullable;
 
+import com.fasterxml.jackson.annotation.JsonInclude;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.RootPersistentEntity;
@@ -33,7 +34,6 @@ import org.apache.kylin.metadata.realization.RealizationType;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
-import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
@@ -99,9 +99,7 @@ public class ProjectInstance extends RootPersistentEntity {
         return project.toUpperCase();
     }
 
-    public static ProjectInstance create(String name, String owner, String description,
-            LinkedHashMap<String, String> overrideProps, List<RealizationEntry> realizationEntries,
-            List<String> models) {
+    public static ProjectInstance create(String name, String owner, String description, LinkedHashMap<String, String> overrideProps, List<RealizationEntry> realizationEntries, List<String> models) {
         ProjectInstance projectInstance = new ProjectInstance();
 
         projectInstance.updateRandomUuid();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectL2Cache.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectL2Cache.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectL2Cache.java
index 4eeb7f0..cf5498c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectL2Cache.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectL2Cache.java
@@ -212,8 +212,7 @@ class ProjectL2Cache {
             if (filterDesc != null) {
                 projectCache.extFilters.put(extFilterName, filterDesc);
             } else {
-                logger.warn(
-                        "External Filter '" + extFilterName + "' defined under project '" + project + "' is not found");
+                logger.warn("External Filter '" + extFilterName + "' defined under project '" + project + "' is not found");
             }
         }
 
@@ -260,17 +259,14 @@ class ProjectL2Cache {
         for (TblColRef col : allColumns) {
             TableDesc table = metaMgr.getTableDesc(col.getTable());
             if (table == null) {
-                logger.error("Realization '" + realization.getCanonicalName() + "' reports column '"
-                        + col.getCanonicalName() + "', but its table is not found by MetadataManager");
+                logger.error("Realization '" + realization.getCanonicalName() + "' reports column '" + col.getCanonicalName() + "', but its table is not found by MetadataManager");
                 return false;
             }
 
             if (!col.getColumnDesc().isComputedColumnn()) {
                 ColumnDesc foundCol = table.findColumnByName(col.getName());
                 if (col.getColumnDesc().equals(foundCol) == false) {
-                    logger.error("Realization '" + realization.getCanonicalName() + "' reports column '"
-                            + col.getCanonicalName() + "', but it is not equal to '" + foundCol
-                            + "' according to MetadataManager");
+                    logger.error("Realization '" + realization.getCanonicalName() + "' reports column '" + col.getCanonicalName() + "', but it is not equal to '" + foundCol + "' according to MetadataManager");
                     return false;
                 }
             } else {
@@ -280,9 +276,7 @@ class ProjectL2Cache {
             // auto-define table required by realization for some legacy test case
             if (prjCache.tables.get(table.getIdentity()) == null) {
                 prjCache.tables.put(table.getIdentity(), new TableCache(table));
-                logger.warn(
-                        "Realization '" + realization.getCanonicalName() + "' reports column '" + col.getCanonicalName()
-                                + "' whose table is not defined in project '" + prjCache.project + "'");
+                logger.warn("Realization '" + realization.getCanonicalName() + "' reports column '" + col.getCanonicalName() + "' whose table is not defined in project '" + prjCache.project + "'");
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index 6f2705e..ff2d3e6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -52,8 +52,7 @@ import com.google.common.collect.Lists;
 public class ProjectManager {
     private static final Logger logger = LoggerFactory.getLogger(ProjectManager.class);
     private static final ConcurrentMap<KylinConfig, ProjectManager> CACHE = new ConcurrentHashMap<KylinConfig, ProjectManager>();
-    public static final Serializer<ProjectInstance> PROJECT_SERIALIZER = new JsonSerializer<ProjectInstance>(
-            ProjectInstance.class);
+    public static final Serializer<ProjectInstance> PROJECT_SERIALIZER = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
 
     public static ProjectManager getInstance(KylinConfig config) {
         ProjectManager r = CACHE.get(config);
@@ -109,8 +108,7 @@ public class ProjectManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             String project = cacheKey;
 
             if (event == Event.DROP)
@@ -131,8 +129,7 @@ public class ProjectManager {
         ResourceStore store = getStore();
         List<String> paths = store.collectResourceRecursively(ResourceStore.PROJECT_RESOURCE_ROOT, ".json");
 
-        logger.debug(
-                "Loading Project from folder " + store.getReadableResourcePath(ResourceStore.PROJECT_RESOURCE_ROOT));
+        logger.debug("Loading Project from folder " + store.getReadableResourcePath(ResourceStore.PROJECT_RESOURCE_ROOT));
 
         for (String path : paths) {
             reloadProjectLocalAt(path);
@@ -168,8 +165,7 @@ public class ProjectManager {
         return projectMap.get(projectName);
     }
 
-    public ProjectInstance createProject(String projectName, String owner, String description,
-            LinkedHashMap<String, String> overrideProps) throws IOException {
+    public ProjectInstance createProject(String projectName, String owner, String description, LinkedHashMap<String, String> overrideProps) throws IOException {
         logger.info("Creating project " + projectName);
 
         ProjectInstance currentProject = getProject(projectName);
@@ -195,8 +191,7 @@ public class ProjectManager {
         }
 
         if (projectInstance.getRealizationCount(null) != 0) {
-            throw new IllegalStateException("The project named " + projectName
-                    + " can not be deleted because there's still realizations in it. Delete them first.");
+            throw new IllegalStateException("The project named " + projectName + " can not be deleted because there's still realizations in it. Delete them first.");
         }
 
         logger.info("Dropping project '" + projectInstance.getName() + "'");
@@ -215,8 +210,7 @@ public class ProjectManager {
     }
 
     //update project itself
-    public ProjectInstance updateProject(ProjectInstance project, String newName, String newDesc,
-            LinkedHashMap<String, String> overrideProps) throws IOException {
+    public ProjectInstance updateProject(ProjectInstance project, String newName, String newDesc, LinkedHashMap<String, String> overrideProps) throws IOException {
         if (!project.getName().equals(newName)) {
             ProjectInstance newProject = this.createProject(newName, project.getOwner(), newDesc, overrideProps);
 
@@ -292,21 +286,16 @@ public class ProjectManager {
         return newProject;
     }
 
-    public ProjectInstance moveRealizationToProject(RealizationType type, String realizationName, String newProjectName,
-            String owner) throws IOException {
+    public ProjectInstance moveRealizationToProject(RealizationType type, String realizationName, String newProjectName, String owner) throws IOException {
         removeRealizationsFromProjects(type, realizationName);
         return addRealizationToProject(type, realizationName, newProjectName, owner);
     }
 
-    private ProjectInstance addRealizationToProject(RealizationType type, String realizationName, String project,
-            String user) throws IOException {
+    private ProjectInstance addRealizationToProject(RealizationType type, String realizationName, String project, String user) throws IOException {
         String newProjectName = norm(project);
         ProjectInstance newProject = getProject(newProjectName);
         if (newProject == null) {
-            newProject = this.createProject(newProjectName, user,
-                    "This is a project automatically added when adding realization " + realizationName + "(" + type
-                            + ")",
-                    null);
+            newProject = this.createProject(newProjectName, user, "This is a project automatically added when adding realization " + realizationName + "(" + type + ")", null);
         }
         newProject.addRealizationEntry(type, realizationName);
         updateProject(newProject);
@@ -433,13 +422,11 @@ public class ProjectManager {
     }
 
     public boolean isExposedTable(String project, String table) {
-        return config.isAdhocEnabled() ? l2Cache.isDefinedTable(norm(project), table)
-                : l2Cache.isExposedTable(norm(project), table);
+        return config.isAdhocEnabled() ? l2Cache.isDefinedTable(norm(project), table) : l2Cache.isExposedTable(norm(project), table);
     }
 
     public boolean isExposedColumn(String project, String table, String col) {
-        return config.isAdhocEnabled() ? l2Cache.isDefinedColumn(norm(project), table, col)
-                : l2Cache.isExposedColumn(norm(project), table, col);
+        return config.isAdhocEnabled() ? l2Cache.isDefinedColumn(norm(project), table, col) : l2Cache.isExposedColumn(norm(project), table, col);
     }
 
     public Set<IRealization> listAllRealizations(String project) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/project/RealizationEntry.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/RealizationEntry.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/RealizationEntry.java
index 8139053..421f802 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/RealizationEntry.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/RealizationEntry.java
@@ -18,17 +18,17 @@
 
 package org.apache.kylin.metadata.project;
 
-import java.io.Serializable;
-
 import org.apache.kylin.metadata.realization.RealizationType;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
+import java.io.Serializable;
+
 /**
  */
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
-public class RealizationEntry implements Serializable {
+public class RealizationEntry implements Serializable{
 
     @JsonProperty("type")
     private RealizationType type;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMeta.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMeta.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMeta.java
index b591c58..89b5675 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMeta.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMeta.java
@@ -51,11 +51,7 @@ public class ColumnMeta implements Serializable {
     public ColumnMeta() {
     }
 
-    public ColumnMeta(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String cOLUMN_NAME, int dATA_TYPE,
-            String tYPE_NAME, int cOLUMN_SIZE, int bUFFER_LENGTH, int dECIMAL_DIGITS, int nUM_PREC_RADIX, int nULLABLE,
-            String rEMARKS, String cOLUMN_DEF, int sQL_DATA_TYPE, int sQL_DATETIME_SUB, int cHAR_OCTET_LENGTH,
-            int oRDINAL_POSITION, String iS_NULLABLE, String sCOPE_CATLOG, String sCOPE_SCHEMA, String sCOPE_TABLE,
-            short sOURCE_DATA_TYPE, String iS_AUTOINCREMENT) {
+    public ColumnMeta(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String cOLUMN_NAME, int dATA_TYPE, String tYPE_NAME, int cOLUMN_SIZE, int bUFFER_LENGTH, int dECIMAL_DIGITS, int nUM_PREC_RADIX, int nULLABLE, String rEMARKS, String cOLUMN_DEF, int sQL_DATA_TYPE, int sQL_DATETIME_SUB, int cHAR_OCTET_LENGTH, int oRDINAL_POSITION, String iS_NULLABLE, String sCOPE_CATLOG, String sCOPE_SCHEMA, String sCOPE_TABLE, short sOURCE_DATA_TYPE, String iS_AUTOINCREMENT) {
         super();
         TABLE_CAT = tABLE_CAT;
         TABLE_SCHEM = tABLE_SCHEM;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
index 245ede4..e3cb86b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/ColumnMetaWithType.java
@@ -33,11 +33,7 @@ public class ColumnMetaWithType extends ColumnMeta {
 
     private HashSet<columnTypeEnum> TYPE;
 
-    public ColumnMetaWithType(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String cOLUMN_NAME,
-            int dATA_TYPE, String tYPE_NAME, int cOLUMN_SIZE, int bUFFER_LENGTH, int dECIMAL_DIGITS, int nUM_PREC_RADIX,
-            int nULLABLE, String rEMARKS, String cOLUMN_DEF, int sQL_DATA_TYPE, int sQL_DATETIME_SUB,
-            int cHAR_OCTET_LENGTH, int oRDINAL_POSITION, String iS_NULLABLE, String sCOPE_CATLOG, String sCOPE_SCHEMA,
-            String sCOPE_TABLE, short sOURCE_DATA_TYPE, String iS_AUTOINCREMENT) {
+    public ColumnMetaWithType(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String cOLUMN_NAME, int dATA_TYPE, String tYPE_NAME, int cOLUMN_SIZE, int bUFFER_LENGTH, int dECIMAL_DIGITS, int nUM_PREC_RADIX, int nULLABLE, String rEMARKS, String cOLUMN_DEF, int sQL_DATA_TYPE, int sQL_DATETIME_SUB, int cHAR_OCTET_LENGTH, int oRDINAL_POSITION, String iS_NULLABLE, String sCOPE_CATLOG, String sCOPE_SCHEMA, String sCOPE_TABLE, short sOURCE_DATA_TYPE, String iS_AUTOINCREMENT) {
         TABLE_CAT = tABLE_CAT;
         TABLE_SCHEM = tABLE_SCHEM;
         TABLE_NAME = tABLE_NAME;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
index c3bd0b7..9ba0da2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/SelectedColumnMeta.java
@@ -23,10 +23,7 @@ import java.io.Serializable;
 /**
  */
 public class SelectedColumnMeta implements Serializable {
-    public SelectedColumnMeta(boolean isAutoIncrement, boolean isCaseSensitive, boolean isSearchable,
-            boolean isCurrency, int isNullalbe, boolean isSigned, int displaySize, String label, String name,
-            String schemaName, String catelogName, String tableName, int precision, int scale, int columnType,
-            String columnTypeName, boolean isReadOnly, boolean isWritable, boolean isDefinitelyWritable) {
+    public SelectedColumnMeta(boolean isAutoIncrement, boolean isCaseSensitive, boolean isSearchable, boolean isCurrency, int isNullalbe, boolean isSigned, int displaySize, String label, String name, String schemaName, String catelogName, String tableName, int precision, int scale, int columnType, String columnTypeName, boolean isReadOnly, boolean isWritable, boolean isDefinitelyWritable) {
         super();
         this.isAutoIncrement = isAutoIncrement;
         this.isCaseSensitive = isCaseSensitive;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMeta.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMeta.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMeta.java
index bf9532e..b02fb4e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMeta.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMeta.java
@@ -42,9 +42,7 @@ public class TableMeta implements Serializable {
     public TableMeta() {
     }
 
-    public TableMeta(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String tABLE_TYPE, String rEMARKS,
-            String tYPE_CAT, String tYPE_SCHEM, String tYPE_NAME, String sELF_REFERENCING_COL_NAME,
-            String rEF_GENERATION) {
+    public TableMeta(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String tABLE_TYPE, String rEMARKS, String tYPE_CAT, String tYPE_SCHEM, String tYPE_NAME, String sELF_REFERENCING_COL_NAME, String rEF_GENERATION) {
         super();
         TABLE_CAT = tABLE_CAT;
         TABLE_SCHEM = tABLE_SCHEM;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
index 60df3a4..2ff21e4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/querymeta/TableMetaWithType.java
@@ -33,9 +33,7 @@ public class TableMetaWithType extends TableMeta {
 
     private HashSet<tableTypeEnum> TYPE;
 
-    public TableMetaWithType(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String tABLE_TYPE, String rEMARKS,
-            String tYPE_CAT, String tYPE_SCHEM, String tYPE_NAME, String sELF_REFERENCING_COL_NAME,
-            String rEF_GENERATION) {
+    public TableMetaWithType(String tABLE_CAT, String tABLE_SCHEM, String tABLE_NAME, String tABLE_TYPE, String rEMARKS, String tYPE_CAT, String tYPE_SCHEM, String tYPE_NAME, String sELF_REFERENCING_COL_NAME, String rEF_GENERATION) {
         TABLE_CAT = tABLE_CAT;
         TABLE_SCHEM = tABLE_SCHEM;
         TABLE_NAME = tABLE_NAME;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationRegistry.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationRegistry.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationRegistry.java
index 41557c7..2d1a4a5 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationRegistry.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationRegistry.java
@@ -94,8 +94,7 @@ public class RealizationRegistry {
         for (String clsName : providerNames) {
             try {
                 Class<? extends IRealizationProvider> cls = ClassUtil.forName(clsName, IRealizationProvider.class);
-                IRealizationProvider p = (IRealizationProvider) cls.getMethod("getInstance", KylinConfig.class)
-                        .invoke(null, config);
+                IRealizationProvider p = (IRealizationProvider) cls.getMethod("getInstance", KylinConfig.class).invoke(null, config);
                 providers.put(p.getRealizationType(), p);
 
             } catch (Exception | NoClassDefFoundError e) {
@@ -107,8 +106,7 @@ public class RealizationRegistry {
         }
 
         if (providers.isEmpty())
-            throw new IllegalArgumentException(
-                    "Failed to find realization provider by url: " + config.getMetadataUrl());
+            throw new IllegalArgumentException("Failed to find realization provider by url: " + config.getMetadataUrl());
 
         logger.info("RealizationRegistry is " + providers);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
index 1a514c9..872d2e4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
@@ -20,7 +20,7 @@ package org.apache.kylin.metadata.realization;
 
 import java.io.Serializable;
 
-public enum RealizationStatusEnum implements Serializable {
+public enum RealizationStatusEnum implements Serializable{
 
     DISABLED, READY, DESCBROKEN
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
index 840a0b3..9ce65bb 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigest.java
@@ -96,7 +96,7 @@ public class SQLDigest {
         this.sortOrders = sortOrders;
         this.isRawQuery = isRawQuery();
         this.limitPrecedesAggr = limitPrecedesAggr;
-
+        
         this.includeSubqueryJoinParticipants();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigestUtil.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigestUtil.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigestUtil.java
index 821116b..a70a17e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigestUtil.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/SQLDigestUtil.java
@@ -35,8 +35,7 @@ import com.google.common.collect.Range;
  */
 public class SQLDigestUtil {
 
-    public static <F, T> T appendTsFilterToExecute(SQLDigest sqlDigest, TblColRef partitionColRef, Range<Long> tsRange,
-            Function<F, T> action) {
+    public static <F, T> T appendTsFilterToExecute(SQLDigest sqlDigest, TblColRef partitionColRef, Range<Long> tsRange, Function<F, T> action) {
 
         // add the boundary condition to query real-time
         TupleFilter originalFilter = sqlDigest.filter;
@@ -81,8 +80,7 @@ public class SQLDigestUtil {
         return ret;
     }
 
-    private static TupleFilter createFilterForRealtime(TupleFilter originFilter, TblColRef partitionColRef,
-            Range<Long> tsRange) {
+    private static TupleFilter createFilterForRealtime(TupleFilter originFilter, TblColRef partitionColRef, Range<Long> tsRange) {
         DataType type = partitionColRef.getColumnDesc().getType();
 
         String startTimeStr, endTimeStr;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/realization/StreamSQLDigest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/StreamSQLDigest.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/StreamSQLDigest.java
index 3bb09ab..345350e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/StreamSQLDigest.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/StreamSQLDigest.java
@@ -58,29 +58,21 @@ public class StreamSQLDigest {
 
         StreamSQLDigest other = (StreamSQLDigest) o;
 
-        if (filterSerialized != null ? !Arrays.equals(filterSerialized, other.filterSerialized)
-                : other.filterSerialized != null)
+        if (filterSerialized != null ? !Arrays.equals(filterSerialized, other.filterSerialized) : other.filterSerialized != null)
             return false;
-        if (sqlDigest.aggregations != null ? !sqlDigest.aggregations.equals(other.sqlDigest.aggregations)
-                : other.sqlDigest.aggregations != null)
+        if (sqlDigest.aggregations != null ? !sqlDigest.aggregations.equals(other.sqlDigest.aggregations) : other.sqlDigest.aggregations != null)
             return false;
-        if (sqlDigest.allColumns != null ? !sqlDigest.allColumns.equals(other.sqlDigest.allColumns)
-                : other.sqlDigest.allColumns != null)
+        if (sqlDigest.allColumns != null ? !sqlDigest.allColumns.equals(other.sqlDigest.allColumns) : other.sqlDigest.allColumns != null)
             return false;
-        if (sqlDigest.factTable != null ? !sqlDigest.factTable.equals(other.sqlDigest.factTable)
-                : other.sqlDigest.factTable != null)
+        if (sqlDigest.factTable != null ? !sqlDigest.factTable.equals(other.sqlDigest.factTable) : other.sqlDigest.factTable != null)
             return false;
-        if (sqlDigest.filterColumns != null ? !sqlDigest.filterColumns.equals(other.sqlDigest.filterColumns)
-                : other.sqlDigest.filterColumns != null)
+        if (sqlDigest.filterColumns != null ? !sqlDigest.filterColumns.equals(other.sqlDigest.filterColumns) : other.sqlDigest.filterColumns != null)
             return false;
-        if (sqlDigest.groupbyColumns != null ? !sqlDigest.groupbyColumns.equals(other.sqlDigest.groupbyColumns)
-                : other.sqlDigest.groupbyColumns != null)
+        if (sqlDigest.groupbyColumns != null ? !sqlDigest.groupbyColumns.equals(other.sqlDigest.groupbyColumns) : other.sqlDigest.groupbyColumns != null)
             return false;
-        if (sqlDigest.joinDescs != null ? !sqlDigest.joinDescs.equals(other.sqlDigest.joinDescs)
-                : other.sqlDigest.joinDescs != null)
+        if (sqlDigest.joinDescs != null ? !sqlDigest.joinDescs.equals(other.sqlDigest.joinDescs) : other.sqlDigest.joinDescs != null)
             return false;
-        if (sqlDigest.metricColumns != null ? !sqlDigest.metricColumns.equals(other.sqlDigest.metricColumns)
-                : other.sqlDigest.metricColumns != null)
+        if (sqlDigest.metricColumns != null ? !sqlDigest.metricColumns.equals(other.sqlDigest.metricColumns) : other.sqlDigest.metricColumns != null)
             return false;
 
         return true;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/streaming/StreamingManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/streaming/StreamingManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/streaming/StreamingManager.java
index 4af0c22..48febeb 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/streaming/StreamingManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/streaming/StreamingManager.java
@@ -45,8 +45,7 @@ public class StreamingManager {
     // static cached instances
     private static final ConcurrentMap<KylinConfig, StreamingManager> CACHE = new ConcurrentHashMap<KylinConfig, StreamingManager>();
 
-    public static final Serializer<StreamingConfig> STREAMING_SERIALIZER = new JsonSerializer<StreamingConfig>(
-            StreamingConfig.class);
+    public static final Serializer<StreamingConfig> STREAMING_SERIALIZER = new JsonSerializer<StreamingConfig>(StreamingConfig.class);
 
     private KylinConfig config;
 
@@ -60,7 +59,7 @@ public class StreamingManager {
     private StreamingManager(KylinConfig config) throws IOException {
         this.config = config;
         this.streamingMap = new CaseInsensitiveStringCache<StreamingConfig>(config, "streaming");
-
+        
         // touch lower level metadata before registering my listener
         reloadAllStreaming();
         Broadcaster.getInstance(config).registerListener(new StreamingSyncListener(), "streaming");
@@ -73,8 +72,7 @@ public class StreamingManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if (event == Event.DROP)
                 removeStreamingLocal(cacheKey);
             else
@@ -119,8 +117,7 @@ public class StreamingManager {
     }
 
     private static String formatStreamingOutputPath(String streaming, List<Integer> partitions) {
-        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + StringUtils.join(partitions, "_")
-                + ".json";
+        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + StringUtils.join(partitions, "_") + ".json";
     }
 
     public StreamingConfig getStreamingConfig(String name) {
@@ -222,13 +219,11 @@ public class StreamingManager {
 
     private void reloadAllStreaming() throws IOException {
         ResourceStore store = getStore();
-        logger.info("Reloading Streaming Metadata from folder "
-                + store.getReadableResourcePath(ResourceStore.STREAMING_RESOURCE_ROOT));
+        logger.info("Reloading Streaming Metadata from folder " + store.getReadableResourcePath(ResourceStore.STREAMING_RESOURCE_ROOT));
 
         streamingMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.STREAMING_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.STREAMING_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             StreamingConfig streamingConfig;
             try {
@@ -238,8 +233,7 @@ public class StreamingManager {
                 continue;
             }
             if (path.equals(streamingConfig.getResourcePath()) == false) {
-                logger.error("Skip suspicious desc at " + path + ", " + streamingConfig + " should be at "
-                        + streamingConfig.getResourcePath());
+                logger.error("Skip suspicious desc at " + path + ", " + streamingConfig + " should be at " + streamingConfig.getResourcePath());
                 continue;
             }
             if (streamingMap.containsKey(streamingConfig.getName())) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java b/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
index c41d61b..b50a7e4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
@@ -188,8 +188,7 @@ public class Tuple implements ITuple {
         if (strValue == null)
             return null;
 
-        if ((strValue.equals("") || strValue.equals("\\N")) && !dataTypeName.equals("string")
-                && !dataTypeName.startsWith("varchar"))
+        if ((strValue.equals("") || strValue.equals("\\N")) && !dataTypeName.equals("string") && !dataTypeName.startsWith("varchar"))
             return null;
 
         switch (dataTypeName) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/ISource.java b/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
index 1811a5c..302c53c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
@@ -41,7 +41,7 @@ public interface ISource {
      * Return a ReadableTable that can iterate through the rows of given table.
      */
     IReadableTable createReadableTable(TableDesc tableDesc);
-
+    
     /**
      * Give the source a chance to enrich a SourcePartition before build start.
      * Particularly, Kafka source use this chance to define start/end offsets within each partition.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/ISourceMetadataExplorer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/ISourceMetadataExplorer.java b/core-metadata/src/main/java/org/apache/kylin/source/ISourceMetadataExplorer.java
index 48897c7..b746f19 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/ISourceMetadataExplorer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/ISourceMetadataExplorer.java
@@ -27,10 +27,10 @@ import org.apache.kylin.metadata.model.TableExtDesc;
 public interface ISourceMetadataExplorer {
 
     List<String> listDatabases() throws Exception;
-
+    
     List<String> listTables(String database) throws Exception;
-
+    
     Pair<TableDesc, TableExtDesc> loadTableMetadata(String database, String table) throws Exception;
-
+    
     List<String> getRelatedKylinResources(TableDesc table);
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java b/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
index cadd13c..86f89b8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
@@ -33,7 +33,7 @@ public class SourceFactory {
         Map<Integer, String> impls = KylinConfig.getInstanceFromEnv().getSourceEngines();
         sources = new ImplementationSwitch<>(impls, ISource.class);
     }
-
+    
     public static ISource getDefaultSource() {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         return sources.get(config.getDefaultSource());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java b/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java
index b77b55c..43e46c6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java
@@ -44,8 +44,7 @@ public class SourcePartition {
     public SourcePartition() {
     }
 
-    public SourcePartition(long startDate, long endDate, long startOffset, long endOffset,
-            Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) {
+    public SourcePartition(long startDate, long endDate, long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) {
         this.startDate = startDate;
         this.endDate = endDate;
         this.startOffset = startOffset;
@@ -104,10 +103,7 @@ public class SourcePartition {
 
     @Override
     public String toString() {
-        return Objects.toStringHelper(this).add("startDate", startDate).add("endDate", endDate)
-                .add("startOffset", startOffset).add("endOffset", endOffset)
-                .add("sourcePartitionOffsetStart", sourcePartitionOffsetStart.toString())
-                .add("sourcePartitionOffsetEnd", sourcePartitionOffsetEnd.toString()).toString();
+        return Objects.toStringHelper(this).add("startDate", startDate).add("endDate", endDate).add("startOffset", startOffset).add("endOffset", endOffset).add("sourcePartitionOffsetStart", sourcePartitionOffsetStart.toString()).add("sourcePartitionOffsetEnd", sourcePartitionOffsetEnd.toString()).toString();
     }
 
     public static SourcePartition getCopyOf(SourcePartition origin) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenConfig.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenConfig.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenConfig.java
index 292b2a0..62fe46c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenConfig.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenConfig.java
@@ -33,22 +33,22 @@ public class ColumnGenConfig {
     public static final String ID = "ID";
     public static final String RAND = "RAND";
     public static final String $RANDOM = "${RANDOM}";
-
+    
     // discrete values
     boolean isDiscrete;
     boolean isFK;
     List<String> values;
-
+    
     // random
     boolean isRandom;
     String randFormat;
     int randStart;
     int randEnd;
-
+    
     // ID
     boolean isID;
     int idStart;
-
+    
     // general
     int cardinality;
     boolean genNull;
@@ -56,19 +56,19 @@ public class ColumnGenConfig {
     String genNullStr;
     boolean order;
     boolean unique;
-
+    
     public ColumnGenConfig(ColumnDesc col, ModelDataGenerator modelGen) throws IOException {
         init(col, modelGen);
     }
 
     private void init(ColumnDesc col, ModelDataGenerator modelGen) throws IOException {
-
+        
         Map<String, String> config = Util.parseEqualCommaPairs(col.getDataGen(), "values");
 
         values = Arrays.asList(Util.parseString(config, "values", "").split("[|]"));
-
+        
         List<String> pkValues = modelGen.getPkValuesIfIsFk(col);
-
+        
         if (FK.equals(values.get(0)) || (values.get(0).isEmpty() && pkValues != null)) {
             isFK = true;
             values = getPkValues(modelGen, config, pkValues);
@@ -83,7 +83,7 @@ public class ColumnGenConfig {
         } else {
             isDiscrete = true;
         }
-
+        
         cardinality = Util.parseInt(config, "card", guessCardinality(col.getName()));
         genNull = Util.parseBoolean(config, "null", guessGenNull(col.getName()));
         genNullPct = Util.parseDouble(config, "nullpct", 0.01);
@@ -92,19 +92,17 @@ public class ColumnGenConfig {
         unique = Util.parseBoolean(config, "uniq", modelGen.isPK(col));
     }
 
-    private List<String> getPkValues(ModelDataGenerator modelGen, Map<String, String> config, List<String> dftPkValues)
-            throws IOException {
+    private List<String> getPkValues(ModelDataGenerator modelGen, Map<String, String> config, List<String> dftPkValues) throws IOException {
         String pkColName = config.get("pk");
         if (pkColName == null)
             return dftPkValues;
-
+        
         int cut = pkColName.lastIndexOf('.');
         String pkTableName = pkColName.substring(0, cut);
         pkColName = pkColName.substring(cut + 1);
-
+        
         KylinConfig kylinConfig = modelGen.getModle().getConfig();
-        ColumnDesc pkcol = MetadataManager.getInstance(kylinConfig).getTableDesc(pkTableName)
-                .findColumnByName(pkColName);
+        ColumnDesc pkcol = MetadataManager.getInstance(kylinConfig).getTableDesc(pkTableName).findColumnByName(pkColName);
         return modelGen.getPkValues(pkcol);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
index 858844f..f086ae9 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
@@ -250,7 +250,7 @@ public class ColumnGenerator {
         public String next() {
             if (values.isEmpty())
                 return null;
-
+            
             return values.get(rand.nextInt(values.size()));
         }
     }
@@ -312,7 +312,7 @@ public class ColumnGenerator {
             if (input.hasNext()) {
                 r = input.next();
             }
-
+            
             if (rand.nextDouble() < nullPct) {
                 r = nullStr;
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
index fe8a236..3caf2f4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
@@ -65,7 +65,7 @@ public class ModelDataGenerator {
     private ModelDataGenerator(DataModelDesc model, int nRows, ResourceStore outputStore) {
         this(model, nRows, outputStore, "/data");
     }
-
+    
     private ModelDataGenerator(DataModelDesc model, int nRows, ResourceStore outputStore, String outputPath) {
         this.model = model;
         this.targetRows = nRows;
@@ -79,10 +79,9 @@ public class ModelDataGenerator {
 
         JoinTableDesc[] allTables = model.getJoinTables();
         for (int i = allTables.length - 1; i >= -1; i--) {
-            TableDesc table = (i == -1) ? model.getRootFactTable().getTableDesc()
-                    : allTables[i].getTableRef().getTableDesc();
+            TableDesc table = (i == -1) ? model.getRootFactTable().getTableDesc() : allTables[i].getTableRef().getTableDesc();
             allTableDesc.add(table);
-
+            
             if (generated.contains(table))
                 continue;
 
@@ -176,7 +175,7 @@ public class ModelDataGenerator {
         for (TableDesc t : tables) {
             if (t.isView())
                 continue;
-
+            
             out.print("DROP TABLE IF EXISTS " + normHiveIdentifier(t.getIdentity()) + ";\n");
 
             out.print("CREATE TABLE " + normHiveIdentifier(t.getIdentity()) + "(" + "\n");
@@ -217,16 +216,15 @@ public class ModelDataGenerator {
                 out.print("-- " + t.getIdentity() + " is view \n");
                 continue;
             }
-
-            out.print("LOAD DATA LOCAL INPATH '" + t.getIdentity() + ".csv' OVERWRITE INTO TABLE "
-                    + normHiveIdentifier(t.getIdentity()) + ";\n");
+            
+            out.print("LOAD DATA LOCAL INPATH '" + t.getIdentity() + ".csv' OVERWRITE INTO TABLE " + normHiveIdentifier(t.getIdentity()) + ";\n");
         }
     }
 
     public boolean existsInStore(TableDesc table) throws IOException {
         return outputStore.exists(path(table));
     }
-
+    
     public boolean isPK(ColumnDesc col) {
         for (JoinTableDesc joinTable : model.getJoinTables()) {
             JoinDesc join = joinTable.getJoin();
@@ -237,7 +235,7 @@ public class ModelDataGenerator {
         }
         return false;
     }
-
+    
     public List<String> getPkValuesIfIsFk(ColumnDesc fk) throws IOException {
         JoinTableDesc[] joinTables = model.getJoinTables();
         for (int i = 0; i < joinTables.length; i++) {
@@ -268,8 +266,7 @@ public class ModelDataGenerator {
 
         List<String> r = new ArrayList<>();
 
-        BufferedReader in = new BufferedReader(
-                new InputStreamReader(outputStore.getResource(path(pk.getTable())).inputStream, "UTF-8"));
+        BufferedReader in = new BufferedReader(new InputStreamReader(outputStore.getResource(path(pk.getTable())).inputStream, "UTF-8"));
         try {
             String line;
             while ((line = in.readLine()) != null) {
@@ -305,12 +302,11 @@ public class ModelDataGenerator {
         String modelName = args[0];
         int nRows = Integer.parseInt(args[1]);
         String outputDir = args.length > 2 ? args[2] : null;
-
+        
         KylinConfig conf = KylinConfig.getInstanceFromEnv();
         DataModelDesc model = MetadataManager.getInstance(conf).getDataModelDesc(modelName);
-        ResourceStore store = outputDir == null ? ResourceStore.getStore(conf)
-                : ResourceStore.getStore(mockup(outputDir));
-
+        ResourceStore store = outputDir == null ? ResourceStore.getStore(conf) : ResourceStore.getStore(mockup(outputDir));
+        
         ModelDataGenerator gen = new ModelDataGenerator(model, nRows, store);
         gen.generate();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/datagen/TableGenConfig.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/TableGenConfig.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/TableGenConfig.java
index 7a1a540..a0f19e7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/TableGenConfig.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/TableGenConfig.java
@@ -24,28 +24,28 @@ import java.util.Map;
 import org.apache.kylin.metadata.model.TableDesc;
 
 public class TableGenConfig {
-
+    
     boolean needGen;
     double rows;
-
+    
     public TableGenConfig(TableDesc table, ModelDataGenerator modelGen) throws IOException {
         String dataGen = table.getDataGen();
         if (dataGen == null && modelGen.existsInStore(table) == false) {
             dataGen = "";
         }
-
+        
         if (dataGen == null || "no".equals(dataGen) || "false".equals(dataGen) || "skip".equals(dataGen))
             return;
-
+        
         if (table.isView())
             return;
-
+        
         needGen = true;
-
+        
         Map<String, String> config = Util.parseEqualCommaPairs(dataGen, "rows");
-
+        
         // config.rows is either a multiplier (0,1] or an absolute row number
         rows = Util.parseDouble(config, "rows", modelGen.getModle().isFactTable(table.getIdentity()) ? 1.0 : 20);
     }
-
+    
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/source/datagen/Util.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/Util.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/Util.java
index 87f3ab1..ca27bbf 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/Util.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/Util.java
@@ -27,7 +27,7 @@ public class Util {
 
     static Map<String, String> parseEqualCommaPairs(String equalCommaPairs, String defaultKey) {
         Map<String, String> r = new LinkedHashMap<>();
-
+        
         if (StringUtils.isBlank(equalCommaPairs))
             return r;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/dimension/FixedLenHexDimEncTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/dimension/FixedLenHexDimEncTest.java b/core-metadata/src/test/java/org/apache/kylin/dimension/FixedLenHexDimEncTest.java
index 942ebee..d9a1a0f 100644
--- a/core-metadata/src/test/java/org/apache/kylin/dimension/FixedLenHexDimEncTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/dimension/FixedLenHexDimEncTest.java
@@ -96,15 +96,16 @@ public class FixedLenHexDimEncTest {
         }
     }
 
+
     @Test
     public void testEncodeDecode2() {
         FixedLenHexDimEnc enc = new FixedLenHexDimEnc(5);
         testEncodeDecode(enc, "AF121");
         testEncodeDecode(enc, "00000");
-
+        
         //with a little extra room all F is supported
         testEncodeDecode(enc, "FFFFF");
-
+        
         try {
             testEncodeDecode(enc, "FFF");
             Assert.fail();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/dimension/IntegerDimEncTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/dimension/IntegerDimEncTest.java b/core-metadata/src/test/java/org/apache/kylin/dimension/IntegerDimEncTest.java
index 14566b9..039e056 100644
--- a/core-metadata/src/test/java/org/apache/kylin/dimension/IntegerDimEncTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/dimension/IntegerDimEncTest.java
@@ -39,13 +39,9 @@ public class IntegerDimEncTest {
         successValue.add(new long[] { -32767, -127, 0, 127, 32767 });
         successValue.add(new long[] { -8388607, -32767, -127, 0, 127, 32767, 8388607 });
         successValue.add(new long[] { -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L });
-        successValue.add(new long[] { -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607,
-                2147483647L, 549755813887L });
-        successValue.add(new long[] { -140737488355327L, -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127,
-                32767, 8388607, 2147483647L, 549755813887L, 140737488355327L });
-        successValue.add(new long[] { -36028797018963967L, -140737488355327L, -549755813887L, -2147483647L, -8388607,
-                -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L, 140737488355327L,
-                36028797018963967L });
+        successValue.add(new long[] { -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L });
+        successValue.add(new long[] { -140737488355327L, -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L, 140737488355327L });
+        successValue.add(new long[] { -36028797018963967L, -140737488355327L, -549755813887L, -2147483647L, -8388607, -32767, -127, 0, 127, 32767, 8388607, 2147483647L, 549755813887L, 140737488355327L, 36028797018963967L });
         successValue.add(new long[] { //
                 -9223372036854775807L, //
                 -36028797018963967L, //

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/dimension/OneMoreByteVLongDimEncTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/dimension/OneMoreByteVLongDimEncTest.java b/core-metadata/src/test/java/org/apache/kylin/dimension/OneMoreByteVLongDimEncTest.java
index d62600d..17af5c1 100644
--- a/core-metadata/src/test/java/org/apache/kylin/dimension/OneMoreByteVLongDimEncTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/dimension/OneMoreByteVLongDimEncTest.java
@@ -87,6 +87,7 @@ public class OneMoreByteVLongDimEncTest {
         }
     }
 
+ 
     private void testEncodeDecode(OneMoreByteVLongDimEnc enc, long value) {
         String valueStr = "" + value;
         byte[] buf = new byte[enc.getLengthOfEncoding()];
@@ -95,6 +96,7 @@ public class OneMoreByteVLongDimEncTest {
         Assert.assertEquals(valueStr, decode);
     }
 
+
     @Test
     public void testSerDes() {
         OneMoreByteVLongDimEnc enc = new OneMoreByteVLongDimEnc(2);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
index e51e839..4e67d22 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
@@ -84,8 +84,11 @@ public class AggregatorMemEstimateTest extends LocalFileMetadataTestCase {
         decimalMax.aggregate(decimal);
         decimalSum.aggregate(decimal);
 
-        return Lists.newArrayList(longMin, longMax, longSum, doubleMin, doubleMax, doubleSum, decimalMin, decimalMax,
-                decimalSum);
+        return Lists.newArrayList(
+                longMin, longMax, longSum,
+                doubleMin, doubleMax, doubleSum,
+                decimalMin, decimalMax, decimalSum
+        );
     }
 
     private String getAggregatorName(Class<? extends MeasureAggregator> clazz) {
@@ -108,8 +111,7 @@ public class AggregatorMemEstimateTest extends LocalFileMetadataTestCase {
         }
         bitmapAggregator.aggregate(bitmapCounter);
 
-        ExtendedColumnMeasureType extendedColumnType = new ExtendedColumnMeasureType("EXTENDED_COLUMN",
-                DataType.getType("extendedcolumn(100)"));
+        ExtendedColumnMeasureType extendedColumnType = new ExtendedColumnMeasureType("EXTENDED_COLUMN", DataType.getType("extendedcolumn(100)"));
         MeasureAggregator<ByteArray> extendedColumnAggregator = extendedColumnType.newAggregator();
         extendedColumnAggregator.aggregate(new ByteArray(100));
 
@@ -121,8 +123,7 @@ public class AggregatorMemEstimateTest extends LocalFileMetadataTestCase {
         System.out.printf("%40s %10s %10s\n", "Class", "Estimate", "Actual");
         for (MeasureAggregator aggregator : aggregators) {
             String clzName = getAggregatorName(aggregator.getClass());
-            System.out.printf("%40s %10d %10d\n", clzName, aggregator.getMemBytesEstimate(),
-                    meter.measureDeep(aggregator));
+            System.out.printf("%40s %10d %10d\n", clzName, aggregator.getMemBytesEstimate(), meter.measureDeep(aggregator));
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapAggregatorTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapAggregatorTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapAggregatorTest.java
index 4ce82d0..0b82fc4 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapAggregatorTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapAggregatorTest.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.measure.bitmap;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 public class BitmapAggregatorTest {
     private static final BitmapCounterFactory factory = RoaringBitmapCounterFactory.INSTANCE;
@@ -85,5 +85,6 @@ public class BitmapAggregatorTest {
         result.orWith(counter3);
         assertEquals(result, aggregator.getState());
 
+
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapCounterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapCounterTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapCounterTest.java
index 6d0c97b..7194a23 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapCounterTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapCounterTest.java
@@ -18,12 +18,12 @@
 
 package org.apache.kylin.measure.bitmap;
 
+import org.junit.Test;
+
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.junit.Test;
-
 public class BitmapCounterTest {
     private static final BitmapCounterFactory factory = RoaringBitmapCounterFactory.INSTANCE;
 
@@ -43,17 +43,17 @@ public class BitmapCounterTest {
 
         counter2.orWith(counter);
         assertEquals(4, counter.getCount());
-        assertEquals(6, counter2.getCount()); // in-place change
+        assertEquals(6, counter2.getCount());  // in-place change
 
         int i = 0;
         int[] values = new int[(int) counter2.getCount()];
         for (int value : counter2) {
             values[i++] = value;
         }
-        assertArrayEquals(new int[] { 10, 20, 30, 40, 1000, 2000 }, values);
+        assertArrayEquals(new int[]{10, 20, 30, 40, 1000, 2000}, values);
 
         counter2.clear();
         assertEquals(0, counter2.getCount());
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapSerializerTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapSerializerTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapSerializerTest.java
index 69aba9a..acbfe88 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapSerializerTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/bitmap/BitmapSerializerTest.java
@@ -18,12 +18,6 @@
 
 package org.apache.kylin.measure.bitmap;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.nio.BufferOverflowException;
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.metadata.datatype.DataType;
 import org.junit.AfterClass;
@@ -31,6 +25,12 @@ import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class BitmapSerializerTest extends LocalFileMetadataTestCase {
     @BeforeClass
     public static void setUp() throws Exception {
@@ -74,4 +74,4 @@ public class BitmapSerializerTest extends LocalFileMetadataTestCase {
             assertTrue(e instanceof BufferOverflowException);
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/hllc/HLLCMeasureTypeTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/hllc/HLLCMeasureTypeTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/hllc/HLLCMeasureTypeTest.java
index 40b2567..e884094 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/hllc/HLLCMeasureTypeTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/hllc/HLLCMeasureTypeTest.java
@@ -43,23 +43,22 @@ public class HLLCMeasureTypeTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testIngest() {
-        MeasureType<HLLCounter> mtype = (MeasureType<HLLCounter>) MeasureTypeFactory
-                .create(HLLCMeasureType.FUNC_COUNT_DISTINCT, DataType.getType("hllc(10)"));
+        MeasureType<HLLCounter> mtype = (MeasureType<HLLCounter>) MeasureTypeFactory.create(HLLCMeasureType.FUNC_COUNT_DISTINCT, DataType.getType("hllc(10)"));
         MeasureIngester<HLLCounter> ingester = mtype.newIngester();
         HLLCounter hllc;
-
+        
         hllc = ingester.valueOf(new String[] { null }, null, null);
         assertEquals(0, hllc.getCountEstimate());
-
+        
         hllc = ingester.valueOf(new String[] { null, null }, null, null);
         assertEquals(0, hllc.getCountEstimate());
-
+        
         hllc = ingester.valueOf(new String[] { "" }, null, null);
         assertEquals(1, hllc.getCountEstimate());
-
+        
         hllc = ingester.valueOf(new String[] { "", null }, null, null);
         assertEquals(1, hllc.getCountEstimate());
-
+        
         hllc = ingester.valueOf(new String[] { "abc" }, null, null);
         assertEquals(1, hllc.getCountEstimate());
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
index ba7cbdd..94a1233 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/percentile/PercentileCounterTest.java
@@ -109,7 +109,7 @@ public class PercentileCounterTest {
         ObjectInputStream in = null;
         try {
             in = new ObjectInputStream(is);
-            serialized_counter = (PercentileCounter) in.readObject();
+            serialized_counter = (PercentileCounter)in.readObject();
 
             Assert.assertNotNull(serialized_counter);
             Assert.assertNotNull(serialized_counter.registers);


[62/67] [abbrv] kylin git commit: minor, enhance ClasspathScanner

Posted by li...@apache.org.
minor, enhance ClasspathScanner


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/21a17688
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/21a17688
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/21a17688

Branch: refs/heads/master
Commit: 21a17688d351c533a6032f8d314ff41efc00f51d
Parents: 0a95de0
Author: Yang Li <li...@apache.org>
Authored: Sat Jun 3 18:28:57 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Sat Jun 3 21:35:01 2017 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/common/util/ClasspathScanner.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/21a17688/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java b/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
index 365caa8..be41985 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ClasspathScanner.java
@@ -55,7 +55,7 @@ public class ClasspathScanner {
             private void check(String base, String relativePath) {
                 boolean hit = false;
                 for (int i = 0; i < args.length && !hit; i++) {
-                    hit = relativePath.endsWith(args[i]) || match(args[i], relativePath);
+                    hit = relativePath.contains(args[i]) || match(args[i], relativePath);
                 }
 
                 if (hit) {
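
The one-line change above widens how a scan argument matches a classpath entry: a hit previously required the relative path to end with the argument, whereas it now only needs to contain it (the wildcard match() branch is unchanged). Below is a small standalone sketch of the difference; the demo class and the sample path are illustrative, not part of the commit.

    // Illustration only: suffix vs. substring matching as in the change above.
    public class MatchDemo {
        public static void main(String[] args) {
            String relativePath = "org/apache/kylin/common/util/ClasspathScanner.class";

            // Old rule: only a suffix of the path counts as a hit.
            System.out.println(relativePath.endsWith("kylin"));                  // false
            // New rule: any substring of the path counts as a hit.
            System.out.println(relativePath.contains("kylin"));                  // true

            // Exact file names still hit under both rules.
            System.out.println(relativePath.endsWith("ClasspathScanner.class")); // true
            System.out.println(relativePath.contains("ClasspathScanner.class")); // true
        }
    }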


[59/67] [abbrv] kylin git commit: minor, refine sample.sh

Posted by li...@apache.org.
minor, refine sample.sh


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/944d3aa1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/944d3aa1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/944d3aa1

Branch: refs/heads/master
Commit: 944d3aa1155d299ffe69e194f316cf29777120c8
Parents: de193bc
Author: Cheng Wang <ch...@kyligence.io>
Authored: Fri Jun 2 14:29:29 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Fri Jun 2 14:31:21 2017 +0800

----------------------------------------------------------------------
 build/bin/sample.sh | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/944d3aa1/build/bin/sample.sh
----------------------------------------------------------------------
diff --git a/build/bin/sample.sh b/build/bin/sample.sh
index bc9ba5a..babab86 100644
--- a/build/bin/sample.sh
+++ b/build/bin/sample.sh
@@ -24,7 +24,6 @@ source ${dir}/find-hadoop-conf-dir.sh
 source ${dir}/load-hive-conf.sh
 
 source ${dir}/check-env.sh "if-not-yet"
-job_jar=`find -L ${KYLIN_HOME}/lib/ -name kylin-job*.jar`
 
 cd ${KYLIN_HOME}/sample_cube/data
 
@@ -98,6 +97,6 @@ cd ${KYLIN_HOME}/sample_cube/metadata/table
 ls -1 DEFAULT.KYLIN_*.json|sed "s/\(DEFAULT\)\(.*\)\.json/mv & $sample_database\2.json/"|sh -v
 
 cd ${KYLIN_HOME}
-hbase org.apache.hadoop.util.RunJar ${job_jar} org.apache.kylin.common.persistence.ResourceTool upload ${KYLIN_HOME}/sample_cube/metadata  || { exit 1; }
+${dir}/kylin.sh org.apache.kylin.common.persistence.ResourceTool upload ${KYLIN_HOME}/sample_cube/metadata  || { exit 1; }
 echo "Sample cube is created successfully in project 'learn_kylin'."
 echo "Restart Kylin server or reload the metadata from web UI to see the change."


[10/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 81561e4..f932509 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -83,8 +83,7 @@ import com.google.common.collect.Maps;
 public class CubeController extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(CubeController.class);
 
-    private static final char[] VALID_CUBENAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
-            .toCharArray();
+    private static final char[] VALID_CUBENAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
 
     @Autowired
     @Qualifier("cubeMgmtService")
@@ -100,11 +99,7 @@ public class CubeController extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<CubeInstance> getCubes(@RequestParam(value = "cubeName", required = false) String cubeName,
-            @RequestParam(value = "modelName", required = false) String modelName,
-            @RequestParam(value = "projectName", required = false) String projectName,
-            @RequestParam(value = "limit", required = false) Integer limit,
-            @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<CubeInstance> getCubes(@RequestParam(value = "cubeName", required = false) String cubeName, @RequestParam(value = "modelName", required = false) String modelName, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         List<CubeInstance> cubes;
         cubes = cubeService.listAllCubes(cubeName, projectName, modelName);
 
@@ -153,8 +148,7 @@ public class CubeController extends BasicController {
      * @throws UnknownHostException
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/sql", method = { RequestMethod.GET }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/sql", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
     public GeneralResponse getSql(@PathVariable String cubeName, @PathVariable String segmentName) {
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -174,8 +168,7 @@ public class CubeController extends BasicController {
      * @param notifyList
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
     public void updateNotifyList(@PathVariable String cubeName, @RequestBody List<String> notifyList) {
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -215,11 +208,9 @@ public class CubeController extends BasicController {
      *
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = {
-            RequestMethod.PUT }, produces = { "application/json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
-    public CubeInstance rebuildLookupSnapshot(@PathVariable String cubeName, @PathVariable String segmentName,
-            @RequestParam(value = "lookupTable") String lookupTable) {
+    public CubeInstance rebuildLookupSnapshot(@PathVariable String cubeName, @PathVariable String segmentName, @RequestParam(value = "lookupTable") String lookupTable) {
         try {
             final CubeManager cubeMgr = cubeService.getCubeManager();
             final CubeInstance cube = cubeMgr.getCube(cubeName);
@@ -235,8 +226,7 @@ public class CubeController extends BasicController {
      *
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = { "application/json" })
     @ResponseBody
     public CubeInstance deleteSegment(@PathVariable String cubeName, @PathVariable String segmentName) {
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -271,8 +261,7 @@ public class CubeController extends BasicController {
     @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
     public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
-        return buildInternal(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, null, null, req.getBuildType(),
-                req.isForce() || req.isForceMergeEmptySegment());
+        return buildInternal(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, null, null, req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment());
     }
 
     /** Build/Rebuild a cube segment by source offset */
@@ -298,14 +287,11 @@ public class CubeController extends BasicController {
     @RequestMapping(value = "/{cubeName}/rebuild2", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
     public JobInstance rebuild2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
-        return buildInternal(cubeName, 0, 0, req.getSourceOffsetStart(), req.getSourceOffsetEnd(),
-                req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(),
-                req.isForce());
+        return buildInternal(cubeName, 0, 0, req.getSourceOffsetStart(), req.getSourceOffsetEnd(), req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(), req.isForce());
     }
 
     private JobInstance buildInternal(String cubeName, long startTime, long endTime, //
-            long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart,
-            Map<Integer, Long> sourcePartitionOffsetEnd, String buildType, boolean force) {
+            long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd, String buildType, boolean force) {
         try {
             String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
             CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
@@ -317,8 +303,7 @@ public class CubeController extends BasicController {
                 throw new BadRequestException("Cannot build draft cube");
             }
             return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //
-                    sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force,
-                    submitter);
+                    sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force, submitter);
         } catch (Throwable e) {
             logger.error(e.getLocalizedMessage(), e);
             throw new InternalErrorException(e.getLocalizedMessage(), e);
@@ -466,8 +451,7 @@ public class CubeController extends BasicController {
 
         try {
             desc.setUuid(UUID.randomUUID().toString());
-            String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
-                    : cubeRequest.getProject();
+            String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : cubeRequest.getProject();
             cubeService.createCubeAndDesc(name, projectName, desc);
         } catch (Exception e) {
             logger.error("Failed to deal with the request.", e);
@@ -495,8 +479,7 @@ public class CubeController extends BasicController {
             return cubeRequest;
         }
 
-        String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
-                : cubeRequest.getProject();
+        String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : cubeRequest.getProject();
         try {
             CubeInstance cube = cubeService.getCubeManager().getCube(cubeRequest.getCubeName());
 
@@ -508,15 +491,13 @@ public class CubeController extends BasicController {
 
             //cube renaming is not allowed
             if (!cube.getDescriptor().getName().equalsIgnoreCase(desc.getName())) {
-                String error = "Cube Desc renaming is not allowed: desc.getName(): " + desc.getName()
-                        + ", cubeRequest.getCubeName(): " + cubeRequest.getCubeName();
+                String error = "Cube Desc renaming is not allowed: desc.getName(): " + desc.getName() + ", cubeRequest.getCubeName(): " + cubeRequest.getCubeName();
                 updateRequest(cubeRequest, false, error);
                 return cubeRequest;
             }
 
             if (cube.getSegments().size() != 0 && !cube.getDescriptor().consistentWith(desc)) {
-                String error = "CubeDesc " + desc.getName()
-                        + " is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
+                String error = "CubeDesc " + desc.getName() + " is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
                 updateRequest(cubeRequest, false, error);
                 return cubeRequest;
             }
@@ -666,8 +647,7 @@ public class CubeController extends BasicController {
      * @param cubeName
      * @return
      */
-    @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
     public GeneralResponse initStartOffsets(@PathVariable String cubeName) {
         checkCubeName(cubeName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
index 8df13c1..108ec5a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/DiagnosisController.java
@@ -72,11 +72,9 @@ public class DiagnosisController extends BasicController {
     /**
      * Get diagnosis information for project
      */
-    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public void dumpProjectDiagnosisInfo(@PathVariable String project, final HttpServletRequest request,
-            final HttpServletResponse response) {
+    public void dumpProjectDiagnosisInfo(@PathVariable String project, final HttpServletRequest request, final HttpServletResponse response) {
         String filePath;
         try {
             filePath = dgService.dumpProjectDiagnosisInfo(project);
@@ -92,8 +90,7 @@ public class DiagnosisController extends BasicController {
      */
     @RequestMapping(value = "/job/{jobId}/download", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public void dumpJobDiagnosisInfo(@PathVariable String jobId, final HttpServletRequest request,
-            final HttpServletResponse response) {
+    public void dumpJobDiagnosisInfo(@PathVariable String jobId, final HttpServletRequest request, final HttpServletResponse response) {
         String filePath;
         try {
             filePath = dgService.dumpJobDiagnosisInfo(jobId);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
index 92212c4..4ae7656 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ExternalFilterController.java
@@ -80,8 +80,7 @@ public class ExternalFilterController extends BasicController {
 
     @RequestMapping(value = "/{filter}/{project}", method = { RequestMethod.DELETE }, produces = { "application/json" })
     @ResponseBody
-    public Map<String, String> removeFilter(@PathVariable String filter, @PathVariable String project)
-            throws IOException {
+    public Map<String, String> removeFilter(@PathVariable String filter, @PathVariable String project) throws IOException {
         Map<String, String> result = new HashMap<String, String>();
         extFilterService.removeExtFilterFromProject(filter, project);
         extFilterService.removeExternalFilter(filter);
@@ -91,8 +90,7 @@ public class ExternalFilterController extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<ExternalFilterDesc> getExternalFilters(@RequestParam(value = "project", required = true) String project)
-            throws IOException {
+    public List<ExternalFilterDesc> getExternalFilters(@RequestParam(value = "project", required = true) String project) throws IOException {
         List<ExternalFilterDesc> filterDescs = Lists.newArrayList();
         filterDescs.addAll(extFilterService.getProjectManager().listExternalFilterDescs(project).values());
         return filterDescs;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/HybridController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/HybridController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/HybridController.java
index d70b78f..f23f26c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/HybridController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/HybridController.java
@@ -46,8 +46,7 @@ public class HybridController extends BasicController {
         checkRequiredArg("project", request.getProject());
         checkRequiredArg("model", request.getModel());
         checkRequiredArg("cubes", request.getCubes());
-        HybridInstance instance = hybridService.createHybridCube(request.getHybrid(), request.getProject(),
-                request.getModel(), request.getCubes());
+        HybridInstance instance = hybridService.createHybridCube(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
         return instance;
     }
 
@@ -58,8 +57,7 @@ public class HybridController extends BasicController {
         checkRequiredArg("project", request.getProject());
         checkRequiredArg("model", request.getModel());
         checkRequiredArg("cubes", request.getCubes());
-        HybridInstance instance = hybridService.updateHybridCube(request.getHybrid(), request.getProject(),
-                request.getModel(), request.getCubes());
+        HybridInstance instance = hybridService.updateHybridCube(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
         return instance;
     }
 
@@ -74,8 +72,7 @@ public class HybridController extends BasicController {
 
     @RequestMapping(value = "", method = RequestMethod.GET, produces = { "application/json" })
     @ResponseBody
-    public Collection<HybridInstance> list(@RequestParam(required = false) String project,
-            @RequestParam(required = false) String model) {
+    public Collection<HybridInstance> list(@RequestParam(required = false) String project, @RequestParam(required = false) String model) {
         return hybridService.listHybrids(project, model);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/JobController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/JobController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/JobController.java
index 749c872..7c9c40d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/JobController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/JobController.java
@@ -72,8 +72,7 @@ public class JobController extends BasicController {
         JobTimeFilterEnum timeFilter = JobTimeFilterEnum.getByCode(jobRequest.getTimeFilter());
 
         try {
-            jobInstanceList = jobService.searchJobs(jobRequest.getCubeName(), jobRequest.getProjectName(), statusList,
-                    jobRequest.getLimit(), jobRequest.getOffset(), timeFilter);
+            jobInstanceList = jobService.searchJobs(jobRequest.getCubeName(), jobRequest.getProjectName(), statusList, jobRequest.getLimit(), jobRequest.getOffset(), timeFilter);
         } catch (Exception e) {
             logger.error(e.getLocalizedMessage(), e);
             throw new InternalErrorException(e);
@@ -107,8 +106,7 @@ public class JobController extends BasicController {
      * @return
      * @throws IOException
      */
-    @RequestMapping(value = "/{jobId}/steps/{stepId}/output", method = { RequestMethod.GET }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{jobId}/steps/{stepId}/output", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
     public Map<String, String> getStepOutput(@PathVariable String jobId, @PathVariable String stepId) {
         Map<String, String> result = new HashMap<String, String>();
@@ -182,8 +180,7 @@ public class JobController extends BasicController {
      * @return
      * @throws IOException
      */
-    @RequestMapping(value = "/{jobId}/steps/{stepId}/rollback", method = { RequestMethod.PUT }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{jobId}/steps/{stepId}/rollback", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
     public JobInstance rollback(@PathVariable String jobId, @PathVariable String stepId) {
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
index ea293a0..4226e87 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
@@ -63,8 +63,7 @@ import com.fasterxml.jackson.databind.JsonMappingException;
 public class ModelController extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(ModelController.class);
 
-    private static final char[] VALID_MODELNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
-            .toCharArray();
+    private static final char[] VALID_MODELNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
 
     @Autowired
     @Qualifier("modelMgmtService")
@@ -76,10 +75,7 @@ public class ModelController extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<DataModelDesc> getModels(@RequestParam(value = "modelName", required = false) String modelName,
-            @RequestParam(value = "projectName", required = false) String projectName,
-            @RequestParam(value = "limit", required = false) Integer limit,
-            @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<DataModelDesc> getModels(@RequestParam(value = "modelName", required = false) String modelName, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         try {
             return modelService.getModels(modelName, projectName, limit, offset);
         } catch (IOException e) {
@@ -113,8 +109,7 @@ public class ModelController extends BasicController {
 
         try {
             modelDesc.setUuid(UUID.randomUUID().toString());
-            String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
-                    : modelRequest.getProject();
+            String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : modelRequest.getProject();
 
             modelService.createModelDesc(projectName, modelDesc);
         } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
index 2e4f633..89c8b23 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
@@ -54,8 +54,7 @@ import org.springframework.web.bind.annotation.ResponseBody;
 public class ProjectController extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(ProjectController.class);
 
-    private static final char[] VALID_PROJECTNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
-            .toCharArray();
+    private static final char[] VALID_PROJECTNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
 
     @Autowired
     @Qualifier("projectService")
@@ -79,15 +78,13 @@ public class ProjectController extends BasicController {
      */
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<ProjectInstance> getProjects(@RequestParam(value = "limit", required = false) Integer limit,
-            @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<ProjectInstance> getProjects(@RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         return projectService.listProjects(limit, offset);
     }
 
     @RequestMapping(value = "/readable", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<ProjectInstance> getReadableProjects(@RequestParam(value = "limit", required = false) Integer limit,
-            @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<ProjectInstance> getReadableProjects(@RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
 
         List<ProjectInstance> readableProjects = new ArrayList<ProjectInstance>();
 
@@ -124,7 +121,7 @@ public class ProjectController extends BasicController {
                     }
                 }
             }
-
+            
             if (hasProjectPermission) {
                 readableProjects.add(projectInstance);
             }
@@ -143,8 +140,7 @@ public class ProjectController extends BasicController {
         }
 
         if (!StringUtils.containsOnly(projectDesc.getName(), VALID_PROJECTNAME)) {
-            logger.info("Invalid Project name {}, only letters, numbers and underline supported.",
-                    projectDesc.getName());
+            logger.info("Invalid Project name {}, only letters, numbers and underline supported.", projectDesc.getName());
             throw new BadRequestException("Invalid Project name, only letters, numbers and underline supported.");
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
index 5338acf..f6bfe3e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
@@ -26,10 +26,10 @@ import java.util.List;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
-import org.apache.kylin.metadata.querymeta.TableMeta;
 import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.model.Query;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+import org.apache.kylin.metadata.querymeta.TableMeta;
 import org.apache.kylin.rest.request.MetaRequest;
 import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.request.SQLRequest;
@@ -82,8 +82,7 @@ public class QueryController extends BasicController {
     @ResponseBody
     public void saveQuery(@RequestBody SaveSqlRequest sqlRequest) throws IOException {
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
-        Query newQuery = new Query(sqlRequest.getName(), sqlRequest.getProject(), sqlRequest.getSql(),
-                sqlRequest.getDescription());
+        Query newQuery = new Query(sqlRequest.getName(), sqlRequest.getProject(), sqlRequest.getSql(), sqlRequest.getDescription());
 
         queryService.saveQuery(creator, newQuery);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index 058182c..b0bb02a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -77,9 +77,7 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "/getConfig", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<StreamingConfig> getStreamings(@RequestParam(value = "table", required = false) String table,
-            @RequestParam(value = "limit", required = false) Integer limit,
-            @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<StreamingConfig> getStreamings(@RequestParam(value = "table", required = false) String table, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         try {
             return streamingService.getStreamingConfigs(table, limit, offset);
         } catch (IOException e) {
@@ -90,10 +88,7 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "/getKfkConfig", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<KafkaConfig> getKafkaConfigs(
-            @RequestParam(value = "kafkaConfigName", required = false) String kafkaConfigName,
-            @RequestParam(value = "limit", required = false) Integer limit,
-            @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<KafkaConfig> getKafkaConfigs(@RequestParam(value = "kafkaConfigName", required = false) String kafkaConfigName, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         try {
             return kafkaConfigService.getKafkaConfigs(kafkaConfigName, limit, offset);
         } catch (IOException e) {
@@ -150,8 +145,7 @@ public class StreamingController extends BasicController {
                 try {
                     streamingService.dropStreamingConfig(streamingConfig);
                 } catch (IOException e1) {
-                    throw new InternalErrorException(
-                            "StreamingConfig is created, but failed to create KafkaConfig: " + e.getLocalizedMessage());
+                    throw new InternalErrorException("StreamingConfig is created, but failed to create KafkaConfig: " + e.getLocalizedMessage());
                 }
                 logger.error("Failed to save KafkaConfig:" + e.getLocalizedMessage(), e);
                 throw new InternalErrorException("Failed to save KafkaConfig: " + e.getLocalizedMessage());
@@ -160,14 +154,11 @@ public class StreamingController extends BasicController {
             if (saveKafkaSuccess == false || saveStreamingSuccess == false) {
 
                 if (saveStreamingSuccess == true) {
-                    StreamingConfig sConfig = streamingService.getStreamingManager()
-                            .getStreamingConfig(streamingConfig.getName());
+                    StreamingConfig sConfig = streamingService.getStreamingManager().getStreamingConfig(streamingConfig.getName());
                     try {
                         streamingService.dropStreamingConfig(sConfig);
                     } catch (IOException e) {
-                        throw new InternalErrorException(
-                                "Action failed and failed to rollback the created streaming config: "
-                                        + e.getLocalizedMessage());
+                        throw new InternalErrorException("Action failed and failed to rollback the created streaming config: " + e.getLocalizedMessage());
                     }
                 }
                 if (saveKafkaSuccess == true) {
@@ -175,9 +166,7 @@ public class StreamingController extends BasicController {
                         KafkaConfig kConfig = kafkaConfigService.getKafkaConfig(kafkaConfig.getName());
                         kafkaConfigService.dropKafkaConfig(kConfig);
                     } catch (IOException e) {
-                        throw new InternalErrorException(
-                                "Action failed and failed to rollback the created kafka config: "
-                                        + e.getLocalizedMessage());
+                        throw new InternalErrorException("Action failed and failed to rollback the created kafka config: " + e.getLocalizedMessage());
                     }
                 }
             }
@@ -189,8 +178,7 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
-    public StreamingRequest updateStreamingConfig(@RequestBody StreamingRequest streamingRequest)
-            throws JsonProcessingException {
+    public StreamingRequest updateStreamingConfig(@RequestBody StreamingRequest streamingRequest) throws JsonProcessingException {
         StreamingConfig streamingConfig = deserializeSchemalDesc(streamingRequest);
         KafkaConfig kafkaConfig = deserializeKafkaSchemalDesc(streamingRequest);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
index 06392a8..ebbfeb2 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -68,8 +68,7 @@ public class TableController extends BasicController {
      */
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<TableDesc> getTableDesc(@RequestParam(value = "ext", required = false) boolean withExt,
-            @RequestParam(value = "project", required = true) String project) throws IOException {
+    public List<TableDesc> getTableDesc(@RequestParam(value = "ext", required = false) boolean withExt, @RequestParam(value = "project", required = true) String project) throws IOException {
         try {
             return tableService.getTableDescByProject(project, withExt);
         } catch (IOException e) {
@@ -95,8 +94,7 @@ public class TableController extends BasicController {
 
     @RequestMapping(value = "/{tables}/{project}", method = { RequestMethod.POST }, produces = { "application/json" })
     @ResponseBody
-    public Map<String, String[]> loadHiveTables(@PathVariable String tables, @PathVariable String project,
-            @RequestBody HiveTableRequest request) throws IOException {
+    public Map<String, String[]> loadHiveTables(@PathVariable String tables, @PathVariable String project, @RequestBody HiveTableRequest request) throws IOException {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         Map<String, String[]> result = new HashMap<String, String[]>();
         String[] tableNames = StringUtil.splitAndTrim(tables, ",");
@@ -152,11 +150,9 @@ public class TableController extends BasicController {
      * @return Table metadata array
      * @throws IOException
      */
-    @RequestMapping(value = "/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
-    public CardinalityRequest generateCardinality(@PathVariable String tableNames,
-            @RequestBody CardinalityRequest request) throws Exception {
+    public CardinalityRequest generateCardinality(@PathVariable String tableNames, @RequestBody CardinalityRequest request) throws Exception {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         String[] tables = tableNames.split(",");
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/UserController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/UserController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/UserController.java
index 7be73eb..c07c0cb 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/UserController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/UserController.java
@@ -76,8 +76,7 @@ public class UserController extends BasicController {
         return null;
     }
 
-    @RequestMapping(value = "/authentication/authorities", method = RequestMethod.GET, produces = {
-            "application/json" })
+    @RequestMapping(value = "/authentication/authorities", method = RequestMethod.GET, produces = { "application/json" })
     public List<String> getAuthorities() throws IOException {
         return userService.listUserAuthorities();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
index 55b9e15..3258de9 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
@@ -61,11 +61,9 @@ public class AccessControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getAccessEntitiesV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String type, @PathVariable String uuid) {
+    public EnvelopeResponse getAccessEntitiesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid) {
         MsgPicker.setMsg(lang);
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
@@ -79,11 +77,9 @@ public class AccessControllerV2 extends BasicController {
      * @param accessRequest
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.POST }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse grantV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type,
-            @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
+    public EnvelopeResponse grantV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
         MsgPicker.setMsg(lang);
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
@@ -100,11 +96,9 @@ public class AccessControllerV2 extends BasicController {
      * @param accessRequest
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type,
-            @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
+    public EnvelopeResponse updateV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
         MsgPicker.setMsg(lang);
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
@@ -120,11 +114,9 @@ public class AccessControllerV2 extends BasicController {
      * @param accessRequest
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse revokeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type,
-            @PathVariable String uuid, AccessRequest accessRequest) {
+    public EnvelopeResponse revokeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid, AccessRequest accessRequest) {
         MsgPicker.setMsg(lang);
 
         AclEntity ae = accessService.getAclEntity(type, uuid);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
index c0121d5..01176d0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
@@ -57,8 +57,7 @@ public class AdminControllerV2 extends BasicController {
     @Qualifier("cubeMgmtService")
     private CubeService cubeMgmtService;
 
-    @RequestMapping(value = "/env", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/env", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getEnvV2(@RequestHeader("Accept-Language") String lang) throws ConfigurationException {
         MsgPicker.setMsg(lang);
@@ -66,8 +65,7 @@ public class AdminControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, adminService.getEnv(), "");
     }
 
-    @RequestMapping(value = "/config", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/config", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getConfigV2(@RequestHeader("Accept-Language") String lang) throws IOException {
         MsgPicker.setMsg(lang);
@@ -75,15 +73,13 @@ public class AdminControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, adminService.getConfigAsString(), "");
     }
 
-    @RequestMapping(value = "/metrics/cubes", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/metrics/cubes", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse cubeMetricsV2(@RequestHeader("Accept-Language") String lang, MetricsRequest request) {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeMgmtService.calculateMetrics(request), "");
     }
 
-    @RequestMapping(value = "/storage", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/storage", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public void cleanupStorageV2(@RequestHeader("Accept-Language") String lang) {
         MsgPicker.setMsg(lang);
@@ -91,11 +87,9 @@ public class AdminControllerV2 extends BasicController {
         adminService.cleanupStorage();
     }
 
-    @RequestMapping(value = "/config", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/config", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateKylinConfigV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody UpdateConfigRequest updateConfigRequest) {
+    public void updateKylinConfigV2(@RequestHeader("Accept-Language") String lang, @RequestBody UpdateConfigRequest updateConfigRequest) {
         MsgPicker.setMsg(lang);
 
         KylinConfig.getInstanceFromEnv().setProperty(updateConfigRequest.getKey(), updateConfigRequest.getValue());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
index 64e8f7c..3cd5abd 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
@@ -56,11 +56,9 @@ public class CacheControllerV2 extends BasicController {
      * Announce wipe cache to all cluster nodes
      */
 
-    @RequestMapping(value = "/announce/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/announce/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void announceWipeCacheV2(@RequestHeader("Accept-Language") String lang, @PathVariable String entity,
-            @PathVariable String event, @PathVariable String cacheKey) throws IOException {
+    public void announceWipeCacheV2(@RequestHeader("Accept-Language") String lang, @PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey) throws IOException {
         MsgPicker.setMsg(lang);
 
         cacheService.annouceWipeCache(entity, event, cacheKey);
@@ -70,18 +68,15 @@ public class CacheControllerV2 extends BasicController {
      * Wipe cache on this node
      */
 
-    @RequestMapping(value = "/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void wipeCacheV2(@RequestHeader("Accept-Language") String lang, @PathVariable String entity,
-            @PathVariable String event, @PathVariable String cacheKey) throws IOException {
+    public void wipeCacheV2(@RequestHeader("Accept-Language") String lang, @PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey) throws IOException {
         MsgPicker.setMsg(lang);
 
         cacheService.notifyMetadataChange(entity, Broadcaster.Event.getEvent(event), cacheKey);
     }
 
-    @RequestMapping(value = "/announce/config", method = { RequestMethod.POST }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/announce/config", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public void hotLoadKylinConfigV2(@RequestHeader("Accept-Language") String lang) throws IOException {
         MsgPicker.setMsg(lang);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
index 8a6a37d..386aad2 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
@@ -87,8 +87,7 @@ import com.google.common.collect.Lists;
 public class CubeControllerV2 extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(CubeControllerV2.class);
 
-    public static final char[] VALID_CUBENAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
-            .toCharArray();
+    public static final char[] VALID_CUBENAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
 
     @Autowired
     @Qualifier("cubeMgmtService")
@@ -108,12 +107,7 @@ public class CubeControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getCubesPaging(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "cubeName", required = false) String cubeName,
-            @RequestParam(value = "modelName", required = false) String modelName,
-            @RequestParam(value = "projectName", required = false) String projectName,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
+    public EnvelopeResponse getCubesPaging(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "cubeName", required = false) String cubeName, @RequestParam(value = "modelName", required = false) String modelName, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
         MsgPicker.setMsg(lang);
 
         HashMap<String, Object> data = new HashMap<String, Object>();
@@ -142,8 +136,7 @@ public class CubeControllerV2 extends BasicController {
             DataModelDesc getModel = modelService.getMetadataManager().getDataModelDesc(getModelName);
             cubeInstanceResponse.setPartitionDateColumn(getModel.getPartitionDesc().getPartitionDateColumn());
 
-            cubeInstanceResponse.setIs_streaming(
-                    getModel.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
+            cubeInstanceResponse.setIs_streaming(getModel.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
 
             if (projectName != null)
                 cubeInstanceResponse.setProject(projectName);
@@ -165,8 +158,7 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "validEncodings", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "validEncodings", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getValidEncodingsV2(@RequestHeader("Accept-Language") String lang) {
         MsgPicker.setMsg(lang);
@@ -175,8 +167,7 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, encodings, "");
     }
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);
@@ -196,8 +187,7 @@ public class CubeControllerV2 extends BasicController {
         DataModelDesc model = modelService.getMetadataManager().getDataModelDesc(modelName);
         cubeInstanceResponse.setPartitionDateColumn(model.getPartitionDesc().getPartitionDateColumn());
 
-        cubeInstanceResponse
-                .setIs_streaming(model.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
+        cubeInstanceResponse.setIs_streaming(model.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
 
         List<ProjectInstance> projectInstances = projectService.listProjects(null, null);
         for (ProjectInstance projectInstance : projectInstances) {
@@ -217,8 +207,7 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/sql", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/sql", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getSqlV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);
@@ -244,11 +233,9 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateNotifyListV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName,
-            @RequestBody List<String> notifyList) throws IOException {
+    public void updateNotifyListV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody List<String> notifyList) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -262,11 +249,9 @@ public class CubeControllerV2 extends BasicController {
 
     }
 
-    @RequestMapping(value = "/{cubeName}/cost", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/cost", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateCubeCostV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String cubeName, @RequestBody Integer cost) throws IOException {
+    public EnvelopeResponse updateCubeCostV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody Integer cost) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -283,12 +268,9 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = {
-            RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rebuildLookupSnapshotV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String cubeName, @PathVariable String segmentName, @RequestBody String lookupTable)
-            throws IOException {
+    public EnvelopeResponse rebuildLookupSnapshotV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @PathVariable String segmentName, @RequestBody String lookupTable) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -297,8 +279,7 @@ public class CubeControllerV2 extends BasicController {
         if (cube == null) {
             throw new BadRequestException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
         }
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                cubeService.rebuildLookupSnapshot(cube, segmentName, lookupTable), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.rebuildLookupSnapshot(cube, segmentName, lookupTable), "");
     }
 
     /**
@@ -307,11 +288,9 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse deleteSegmentV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String cubeName, @PathVariable String segmentName) throws IOException {
+    public EnvelopeResponse deleteSegmentV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @PathVariable String segmentName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -331,36 +310,27 @@ public class CubeControllerV2 extends BasicController {
     /** Build/Rebuild a cube segment */
 
     /** Build/Rebuild a cube segment */
-    @RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse buildV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName,
-            @RequestBody JobBuildRequest req) throws IOException {
+    public EnvelopeResponse buildV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest req) throws IOException {
         return rebuildV2(lang, cubeName, req);
     }
 
     /** Build/Rebuild a cube segment */
 
-    @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rebuildV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName,
-            @RequestBody JobBuildRequest req) throws IOException {
+    public EnvelopeResponse rebuildV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest req) throws IOException {
         MsgPicker.setMsg(lang);
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                buildInternalV2(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, null, null, req.getBuildType(),
-                        req.isForce() || req.isForceMergeEmptySegment()),
-                "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, buildInternalV2(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, null, null, req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment()), "");
     }
 
     /** Build/Rebuild a cube segment by source offset */
 
-    @RequestMapping(value = "/{cubeName}/build_streaming", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/build_streaming", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse build2V2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName,
-            @RequestBody JobBuildRequest2 req) throws IOException {
+    public EnvelopeResponse build2V2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest2 req) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -380,23 +350,16 @@ public class CubeControllerV2 extends BasicController {
     }
 
     /** Build/Rebuild a cube segment by source offset */
-    @RequestMapping(value = "/{cubeName}/rebuild_streaming", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/rebuild_streaming", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rebuild2V2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName,
-            @RequestBody JobBuildRequest2 req) throws IOException {
+    public EnvelopeResponse rebuild2V2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest2 req) throws IOException {
         MsgPicker.setMsg(lang);
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                buildInternalV2(cubeName, 0, 0, req.getSourceOffsetStart(), req.getSourceOffsetEnd(),
-                        req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(),
-                        req.isForce()),
-                "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, buildInternalV2(cubeName, 0, 0, req.getSourceOffsetStart(), req.getSourceOffsetEnd(), req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(), req.isForce()), "");
     }
 
     private JobInstance buildInternalV2(String cubeName, long startTime, long endTime, //
-            long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart,
-            Map<Integer, Long> sourcePartitionOffsetEnd, String buildType, boolean force) throws IOException {
+            long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd, String buildType, boolean force) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -409,15 +372,12 @@ public class CubeControllerV2 extends BasicController {
             throw new BadRequestException(msg.getBUILD_DRAFT_CUBE());
         }
         return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //
-                sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force,
-                submitter);
+                sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force, submitter);
     }
 
-    @RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse disableCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName)
-            throws IOException {
+    public EnvelopeResponse disableCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -431,11 +391,9 @@ public class CubeControllerV2 extends BasicController {
 
     }
 
-    @RequestMapping(value = "/{cubeName}/purge", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/purge", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse purgeCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName)
-            throws IOException {
+    public EnvelopeResponse purgeCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -447,11 +405,9 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.purgeCube(cube), "");
     }
 
-    @RequestMapping(value = "/{cubeName}/clone", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/clone", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cloneCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName,
-            @RequestBody CubeRequest cubeRequest) throws IOException {
+    public EnvelopeResponse cloneCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody CubeRequest cubeRequest) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -484,11 +440,9 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, newCube, "");
     }
 
-    @RequestMapping(value = "/{cubeName}/enable", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/enable", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse enableCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName)
-            throws IOException {
+    public EnvelopeResponse enableCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -500,11 +454,9 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.enableCube(cube), "");
     }
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName)
-            throws IOException {
+    public void deleteCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -525,11 +477,9 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/hbase", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/hbase", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getHBaseInfoV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String cubeName) {
+    public EnvelopeResponse getHBaseInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -581,15 +531,13 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getHolesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);
 
         checkCubeNameV2(cubeName);
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.getCubeManager().calculateHoles(cubeName),
-                "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.getCubeManager().calculateHoles(cubeName), "");
     }
 
     /**
@@ -599,8 +547,7 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse fillHolesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);
@@ -659,11 +606,9 @@ public class CubeControllerV2 extends BasicController {
      * @return
      */
 
-    @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse initStartOffsetsV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String cubeName) throws IOException {
+    public EnvelopeResponse initStartOffsetsV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -684,11 +629,9 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, response, "");
     }
 
-    @RequestMapping(value = "/checkNameAvailability/{cubeName}", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/checkNameAvailability/{cubeName}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse checkNameAvailabilityV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String cubeName) {
+    public EnvelopeResponse checkNameAvailabilityV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.checkNameAvailability(cubeName), "");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
index 1f2e8eb..da429f5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
@@ -60,8 +60,7 @@ public class CubeDescControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);
@@ -89,8 +88,7 @@ public class CubeDescControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/desc", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/desc", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getDescV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
         MsgPicker.setMsg(lang);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
index 9285032..636e81c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
@@ -67,14 +67,9 @@ public class DiagnosisControllerV2 extends BasicController {
      * Get bad query history
      */
 
-    @RequestMapping(value = "/sql", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/sql", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getBadQuerySqlV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "project", required = false) String project,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
-            throws IOException {
+    public EnvelopeResponse getBadQuerySqlV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "project", required = false) String project, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
         MsgPicker.setMsg(lang);
 
         HashMap<String, Object> data = new HashMap<String, Object>();
@@ -111,11 +106,9 @@ public class DiagnosisControllerV2 extends BasicController {
      * Get diagnosis information for project
      */
 
-    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void dumpProjectDiagnosisInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String project,
-            final HttpServletRequest request, final HttpServletResponse response) throws IOException {
+    public void dumpProjectDiagnosisInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String project, final HttpServletRequest request, final HttpServletResponse response) throws IOException {
         MsgPicker.setMsg(lang);
 
         String filePath;
@@ -128,11 +121,9 @@ public class DiagnosisControllerV2 extends BasicController {
      * Get diagnosis information for job
      */
 
-    @RequestMapping(value = "/job/{jobId}/download", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/job/{jobId}/download", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void dumpJobDiagnosisInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId,
-            final HttpServletRequest request, final HttpServletResponse response) throws IOException {
+    public void dumpJobDiagnosisInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId, final HttpServletRequest request, final HttpServletResponse response) throws IOException {
         MsgPicker.setMsg(lang);
 
         String filePath;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
index 37792d7..edb58b4 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
@@ -58,8 +58,7 @@ public class EncodingControllerV2 extends BasicController {
      * @return suggestion map
      */
 
-    @RequestMapping(value = "valid_encodings", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "valid_encodings", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getValidEncodingsV2(@RequestHeader("Accept-Language") String lang) {
         MsgPicker.setMsg(lang);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
index 5d625a5..4e82b41 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
@@ -57,11 +57,9 @@ public class ExternalFilterControllerV2 extends BasicController {
     @Qualifier("extFilterService")
     private ExtFilterService extFilterService;
 
-    @RequestMapping(value = "/saveExtFilter", method = { RequestMethod.POST }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saveExtFilter", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void saveExternalFilterV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody ExternalFilterRequest request) throws IOException {
+    public void saveExternalFilterV2(@RequestHeader("Accept-Language") String lang, @RequestBody ExternalFilterRequest request) throws IOException {
         MsgPicker.setMsg(lang);
 
         String filterProject = request.getProject();
@@ -71,11 +69,9 @@ public class ExternalFilterControllerV2 extends BasicController {
         extFilterService.syncExtFilterToProject(new String[] { desc.getName() }, filterProject);
     }
 
-    @RequestMapping(value = "/updateExtFilter", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/updateExtFilter", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateExternalFilterV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody ExternalFilterRequest request) throws IOException {
+    public void updateExternalFilterV2(@RequestHeader("Accept-Language") String lang, @RequestBody ExternalFilterRequest request) throws IOException {
         MsgPicker.setMsg(lang);
 
         ExternalFilterDesc desc = JsonUtil.readValue(request.getExtFilter(), ExternalFilterDesc.class);
@@ -83,11 +79,9 @@ public class ExternalFilterControllerV2 extends BasicController {
         extFilterService.syncExtFilterToProject(new String[] { desc.getName() }, request.getProject());
     }
 
-    @RequestMapping(value = "/{filter}/{project}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{filter}/{project}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void removeFilterV2(@RequestHeader("Accept-Language") String lang, @PathVariable String filter,
-            @PathVariable String project) throws IOException {
+    public void removeFilterV2(@RequestHeader("Accept-Language") String lang, @PathVariable String filter, @PathVariable String project) throws IOException {
         MsgPicker.setMsg(lang);
 
         extFilterService.removeExtFilterFromProject(filter, project);
@@ -96,8 +90,7 @@ public class ExternalFilterControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getExternalFiltersV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "project", required = true) String project) throws IOException {
+    public EnvelopeResponse getExternalFiltersV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "project", required = true) String project) throws IOException {
         MsgPicker.setMsg(lang);
 
         List<ExternalFilterDesc> filterDescs = Lists.newArrayList();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
index 852d16c..ddf745a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
@@ -44,31 +44,27 @@ public class HybridControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse createV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody HybridRequest request) {
+    public EnvelopeResponse createV2(@RequestHeader("Accept-Language") String lang, @RequestBody HybridRequest request) {
         MsgPicker.setMsg(lang);
 
         checkRequiredArg("hybrid", request.getHybrid());
         checkRequiredArg("project", request.getProject());
         checkRequiredArg("model", request.getModel());
         checkRequiredArg("cubes", request.getCubes());
-        HybridInstance instance = hybridService.createHybridCube(request.getHybrid(), request.getProject(),
-                request.getModel(), request.getCubes());
+        HybridInstance instance = hybridService.createHybridCube(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, instance, "");
     }
 
     @RequestMapping(value = "", method = RequestMethod.PUT, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody HybridRequest request) {
+    public EnvelopeResponse updateV2(@RequestHeader("Accept-Language") String lang, @RequestBody HybridRequest request) {
         MsgPicker.setMsg(lang);
 
         checkRequiredArg("hybrid", request.getHybrid());
         checkRequiredArg("project", request.getProject());
         checkRequiredArg("model", request.getModel());
         checkRequiredArg("cubes", request.getCubes());
-        HybridInstance instance = hybridService.updateHybridCube(request.getHybrid(), request.getProject(),
-                request.getModel(), request.getCubes());
+        HybridInstance instance = hybridService.updateHybridCube(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, instance, "");
     }
 
@@ -85,15 +81,13 @@ public class HybridControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse listV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(required = false) String project, @RequestParam(required = false) String model) {
+    public EnvelopeResponse listV2(@RequestHeader("Accept-Language") String lang, @RequestParam(required = false) String project, @RequestParam(required = false) String model) {
         MsgPicker.setMsg(lang);
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, hybridService.listHybrids(project, model), "");
     }
 
-    @RequestMapping(value = "{hybrid}", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "{hybrid}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getV2(@RequestHeader("Accept-Language") String lang, @PathVariable String hybrid) {
         MsgPicker.setMsg(lang);


[55/67] [abbrv] kylin git commit: increase dimension cap to 5 in test cube

Posted by li...@apache.org.
increase dimension cap to 5 in test cube


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/84408d5f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/84408d5f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/84408d5f

Branch: refs/heads/master
Commit: 84408d5f0b9d353e87d1f716eecdfc3f5183542d
Parents: 7c38148
Author: Hongbin Ma <ma...@apache.org>
Authored: Wed May 31 14:11:46 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Wed May 31 17:14:55 2017 +0800

----------------------------------------------------------------------
 .../test_case_data/localmeta/cube_desc/ci_inner_join_cube.json   | 2 +-
 .../test_case_data/localmeta/cube_desc/ci_left_join_cube.json    | 2 +-
 kylin-it/src/test/resources/query/sql_limit/query03.sql          | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/84408d5f/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json b/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
index a5badcb..28a63d5 100644
--- a/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
+++ b/examples/test_case_data/localmeta/cube_desc/ci_inner_join_cube.json
@@ -602,7 +602,7 @@
             "BUYER_COUNTRY.NAME"
           ]
         ],
-        "dim_cap" : 3
+        "dim_cap" : 5
       }
     }
   ],

http://git-wip-us.apache.org/repos/asf/kylin/blob/84408d5f/examples/test_case_data/localmeta/cube_desc/ci_left_join_cube.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/ci_left_join_cube.json b/examples/test_case_data/localmeta/cube_desc/ci_left_join_cube.json
index 3b0e852..f7ebfc8 100644
--- a/examples/test_case_data/localmeta/cube_desc/ci_left_join_cube.json
+++ b/examples/test_case_data/localmeta/cube_desc/ci_left_join_cube.json
@@ -548,7 +548,7 @@
             "TEST_KYLIN_FACT.DEAL_YEAR"
           ]
         ],
-        "dim_cap" : 3
+        "dim_cap" : 5
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/kylin/blob/84408d5f/kylin-it/src/test/resources/query/sql_limit/query03.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_limit/query03.sql b/kylin-it/src/test/resources/query/sql_limit/query03.sql
index 4844395..765d8a1 100644
--- a/kylin-it/src/test/resources/query/sql_limit/query03.sql
+++ b/kylin-it/src/test/resources/query/sql_limit/query03.sql
@@ -17,7 +17,7 @@
 --
 
 SELECT 
- test_kylin_fact.lstg_format_name ,test_kylin_fact.cal_dt , sum(test_kylin_fact.price) as GMV, count(*) as TRANS_CNT 
+ test_kylin_fact.cal_dt , sum(test_kylin_fact.price) as GMV, count(*) as TRANS_CNT 
  FROM test_kylin_fact 
  inner JOIN edw.test_cal_dt as test_cal_dt 
  ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt 
@@ -27,5 +27,5 @@ SELECT
  ON test_kylin_fact.lstg_site_id = test_sites.site_id 
  where test_kylin_fact.seller_id = 10000002 + 4 
  
- group by  seller_id,test_kylin_fact.cal_dt,lstg_format_name
+ group by  seller_id,test_kylin_fact.cal_dt
  limit 10
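
Context for the dim_cap change above, offered as a reading of the commit rather than anything stated in it: dim_cap in an aggregation group's select_rule caps how many dimensions may appear together in any single cuboid, so raising it from 3 to 5 allows wider cuboids and enlarges the combination space the test cube exercises. A rough sketch of that effect under a plain n-choose-k model; it ignores hierarchies, mandatory and joint dimensions, so it only illustrates the order of magnitude, not Kylin's exact cuboid enumeration:

    import java.math.BigInteger;

    public class DimCapSketch {
        // Count cuboids that use between 1 and cap of the n dimensions.
        static BigInteger cuboidsUnderCap(int n, int cap) {
            BigInteger total = BigInteger.ZERO;
            for (int k = 1; k <= Math.min(cap, n); k++)
                total = total.add(choose(n, k));
            return total;
        }

        // n choose k, computed incrementally so every intermediate division is exact.
        static BigInteger choose(int n, int k) {
            BigInteger result = BigInteger.ONE;
            for (int i = 1; i <= k; i++)
                result = result.multiply(BigInteger.valueOf(n - k + i))
                               .divide(BigInteger.valueOf(i));
            return result;
        }

        public static void main(String[] args) {
            // With 20 dimensions: cap 3 -> 1350 cuboids, cap 5 -> 21699 cuboids.
            System.out.println(cuboidsUnderCap(20, 3));
            System.out.println(cuboidsUnderCap(20, 5));
        }
    }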


[36/67] [abbrv] kylin git commit: minor, fix UT

Posted by li...@apache.org.
minor, fix UT


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b6b0dc7c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b6b0dc7c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b6b0dc7c

Branch: refs/heads/master
Commit: b6b0dc7c846aa56aec9913931fcf7152067fd3c6
Parents: c341a62
Author: Roger Shi <ro...@hotmail.com>
Authored: Sat May 27 20:14:25 2017 +0800
Committer: 成 <ch...@kyligence.io>
Committed: Sat May 27 20:20:06 2017 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/common/KylinConfigBase.java    | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b6b0dc7c/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 854ffbd..05be701 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -195,7 +195,7 @@ abstract public class KylinConfigBase implements Serializable {
         
         String root = getRequired("kylin.env.hdfs-working-dir");
         Path path = new Path(root);
-        if (path.isAbsolute() == false)
+        if (!path.isAbsolute())
             throw new IllegalArgumentException("kylin.env.hdfs-working-dir must be absolute, but got " + root);
         
         // make sure path is qualified
@@ -209,10 +209,13 @@ abstract public class KylinConfigBase implements Serializable {
         // append metadata-url prefix
         root = new Path(path, StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).toString();
         
-        if (root.endsWith("/") == false)
+        if (!root.endsWith("/"))
             root += "/";
         
         cachedHdfsWorkingDirectory = root;
+        if (cachedHdfsWorkingDirectory.startsWith("file:")) {
+            cachedHdfsWorkingDirectory = cachedHdfsWorkingDirectory.replace("file:", "file://");
+        }
         return cachedHdfsWorkingDirectory;
     }
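
For context on the hunk above: the added lines normalize the cached HDFS working directory so that a local "file:" scheme keeps a full "file://" authority prefix and the path always ends with a slash. A minimal, self-contained sketch of that normalization follows; the class and method names are illustrative only, not Kylin's actual API:

    public class WorkingDirSketch {
        // Mirrors the normalization in the diff above: ensure a trailing slash,
        // then expand a bare "file:" scheme into "file://".
        static String normalize(String root) {
            if (!root.endsWith("/"))
                root += "/";
            if (root.startsWith("file:"))
                root = root.replace("file:", "file://");
            return root;
        }

        public static void main(String[] args) {
            System.out.println(normalize("file:/tmp/kylin/meta"));    // file:///tmp/kylin/meta/
            System.out.println(normalize("hdfs://ns1/kylin/meta/"));  // unchanged
        }
    }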
 


[19/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HyperLogLogPlusTable.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HyperLogLogPlusTable.java b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HyperLogLogPlusTable.java
index f914b07..5d7bfeb 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HyperLogLogPlusTable.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HyperLogLogPlusTable.java
@@ -29,803 +29,127 @@ public class HyperLogLogPlusTable implements java.io.Serializable {
 
     // threshold and bias data taken from google's bias correction data set:
     // https://docs.google.com/document/d/1gyjfMHy43U9OWBXxfaeG-3MjGzejW1dlpyMwEYAAWEI/view?fullscreen#
-    public static final double[] thresholdData = { 10, 20, 40, 80, 220, 400, 900, 1800, 3100, 6500, 15500, 20000, 50000,
-            120000, 350000 };
+    public static final double[] thresholdData = { 10, 20, 40, 80, 220, 400, 900, 1800, 3100, 6500, 15500, 20000, 50000, 120000, 350000 };
 
     public static final double[][] rawEstimateData = {
             // precision 4
-            { 11, 11.717, 12.207, 12.7896, 13.2882, 13.8204, 14.3772, 14.9342, 15.5202, 16.161, 16.7722, 17.4636,
-                    18.0396, 18.6766, 19.3566, 20.0454, 20.7936, 21.4856, 22.2666, 22.9946, 23.766, 24.4692, 25.3638,
-                    26.0764, 26.7864, 27.7602, 28.4814, 29.433, 30.2926, 31.0664, 31.9996, 32.7956, 33.5366, 34.5894,
-                    35.5738, 36.2698, 37.3682, 38.0544, 39.2342, 40.0108, 40.7966, 41.9298, 42.8704, 43.6358, 44.5194,
-                    45.773, 46.6772, 47.6174, 48.4888, 49.3304, 50.2506, 51.4996, 52.3824, 53.3078, 54.3984, 55.5838,
-                    56.6618, 57.2174, 58.3514, 59.0802, 60.1482, 61.0376, 62.3598, 62.8078, 63.9744, 64.914, 65.781,
-                    67.1806, 68.0594, 68.8446, 69.7928, 70.8248, 71.8324, 72.8598, 73.6246, 74.7014, 75.393, 76.6708,
-                    77.2394, },
+            { 11, 11.717, 12.207, 12.7896, 13.2882, 13.8204, 14.3772, 14.9342, 15.5202, 16.161, 16.7722, 17.4636, 18.0396, 18.6766, 19.3566, 20.0454, 20.7936, 21.4856, 22.2666, 22.9946, 23.766, 24.4692, 25.3638, 26.0764, 26.7864, 27.7602, 28.4814, 29.433, 30.2926, 31.0664, 31.9996, 32.7956, 33.5366, 34.5894, 35.5738, 36.2698, 37.3682, 38.0544, 39.2342, 40.0108, 40.7966, 41.9298, 42.8704, 43.6358, 44.5194, 45.773, 46.6772, 47.6174, 48.4888, 49.3304, 50.2506, 51.4996, 52.3824, 53.3078, 54.3984, 55.5838, 56.6618, 57.2174, 58.3514, 59.0802, 60.1482, 61.0376, 62.3598, 62.8078, 63.9744, 64.914, 65.781, 67.1806, 68.0594, 68.8446, 69.7928, 70.8248, 71.8324, 72.8598, 73.6246, 74.7014, 75.393, 76.6708, 77.2394, },
             // precision 5
-            { 23, 23.1194, 23.8208, 24.2318, 24.77, 25.2436, 25.7774, 26.2848, 26.8224, 27.3742, 27.9336, 28.503,
-                    29.0494, 29.6292, 30.2124, 30.798, 31.367, 31.9728, 32.5944, 33.217, 33.8438, 34.3696, 35.0956,
-                    35.7044, 36.324, 37.0668, 37.6698, 38.3644, 39.049, 39.6918, 40.4146, 41.082, 41.687, 42.5398,
-                    43.2462, 43.857, 44.6606, 45.4168, 46.1248, 46.9222, 47.6804, 48.447, 49.3454, 49.9594, 50.7636,
-                    51.5776, 52.331, 53.19, 53.9676, 54.7564, 55.5314, 56.4442, 57.3708, 57.9774, 58.9624, 59.8796,
-                    60.755, 61.472, 62.2076, 63.1024, 63.8908, 64.7338, 65.7728, 66.629, 67.413, 68.3266, 69.1524,
-                    70.2642, 71.1806, 72.0566, 72.9192, 73.7598, 74.3516, 75.5802, 76.4386, 77.4916, 78.1524, 79.1892,
-                    79.8414, 80.8798, 81.8376, 82.4698, 83.7656, 84.331, 85.5914, 86.6012, 87.7016, 88.5582, 89.3394,
-                    90.3544, 91.4912, 92.308, 93.3552, 93.9746, 95.2052, 95.727, 97.1322, 98.3944, 98.7588, 100.242,
-                    101.1914, 102.2538, 102.8776, 103.6292, 105.1932, 105.9152, 107.0868, 107.6728, 108.7144, 110.3114,
-                    110.8716, 111.245, 112.7908, 113.7064, 114.636, 115.7464, 116.1788, 117.7464, 118.4896, 119.6166,
-                    120.5082, 121.7798, 122.9028, 123.4426, 124.8854, 125.705, 126.4652, 128.3464, 128.3462, 130.0398,
-                    131.0342, 131.0042, 132.4766, 133.511, 134.7252, 135.425, 136.5172, 138.0572, 138.6694, 139.3712,
-                    140.8598, 141.4594, 142.554, 143.4006, 144.7374, 146.1634, 146.8994, 147.605, 147.9304, 149.1636,
-                    150.2468, 151.5876, 152.2096, 153.7032, 154.7146, 155.807, 156.9228, 157.0372, 158.5852, },
+            { 23, 23.1194, 23.8208, 24.2318, 24.77, 25.2436, 25.7774, 26.2848, 26.8224, 27.3742, 27.9336, 28.503, 29.0494, 29.6292, 30.2124, 30.798, 31.367, 31.9728, 32.5944, 33.217, 33.8438, 34.3696, 35.0956, 35.7044, 36.324, 37.0668, 37.6698, 38.3644, 39.049, 39.6918, 40.4146, 41.082, 41.687, 42.5398, 43.2462, 43.857, 44.6606, 45.4168, 46.1248, 46.9222, 47.6804, 48.447, 49.3454, 49.9594, 50.7636, 51.5776, 52.331, 53.19, 53.9676, 54.7564, 55.5314, 56.4442, 57.3708, 57.9774, 58.9624, 59.8796, 60.755, 61.472, 62.2076, 63.1024, 63.8908, 64.7338, 65.7728, 66.629, 67.413, 68.3266, 69.1524, 70.2642, 71.1806, 72.0566, 72.9192, 73.7598, 74.3516, 75.5802, 76.4386, 77.4916, 78.1524, 79.1892, 79.8414, 80.8798, 81.8376, 82.4698, 83.7656, 84.331, 85.5914, 86.6012, 87.7016, 88.5582, 89.3394, 90.3544, 91.4912, 92.308, 93.3552, 93.9746, 95.2052, 95.727, 97.1322, 98.3944, 98.7588, 100.242, 101.1914, 102.2538, 102.8776, 103.6292, 105.1932, 105.9152, 107.0868, 107.6728, 108.7144, 110.3114, 110.8716,
+                    111.245, 112.7908, 113.7064, 114.636, 115.7464, 116.1788, 117.7464, 118.4896, 119.6166, 120.5082, 121.7798, 122.9028, 123.4426, 124.8854, 125.705, 126.4652, 128.3464, 128.3462, 130.0398, 131.0342, 131.0042, 132.4766, 133.511, 134.7252, 135.425, 136.5172, 138.0572, 138.6694, 139.3712, 140.8598, 141.4594, 142.554, 143.4006, 144.7374, 146.1634, 146.8994, 147.605, 147.9304, 149.1636, 150.2468, 151.5876, 152.2096, 153.7032, 154.7146, 155.807, 156.9228, 157.0372, 158.5852, },
             // precision 6
-            { 46, 46.1902, 47.271, 47.8358, 48.8142, 49.2854, 50.317, 51.354, 51.8924, 52.9436, 53.4596, 54.5262,
-                    55.6248, 56.1574, 57.2822, 57.837, 58.9636, 60.074, 60.7042, 61.7976, 62.4772, 63.6564, 64.7942,
-                    65.5004, 66.686, 67.291, 68.5672, 69.8556, 70.4982, 71.8204, 72.4252, 73.7744, 75.0786, 75.8344,
-                    77.0294, 77.8098, 79.0794, 80.5732, 81.1878, 82.5648, 83.2902, 84.6784, 85.3352, 86.8946, 88.3712,
-                    89.0852, 90.499, 91.2686, 92.6844, 94.2234, 94.9732, 96.3356, 97.2286, 98.7262, 100.3284, 101.1048,
-                    102.5962, 103.3562, 105.1272, 106.4184, 107.4974, 109.0822, 109.856, 111.48, 113.2834, 114.0208,
-                    115.637, 116.5174, 118.0576, 119.7476, 120.427, 122.1326, 123.2372, 125.2788, 126.6776, 127.7926,
-                    129.1952, 129.9564, 131.6454, 133.87, 134.5428, 136.2, 137.0294, 138.6278, 139.6782, 141.792,
-                    143.3516, 144.2832, 146.0394, 147.0748, 148.4912, 150.849, 151.696, 153.5404, 154.073, 156.3714,
-                    157.7216, 158.7328, 160.4208, 161.4184, 163.9424, 165.2772, 166.411, 168.1308, 168.769, 170.9258,
-                    172.6828, 173.7502, 175.706, 176.3886, 179.0186, 180.4518, 181.927, 183.4172, 184.4114, 186.033,
-                    188.5124, 189.5564, 191.6008, 192.4172, 193.8044, 194.997, 197.4548, 198.8948, 200.2346, 202.3086,
-                    203.1548, 204.8842, 206.6508, 206.6772, 209.7254, 210.4752, 212.7228, 214.6614, 215.1676, 217.793,
-                    218.0006, 219.9052, 221.66, 223.5588, 225.1636, 225.6882, 227.7126, 229.4502, 231.1978, 232.9756,
-                    233.1654, 236.727, 238.1974, 237.7474, 241.1346, 242.3048, 244.1948, 245.3134, 246.879, 249.1204,
-                    249.853, 252.6792, 253.857, 254.4486, 257.2362, 257.9534, 260.0286, 260.5632, 262.663, 264.723,
-                    265.7566, 267.2566, 267.1624, 270.62, 272.8216, 273.2166, 275.2056, 276.2202, 278.3726, 280.3344,
-                    281.9284, 283.9728, 284.1924, 286.4872, 287.587, 289.807, 291.1206, 292.769, 294.8708, 296.665,
-                    297.1182, 299.4012, 300.6352, 302.1354, 304.1756, 306.1606, 307.3462, 308.5214, 309.4134, 310.8352,
-                    313.9684, 315.837, 316.7796, 318.9858, },
+            { 46, 46.1902, 47.271, 47.8358, 48.8142, 49.2854, 50.317, 51.354, 51.8924, 52.9436, 53.4596, 54.5262, 55.6248, 56.1574, 57.2822, 57.837, 58.9636, 60.074, 60.7042, 61.7976, 62.4772, 63.6564, 64.7942, 65.5004, 66.686, 67.291, 68.5672, 69.8556, 70.4982, 71.8204, 72.4252, 73.7744, 75.0786, 75.8344, 77.0294, 77.8098, 79.0794, 80.5732, 81.1878, 82.5648, 83.2902, 84.6784, 85.3352, 86.8946, 88.3712, 89.0852, 90.499, 91.2686, 92.6844, 94.2234, 94.9732, 96.3356, 97.2286, 98.7262, 100.3284, 101.1048, 102.5962, 103.3562, 105.1272, 106.4184, 107.4974, 109.0822, 109.856, 111.48, 113.2834, 114.0208, 115.637, 116.5174, 118.0576, 119.7476, 120.427, 122.1326, 123.2372, 125.2788, 126.6776, 127.7926, 129.1952, 129.9564, 131.6454, 133.87, 134.5428, 136.2, 137.0294, 138.6278, 139.6782, 141.792, 143.3516, 144.2832, 146.0394, 147.0748, 148.4912, 150.849, 151.696, 153.5404, 154.073, 156.3714, 157.7216, 158.7328, 160.4208, 161.4184, 163.9424, 165.2772, 166.411, 168.1308, 168.769, 170.9258,
+                    172.6828, 173.7502, 175.706, 176.3886, 179.0186, 180.4518, 181.927, 183.4172, 184.4114, 186.033, 188.5124, 189.5564, 191.6008, 192.4172, 193.8044, 194.997, 197.4548, 198.8948, 200.2346, 202.3086, 203.1548, 204.8842, 206.6508, 206.6772, 209.7254, 210.4752, 212.7228, 214.6614, 215.1676, 217.793, 218.0006, 219.9052, 221.66, 223.5588, 225.1636, 225.6882, 227.7126, 229.4502, 231.1978, 232.9756, 233.1654, 236.727, 238.1974, 237.7474, 241.1346, 242.3048, 244.1948, 245.3134, 246.879, 249.1204, 249.853, 252.6792, 253.857, 254.4486, 257.2362, 257.9534, 260.0286, 260.5632, 262.663, 264.723, 265.7566, 267.2566, 267.1624, 270.62, 272.8216, 273.2166, 275.2056, 276.2202, 278.3726, 280.3344, 281.9284, 283.9728, 284.1924, 286.4872, 287.587, 289.807, 291.1206, 292.769, 294.8708, 296.665, 297.1182, 299.4012, 300.6352, 302.1354, 304.1756, 306.1606, 307.3462, 308.5214, 309.4134, 310.8352, 313.9684, 315.837, 316.7796, 318.9858, },
             // precision 7
-            { 92, 93.4934, 94.9758, 96.4574, 97.9718, 99.4954, 101.5302, 103.0756, 104.6374, 106.1782, 107.7888,
-                    109.9522, 111.592, 113.2532, 114.9086, 116.5938, 118.9474, 120.6796, 122.4394, 124.2176, 125.9768,
-                    128.4214, 130.2528, 132.0102, 133.8658, 135.7278, 138.3044, 140.1316, 142.093, 144.0032, 145.9092,
-                    148.6306, 150.5294, 152.5756, 154.6508, 156.662, 159.552, 161.3724, 163.617, 165.5754, 167.7872,
-                    169.8444, 172.7988, 174.8606, 177.2118, 179.3566, 181.4476, 184.5882, 186.6816, 189.0824, 191.0258,
-                    193.6048, 196.4436, 198.7274, 200.957, 203.147, 205.4364, 208.7592, 211.3386, 213.781, 215.8028,
-                    218.656, 221.6544, 223.996, 226.4718, 229.1544, 231.6098, 234.5956, 237.0616, 239.5758, 242.4878,
-                    244.5244, 248.2146, 250.724, 252.8722, 255.5198, 258.0414, 261.941, 264.9048, 266.87, 269.4304,
-                    272.028, 274.4708, 278.37, 281.0624, 283.4668, 286.5532, 289.4352, 293.2564, 295.2744, 298.2118,
-                    300.7472, 304.1456, 307.2928, 309.7504, 312.5528, 315.979, 318.2102, 322.1834, 324.3494, 327.325,
-                    330.6614, 332.903, 337.2544, 339.9042, 343.215, 345.2864, 348.0814, 352.6764, 355.301, 357.139,
-                    360.658, 363.1732, 366.5902, 369.9538, 373.0828, 375.922, 378.9902, 382.7328, 386.4538, 388.1136,
-                    391.2234, 394.0878, 396.708, 401.1556, 404.1852, 406.6372, 409.6822, 412.7796, 416.6078, 418.4916,
-                    422.131, 424.5376, 428.1988, 432.211, 434.4502, 438.5282, 440.912, 444.0448, 447.7432, 450.8524,
-                    453.7988, 456.7858, 458.8868, 463.9886, 466.5064, 468.9124, 472.6616, 475.4682, 478.582, 481.304,
-                    485.2738, 488.6894, 490.329, 496.106, 497.6908, 501.1374, 504.5322, 506.8848, 510.3324, 513.4512,
-                    516.179, 520.4412, 522.6066, 526.167, 528.7794, 533.379, 536.067, 538.46, 542.9116, 545.692,
-                    547.9546, 552.493, 555.2722, 557.335, 562.449, 564.2014, 569.0738, 571.0974, 574.8564, 578.2996,
-                    581.409, 583.9704, 585.8098, 589.6528, 594.5998, 595.958, 600.068, 603.3278, 608.2016, 609.9632,
-                    612.864, 615.43, 620.7794, 621.272, 625.8644, 629.206, 633.219, 634.5154, 638.6102, },
+            { 92, 93.4934, 94.9758, 96.4574, 97.9718, 99.4954, 101.5302, 103.0756, 104.6374, 106.1782, 107.7888, 109.9522, 111.592, 113.2532, 114.9086, 116.5938, 118.9474, 120.6796, 122.4394, 124.2176, 125.9768, 128.4214, 130.2528, 132.0102, 133.8658, 135.7278, 138.3044, 140.1316, 142.093, 144.0032, 145.9092, 148.6306, 150.5294, 152.5756, 154.6508, 156.662, 159.552, 161.3724, 163.617, 165.5754, 167.7872, 169.8444, 172.7988, 174.8606, 177.2118, 179.3566, 181.4476, 184.5882, 186.6816, 189.0824, 191.0258, 193.6048, 196.4436, 198.7274, 200.957, 203.147, 205.4364, 208.7592, 211.3386, 213.781, 215.8028, 218.656, 221.6544, 223.996, 226.4718, 229.1544, 231.6098, 234.5956, 237.0616, 239.5758, 242.4878, 244.5244, 248.2146, 250.724, 252.8722, 255.5198, 258.0414, 261.941, 264.9048, 266.87, 269.4304, 272.028, 274.4708, 278.37, 281.0624, 283.4668, 286.5532, 289.4352, 293.2564, 295.2744, 298.2118, 300.7472, 304.1456, 307.2928, 309.7504, 312.5528, 315.979, 318.2102, 322.1834, 324.3494, 327.325,
+                    330.6614, 332.903, 337.2544, 339.9042, 343.215, 345.2864, 348.0814, 352.6764, 355.301, 357.139, 360.658, 363.1732, 366.5902, 369.9538, 373.0828, 375.922, 378.9902, 382.7328, 386.4538, 388.1136, 391.2234, 394.0878, 396.708, 401.1556, 404.1852, 406.6372, 409.6822, 412.7796, 416.6078, 418.4916, 422.131, 424.5376, 428.1988, 432.211, 434.4502, 438.5282, 440.912, 444.0448, 447.7432, 450.8524, 453.7988, 456.7858, 458.8868, 463.9886, 466.5064, 468.9124, 472.6616, 475.4682, 478.582, 481.304, 485.2738, 488.6894, 490.329, 496.106, 497.6908, 501.1374, 504.5322, 506.8848, 510.3324, 513.4512, 516.179, 520.4412, 522.6066, 526.167, 528.7794, 533.379, 536.067, 538.46, 542.9116, 545.692, 547.9546, 552.493, 555.2722, 557.335, 562.449, 564.2014, 569.0738, 571.0974, 574.8564, 578.2996, 581.409, 583.9704, 585.8098, 589.6528, 594.5998, 595.958, 600.068, 603.3278, 608.2016, 609.9632, 612.864, 615.43, 620.7794, 621.272, 625.8644, 629.206, 633.219, 634.5154, 638.6102, },
             // precision 8
-            { 184.2152, 187.2454, 190.2096, 193.6652, 196.6312, 199.6822, 203.249, 206.3296, 210.0038, 213.2074,
-                    216.4612, 220.27, 223.5178, 227.4412, 230.8032, 234.1634, 238.1688, 241.6074, 245.6946, 249.2664,
-                    252.8228, 257.0432, 260.6824, 264.9464, 268.6268, 272.2626, 276.8376, 280.4034, 284.8956, 288.8522,
-                    292.7638, 297.3552, 301.3556, 305.7526, 309.9292, 313.8954, 318.8198, 322.7668, 327.298, 331.6688,
-                    335.9466, 340.9746, 345.1672, 349.3474, 354.3028, 358.8912, 364.114, 368.4646, 372.9744, 378.4092,
-                    382.6022, 387.843, 392.5684, 397.1652, 402.5426, 407.4152, 412.5388, 417.3592, 422.1366, 427.486,
-                    432.3918, 437.5076, 442.509, 447.3834, 453.3498, 458.0668, 463.7346, 469.1228, 473.4528, 479.7,
-                    484.644, 491.0518, 495.5774, 500.9068, 506.432, 512.1666, 517.434, 522.6644, 527.4894, 533.6312,
-                    538.3804, 544.292, 550.5496, 556.0234, 562.8206, 566.6146, 572.4188, 579.117, 583.6762, 590.6576,
-                    595.7864, 601.509, 607.5334, 612.9204, 619.772, 624.2924, 630.8654, 636.1836, 642.745, 649.1316,
-                    655.0386, 660.0136, 666.6342, 671.6196, 678.1866, 684.4282, 689.3324, 695.4794, 702.5038, 708.129,
-                    713.528, 720.3204, 726.463, 732.7928, 739.123, 744.7418, 751.2192, 756.5102, 762.6066, 769.0184,
-                    775.2224, 781.4014, 787.7618, 794.1436, 798.6506, 805.6378, 811.766, 819.7514, 824.5776, 828.7322,
-                    837.8048, 843.6302, 849.9336, 854.4798, 861.3388, 867.9894, 873.8196, 880.3136, 886.2308, 892.4588,
-                    899.0816, 905.4076, 912.0064, 917.3878, 923.619, 929.998, 937.3482, 943.9506, 947.991, 955.1144,
-                    962.203, 968.8222, 975.7324, 981.7826, 988.7666, 994.2648, 1000.3128, 1007.4082, 1013.7536,
-                    1020.3376, 1026.7156, 1031.7478, 1037.4292, 1045.393, 1051.2278, 1058.3434, 1062.8726, 1071.884,
-                    1076.806, 1082.9176, 1089.1678, 1095.5032, 1102.525, 1107.2264, 1115.315, 1120.93, 1127.252,
-                    1134.1496, 1139.0408, 1147.5448, 1153.3296, 1158.1974, 1166.5262, 1174.3328, 1175.657, 1184.4222,
-                    1190.9172, 1197.1292, 1204.4606, 1210.4578, 1218.8728, 1225.3336, 1226.6592, 1236.5768, 1241.363,
+            { 184.2152, 187.2454, 190.2096, 193.6652, 196.6312, 199.6822, 203.249, 206.3296, 210.0038, 213.2074, 216.4612, 220.27, 223.5178, 227.4412, 230.8032, 234.1634, 238.1688, 241.6074, 245.6946, 249.2664, 252.8228, 257.0432, 260.6824, 264.9464, 268.6268, 272.2626, 276.8376, 280.4034, 284.8956, 288.8522, 292.7638, 297.3552, 301.3556, 305.7526, 309.9292, 313.8954, 318.8198, 322.7668, 327.298, 331.6688, 335.9466, 340.9746, 345.1672, 349.3474, 354.3028, 358.8912, 364.114, 368.4646, 372.9744, 378.4092, 382.6022, 387.843, 392.5684, 397.1652, 402.5426, 407.4152, 412.5388, 417.3592, 422.1366, 427.486, 432.3918, 437.5076, 442.509, 447.3834, 453.3498, 458.0668, 463.7346, 469.1228, 473.4528, 479.7, 484.644, 491.0518, 495.5774, 500.9068, 506.432, 512.1666, 517.434, 522.6644, 527.4894, 533.6312, 538.3804, 544.292, 550.5496, 556.0234, 562.8206, 566.6146, 572.4188, 579.117, 583.6762, 590.6576, 595.7864, 601.509, 607.5334, 612.9204, 619.772, 624.2924, 630.8654, 636.1836, 642.745, 649.1316,
+                    655.0386, 660.0136, 666.6342, 671.6196, 678.1866, 684.4282, 689.3324, 695.4794, 702.5038, 708.129, 713.528, 720.3204, 726.463, 732.7928, 739.123, 744.7418, 751.2192, 756.5102, 762.6066, 769.0184, 775.2224, 781.4014, 787.7618, 794.1436, 798.6506, 805.6378, 811.766, 819.7514, 824.5776, 828.7322, 837.8048, 843.6302, 849.9336, 854.4798, 861.3388, 867.9894, 873.8196, 880.3136, 886.2308, 892.4588, 899.0816, 905.4076, 912.0064, 917.3878, 923.619, 929.998, 937.3482, 943.9506, 947.991, 955.1144, 962.203, 968.8222, 975.7324, 981.7826, 988.7666, 994.2648, 1000.3128, 1007.4082, 1013.7536, 1020.3376, 1026.7156, 1031.7478, 1037.4292, 1045.393, 1051.2278, 1058.3434, 1062.8726, 1071.884, 1076.806, 1082.9176, 1089.1678, 1095.5032, 1102.525, 1107.2264, 1115.315, 1120.93, 1127.252, 1134.1496, 1139.0408, 1147.5448, 1153.3296, 1158.1974, 1166.5262, 1174.3328, 1175.657, 1184.4222, 1190.9172, 1197.1292, 1204.4606, 1210.4578, 1218.8728, 1225.3336, 1226.6592, 1236.5768, 1241.363,
                     1249.4074, 1254.6566, 1260.8014, 1266.5454, 1274.5192, },
             // precision 9
-            { 369, 374.8294, 381.2452, 387.6698, 394.1464, 400.2024, 406.8782, 413.6598, 420.462, 427.2826, 433.7102,
-                    440.7416, 447.9366, 455.1046, 462.285, 469.0668, 476.306, 483.8448, 491.301, 498.9886, 506.2422,
-                    513.8138, 521.7074, 529.7428, 537.8402, 545.1664, 553.3534, 561.594, 569.6886, 577.7876, 585.65,
-                    594.228, 602.8036, 611.1666, 620.0818, 628.0824, 637.2574, 646.302, 655.1644, 664.0056, 672.3802,
-                    681.7192, 690.5234, 700.2084, 708.831, 718.485, 728.1112, 737.4764, 746.76, 756.3368, 766.5538,
-                    775.5058, 785.2646, 795.5902, 804.3818, 814.8998, 824.9532, 835.2062, 845.2798, 854.4728, 864.9582,
-                    875.3292, 886.171, 896.781, 906.5716, 916.7048, 927.5322, 937.875, 949.3972, 958.3464, 969.7274,
-                    980.2834, 992.1444, 1003.4264, 1013.0166, 1024.018, 1035.0438, 1046.34, 1057.6856, 1068.9836,
-                    1079.0312, 1091.677, 1102.3188, 1113.4846, 1124.4424, 1135.739, 1147.1488, 1158.9202, 1169.406,
-                    1181.5342, 1193.2834, 1203.8954, 1216.3286, 1226.2146, 1239.6684, 1251.9946, 1262.123, 1275.4338,
-                    1285.7378, 1296.076, 1308.9692, 1320.4964, 1333.0998, 1343.9864, 1357.7754, 1368.3208, 1380.4838,
-                    1392.7388, 1406.0758, 1416.9098, 1428.9728, 1440.9228, 1453.9292, 1462.617, 1476.05, 1490.2996,
-                    1500.6128, 1513.7392, 1524.5174, 1536.6322, 1548.2584, 1562.3766, 1572.423, 1587.1232, 1596.5164,
-                    1610.5938, 1622.5972, 1633.1222, 1647.7674, 1658.5044, 1671.57, 1683.7044, 1695.4142, 1708.7102,
-                    1720.6094, 1732.6522, 1747.841, 1756.4072, 1769.9786, 1782.3276, 1797.5216, 1808.3186, 1819.0694,
-                    1834.354, 1844.575, 1856.2808, 1871.1288, 1880.7852, 1893.9622, 1906.3418, 1920.6548, 1932.9302,
-                    1945.8584, 1955.473, 1968.8248, 1980.6446, 1995.9598, 2008.349, 2019.8556, 2033.0334, 2044.0206,
-                    2059.3956, 2069.9174, 2082.6084, 2093.7036, 2106.6108, 2118.9124, 2132.301, 2144.7628, 2159.8422,
-                    2171.0212, 2183.101, 2193.5112, 2208.052, 2221.3194, 2233.3282, 2247.295, 2257.7222, 2273.342,
-                    2286.5638, 2299.6786, 2310.8114, 2322.3312, 2335.516, 2349.874, 2363.5968, 2373.865, 2387.1918,
-                    2401.8328, 2414.8496, 2424.544, 2436.7592, 2447.1682, 2464.1958, 2474.3438, 2489.0006, 2497.4526,
-                    2513.6586, 2527.19, 2540.7028, 2553.768, },
+            { 369, 374.8294, 381.2452, 387.6698, 394.1464, 400.2024, 406.8782, 413.6598, 420.462, 427.2826, 433.7102, 440.7416, 447.9366, 455.1046, 462.285, 469.0668, 476.306, 483.8448, 491.301, 498.9886, 506.2422, 513.8138, 521.7074, 529.7428, 537.8402, 545.1664, 553.3534, 561.594, 569.6886, 577.7876, 585.65, 594.228, 602.8036, 611.1666, 620.0818, 628.0824, 637.2574, 646.302, 655.1644, 664.0056, 672.3802, 681.7192, 690.5234, 700.2084, 708.831, 718.485, 728.1112, 737.4764, 746.76, 756.3368, 766.5538, 775.5058, 785.2646, 795.5902, 804.3818, 814.8998, 824.9532, 835.2062, 845.2798, 854.4728, 864.9582, 875.3292, 886.171, 896.781, 906.5716, 916.7048, 927.5322, 937.875, 949.3972, 958.3464, 969.7274, 980.2834, 992.1444, 1003.4264, 1013.0166, 1024.018, 1035.0438, 1046.34, 1057.6856, 1068.9836, 1079.0312, 1091.677, 1102.3188, 1113.4846, 1124.4424, 1135.739, 1147.1488, 1158.9202, 1169.406, 1181.5342, 1193.2834, 1203.8954, 1216.3286, 1226.2146, 1239.6684, 1251.9946, 1262.123, 1275.4338,
+                    1285.7378, 1296.076, 1308.9692, 1320.4964, 1333.0998, 1343.9864, 1357.7754, 1368.3208, 1380.4838, 1392.7388, 1406.0758, 1416.9098, 1428.9728, 1440.9228, 1453.9292, 1462.617, 1476.05, 1490.2996, 1500.6128, 1513.7392, 1524.5174, 1536.6322, 1548.2584, 1562.3766, 1572.423, 1587.1232, 1596.5164, 1610.5938, 1622.5972, 1633.1222, 1647.7674, 1658.5044, 1671.57, 1683.7044, 1695.4142, 1708.7102, 1720.6094, 1732.6522, 1747.841, 1756.4072, 1769.9786, 1782.3276, 1797.5216, 1808.3186, 1819.0694, 1834.354, 1844.575, 1856.2808, 1871.1288, 1880.7852, 1893.9622, 1906.3418, 1920.6548, 1932.9302, 1945.8584, 1955.473, 1968.8248, 1980.6446, 1995.9598, 2008.349, 2019.8556, 2033.0334, 2044.0206, 2059.3956, 2069.9174, 2082.6084, 2093.7036, 2106.6108, 2118.9124, 2132.301, 2144.7628, 2159.8422, 2171.0212, 2183.101, 2193.5112, 2208.052, 2221.3194, 2233.3282, 2247.295, 2257.7222, 2273.342, 2286.5638, 2299.6786, 2310.8114, 2322.3312, 2335.516, 2349.874, 2363.5968, 2373.865, 2387.1918,
+                    2401.8328, 2414.8496, 2424.544, 2436.7592, 2447.1682, 2464.1958, 2474.3438, 2489.0006, 2497.4526, 2513.6586, 2527.19, 2540.7028, 2553.768, },
             // precision 10
-            { 738.1256, 750.4234, 763.1064, 775.4732, 788.4636, 801.0644, 814.488, 827.9654, 841.0832, 854.7864,
-                    868.1992, 882.2176, 896.5228, 910.1716, 924.7752, 938.899, 953.6126, 968.6492, 982.9474, 998.5214,
-                    1013.1064, 1028.6364, 1044.2468, 1059.4588, 1075.3832, 1091.0584, 1106.8606, 1123.3868, 1139.5062,
-                    1156.1862, 1172.463, 1189.339, 1206.1936, 1223.1292, 1240.1854, 1257.2908, 1275.3324, 1292.8518,
-                    1310.5204, 1328.4854, 1345.9318, 1364.552, 1381.4658, 1400.4256, 1419.849, 1438.152, 1456.8956,
-                    1474.8792, 1494.118, 1513.62, 1532.5132, 1551.9322, 1570.7726, 1590.6086, 1610.5332, 1630.5918,
-                    1650.4294, 1669.7662, 1690.4106, 1710.7338, 1730.9012, 1750.4486, 1770.1556, 1791.6338, 1812.7312,
-                    1833.6264, 1853.9526, 1874.8742, 1896.8326, 1918.1966, 1939.5594, 1961.07, 1983.037, 2003.1804,
-                    2026.071, 2047.4884, 2070.0848, 2091.2944, 2114.333, 2135.9626, 2158.2902, 2181.0814, 2202.0334,
-                    2224.4832, 2246.39, 2269.7202, 2292.1714, 2314.2358, 2338.9346, 2360.891, 2384.0264, 2408.3834,
-                    2430.1544, 2454.8684, 2476.9896, 2501.4368, 2522.8702, 2548.0408, 2570.6738, 2593.5208, 2617.0158,
-                    2640.2302, 2664.0962, 2687.4986, 2714.2588, 2735.3914, 2759.6244, 2781.8378, 2808.0072, 2830.6516,
-                    2856.2454, 2877.2136, 2903.4546, 2926.785, 2951.2294, 2976.468, 3000.867, 3023.6508, 3049.91,
-                    3073.5984, 3098.162, 3121.5564, 3146.2328, 3170.9484, 3195.5902, 3221.3346, 3242.7032, 3271.6112,
-                    3296.5546, 3317.7376, 3345.072, 3369.9518, 3394.326, 3418.1818, 3444.6926, 3469.086, 3494.2754,
-                    3517.8698, 3544.248, 3565.3768, 3588.7234, 3616.979, 3643.7504, 3668.6812, 3695.72, 3719.7392,
-                    3742.6224, 3770.4456, 3795.6602, 3819.9058, 3844.002, 3869.517, 3895.6824, 3920.8622, 3947.1364,
-                    3973.985, 3995.4772, 4021.62, 4046.628, 4074.65, 4096.2256, 4121.831, 4146.6406, 4173.276,
-                    4195.0744, 4223.9696, 4251.3708, 4272.9966, 4300.8046, 4326.302, 4353.1248, 4374.312, 4403.0322,
-                    4426.819, 4450.0598, 4478.5206, 4504.8116, 4528.8928, 4553.9584, 4578.8712, 4603.8384, 4632.3872,
-                    4655.5128, 4675.821, 4704.6222, 4731.9862, 4755.4174, 4781.2628, 4804.332, 4832.3048, 4862.8752,
-                    4883.4148, 4906.9544, 4935.3516, 4954.3532, 4984.0248, 5011.217, 5035.3258, 5057.3672, 5084.1828, },
+            { 738.1256, 750.4234, 763.1064, 775.4732, 788.4636, 801.0644, 814.488, 827.9654, 841.0832, 854.7864, 868.1992, 882.2176, 896.5228, 910.1716, 924.7752, 938.899, 953.6126, 968.6492, 982.9474, 998.5214, 1013.1064, 1028.6364, 1044.2468, 1059.4588, 1075.3832, 1091.0584, 1106.8606, 1123.3868, 1139.5062, 1156.1862, 1172.463, 1189.339, 1206.1936, 1223.1292, 1240.1854, 1257.2908, 1275.3324, 1292.8518, 1310.5204, 1328.4854, 1345.9318, 1364.552, 1381.4658, 1400.4256, 1419.849, 1438.152, 1456.8956, 1474.8792, 1494.118, 1513.62, 1532.5132, 1551.9322, 1570.7726, 1590.6086, 1610.5332, 1630.5918, 1650.4294, 1669.7662, 1690.4106, 1710.7338, 1730.9012, 1750.4486, 1770.1556, 1791.6338, 1812.7312, 1833.6264, 1853.9526, 1874.8742, 1896.8326, 1918.1966, 1939.5594, 1961.07, 1983.037, 2003.1804, 2026.071, 2047.4884, 2070.0848, 2091.2944, 2114.333, 2135.9626, 2158.2902, 2181.0814, 2202.0334, 2224.4832, 2246.39, 2269.7202, 2292.1714, 2314.2358, 2338.9346, 2360.891, 2384.0264, 2408.3834, 2430.1544,
+                    2454.8684, 2476.9896, 2501.4368, 2522.8702, 2548.0408, 2570.6738, 2593.5208, 2617.0158, 2640.2302, 2664.0962, 2687.4986, 2714.2588, 2735.3914, 2759.6244, 2781.8378, 2808.0072, 2830.6516, 2856.2454, 2877.2136, 2903.4546, 2926.785, 2951.2294, 2976.468, 3000.867, 3023.6508, 3049.91, 3073.5984, 3098.162, 3121.5564, 3146.2328, 3170.9484, 3195.5902, 3221.3346, 3242.7032, 3271.6112, 3296.5546, 3317.7376, 3345.072, 3369.9518, 3394.326, 3418.1818, 3444.6926, 3469.086, 3494.2754, 3517.8698, 3544.248, 3565.3768, 3588.7234, 3616.979, 3643.7504, 3668.6812, 3695.72, 3719.7392, 3742.6224, 3770.4456, 3795.6602, 3819.9058, 3844.002, 3869.517, 3895.6824, 3920.8622, 3947.1364, 3973.985, 3995.4772, 4021.62, 4046.628, 4074.65, 4096.2256, 4121.831, 4146.6406, 4173.276, 4195.0744, 4223.9696, 4251.3708, 4272.9966, 4300.8046, 4326.302, 4353.1248, 4374.312, 4403.0322, 4426.819, 4450.0598, 4478.5206, 4504.8116, 4528.8928, 4553.9584, 4578.8712, 4603.8384, 4632.3872, 4655.5128, 4675.821,
+                    4704.6222, 4731.9862, 4755.4174, 4781.2628, 4804.332, 4832.3048, 4862.8752, 4883.4148, 4906.9544, 4935.3516, 4954.3532, 4984.0248, 5011.217, 5035.3258, 5057.3672, 5084.1828, },
             // precision 11
-            { 1477, 1501.6014, 1526.5802, 1551.7942, 1577.3042, 1603.2062, 1629.8402, 1656.2292, 1682.9462, 1709.9926,
-                    1737.3026, 1765.4252, 1793.0578, 1821.6092, 1849.626, 1878.5568, 1908.527, 1937.5154, 1967.1874,
-                    1997.3878, 2027.37, 2058.1972, 2089.5728, 2120.1012, 2151.9668, 2183.292, 2216.0772, 2247.8578,
-                    2280.6562, 2313.041, 2345.714, 2380.3112, 2414.1806, 2447.9854, 2481.656, 2516.346, 2551.5154,
-                    2586.8378, 2621.7448, 2656.6722, 2693.5722, 2729.1462, 2765.4124, 2802.8728, 2838.898, 2876.408,
-                    2913.4926, 2951.4938, 2989.6776, 3026.282, 3065.7704, 3104.1012, 3143.7388, 3181.6876, 3221.1872,
-                    3261.5048, 3300.0214, 3339.806, 3381.409, 3421.4144, 3461.4294, 3502.2286, 3544.651, 3586.6156,
-                    3627.337, 3670.083, 3711.1538, 3753.5094, 3797.01, 3838.6686, 3882.1678, 3922.8116, 3967.9978,
-                    4009.9204, 4054.3286, 4097.5706, 4140.6014, 4185.544, 4229.5976, 4274.583, 4316.9438, 4361.672,
-                    4406.2786, 4451.8628, 4496.1834, 4543.505, 4589.1816, 4632.5188, 4678.2294, 4724.8908, 4769.0194,
-                    4817.052, 4861.4588, 4910.1596, 4956.4344, 5002.5238, 5048.13, 5093.6374, 5142.8162, 5187.7894,
-                    5237.3984, 5285.6078, 5331.0858, 5379.1036, 5428.6258, 5474.6018, 5522.7618, 5571.5822, 5618.59,
-                    5667.9992, 5714.88, 5763.454, 5808.6982, 5860.3644, 5910.2914, 5953.571, 6005.9232, 6055.1914,
-                    6104.5882, 6154.5702, 6199.7036, 6251.1764, 6298.7596, 6350.0302, 6398.061, 6448.4694, 6495.933,
-                    6548.0474, 6597.7166, 6646.9416, 6695.9208, 6742.6328, 6793.5276, 6842.1934, 6894.2372, 6945.3864,
-                    6996.9228, 7044.2372, 7094.1374, 7142.2272, 7192.2942, 7238.8338, 7288.9006, 7344.0908, 7394.8544,
-                    7443.5176, 7490.4148, 7542.9314, 7595.6738, 7641.9878, 7694.3688, 7743.0448, 7797.522, 7845.53,
-                    7899.594, 7950.3132, 7996.455, 8050.9442, 8092.9114, 8153.1374, 8197.4472, 8252.8278, 8301.8728,
-                    8348.6776, 8401.4698, 8453.551, 8504.6598, 8553.8944, 8604.1276, 8657.6514, 8710.3062, 8758.908,
-                    8807.8706, 8862.1702, 8910.4668, 8960.77, 9007.2766, 9063.164, 9121.0534, 9164.1354, 9218.1594,
-                    9267.767, 9319.0594, 9372.155, 9419.7126, 9474.3722, 9520.1338, 9572.368, 9622.7702, 9675.8448,
-                    9726.5396, 9778.7378, 9827.6554, 9878.1922, 9928.7782, 9978.3984, 10026.578, 10076.5626, 10137.1618,
-                    10177.5244, 10229.9176, },
+            { 1477, 1501.6014, 1526.5802, 1551.7942, 1577.3042, 1603.2062, 1629.8402, 1656.2292, 1682.9462, 1709.9926, 1737.3026, 1765.4252, 1793.0578, 1821.6092, 1849.626, 1878.5568, 1908.527, 1937.5154, 1967.1874, 1997.3878, 2027.37, 2058.1972, 2089.5728, 2120.1012, 2151.9668, 2183.292, 2216.0772, 2247.8578, 2280.6562, 2313.041, 2345.714, 2380.3112, 2414.1806, 2447.9854, 2481.656, 2516.346, 2551.5154, 2586.8378, 2621.7448, 2656.6722, 2693.5722, 2729.1462, 2765.4124, 2802.8728, 2838.898, 2876.408, 2913.4926, 2951.4938, 2989.6776, 3026.282, 3065.7704, 3104.1012, 3143.7388, 3181.6876, 3221.1872, 3261.5048, 3300.0214, 3339.806, 3381.409, 3421.4144, 3461.4294, 3502.2286, 3544.651, 3586.6156, 3627.337, 3670.083, 3711.1538, 3753.5094, 3797.01, 3838.6686, 3882.1678, 3922.8116, 3967.9978, 4009.9204, 4054.3286, 4097.5706, 4140.6014, 4185.544, 4229.5976, 4274.583, 4316.9438, 4361.672, 4406.2786, 4451.8628, 4496.1834, 4543.505, 4589.1816, 4632.5188, 4678.2294, 4724.8908, 4769.0194, 4817.052,
+                    4861.4588, 4910.1596, 4956.4344, 5002.5238, 5048.13, 5093.6374, 5142.8162, 5187.7894, 5237.3984, 5285.6078, 5331.0858, 5379.1036, 5428.6258, 5474.6018, 5522.7618, 5571.5822, 5618.59, 5667.9992, 5714.88, 5763.454, 5808.6982, 5860.3644, 5910.2914, 5953.571, 6005.9232, 6055.1914, 6104.5882, 6154.5702, 6199.7036, 6251.1764, 6298.7596, 6350.0302, 6398.061, 6448.4694, 6495.933, 6548.0474, 6597.7166, 6646.9416, 6695.9208, 6742.6328, 6793.5276, 6842.1934, 6894.2372, 6945.3864, 6996.9228, 7044.2372, 7094.1374, 7142.2272, 7192.2942, 7238.8338, 7288.9006, 7344.0908, 7394.8544, 7443.5176, 7490.4148, 7542.9314, 7595.6738, 7641.9878, 7694.3688, 7743.0448, 7797.522, 7845.53, 7899.594, 7950.3132, 7996.455, 8050.9442, 8092.9114, 8153.1374, 8197.4472, 8252.8278, 8301.8728, 8348.6776, 8401.4698, 8453.551, 8504.6598, 8553.8944, 8604.1276, 8657.6514, 8710.3062, 8758.908, 8807.8706, 8862.1702, 8910.4668, 8960.77, 9007.2766, 9063.164, 9121.0534, 9164.1354, 9218.1594, 9267.767, 9319.0594,
+                    9372.155, 9419.7126, 9474.3722, 9520.1338, 9572.368, 9622.7702, 9675.8448, 9726.5396, 9778.7378, 9827.6554, 9878.1922, 9928.7782, 9978.3984, 10026.578, 10076.5626, 10137.1618, 10177.5244, 10229.9176, },
             // precision 12
-            { 2954, 3003.4782, 3053.3568, 3104.3666, 3155.324, 3206.9598, 3259.648, 3312.539, 3366.1474, 3420.2576,
-                    3474.8376, 3530.6076, 3586.451, 3643.38, 3700.4104, 3757.5638, 3815.9676, 3875.193, 3934.838,
-                    3994.8548, 4055.018, 4117.1742, 4178.4482, 4241.1294, 4304.4776, 4367.4044, 4431.8724, 4496.3732,
-                    4561.4304, 4627.5326, 4693.949, 4761.5532, 4828.7256, 4897.6182, 4965.5186, 5034.4528, 5104.865,
-                    5174.7164, 5244.6828, 5316.6708, 5387.8312, 5459.9036, 5532.476, 5604.8652, 5679.6718, 5753.757,
-                    5830.2072, 5905.2828, 5980.0434, 6056.6264, 6134.3192, 6211.5746, 6290.0816, 6367.1176, 6447.9796,
-                    6526.5576, 6606.1858, 6686.9144, 6766.1142, 6847.0818, 6927.9664, 7010.9096, 7091.0816, 7175.3962,
-                    7260.3454, 7344.018, 7426.4214, 7511.3106, 7596.0686, 7679.8094, 7765.818, 7852.4248, 7936.834,
-                    8022.363, 8109.5066, 8200.4554, 8288.5832, 8373.366, 8463.4808, 8549.7682, 8642.0522, 8728.3288,
-                    8820.9528, 8907.727, 9001.0794, 9091.2522, 9179.988, 9269.852, 9362.6394, 9453.642, 9546.9024,
-                    9640.6616, 9732.6622, 9824.3254, 9917.7484, 10007.9392, 10106.7508, 10196.2152, 10289.8114,
-                    10383.5494, 10482.3064, 10576.8734, 10668.7872, 10764.7156, 10862.0196, 10952.793, 11049.9748,
-                    11146.0702, 11241.4492, 11339.2772, 11434.2336, 11530.741, 11627.6136, 11726.311, 11821.5964,
-                    11918.837, 12015.3724, 12113.0162, 12213.0424, 12306.9804, 12408.4518, 12504.8968, 12604.586,
-                    12700.9332, 12798.705, 12898.5142, 12997.0488, 13094.788, 13198.475, 13292.7764, 13392.9698,
-                    13486.8574, 13590.1616, 13686.5838, 13783.6264, 13887.2638, 13992.0978, 14081.0844, 14189.9956,
-                    14280.0912, 14382.4956, 14486.4384, 14588.1082, 14686.2392, 14782.276, 14888.0284, 14985.1864,
-                    15088.8596, 15187.0998, 15285.027, 15383.6694, 15495.8266, 15591.3736, 15694.2008, 15790.3246,
-                    15898.4116, 15997.4522, 16095.5014, 16198.8514, 16291.7492, 16402.6424, 16499.1266, 16606.2436,
-                    16697.7186, 16796.3946, 16902.3376, 17005.7672, 17100.814, 17206.8282, 17305.8262, 17416.0744,
-                    17508.4092, 17617.0178, 17715.4554, 17816.758, 17920.1748, 18012.9236, 18119.7984, 18223.2248,
-                    18324.2482, 18426.6276, 18525.0932, 18629.8976, 18733.2588, 18831.0466, 18940.1366, 19032.2696,
-                    19131.729, 19243.4864, 19349.6932, 19442.866, 19547.9448, 19653.2798, 19754.4034, 19854.0692,
-                    19965.1224, 20065.1774, 20158.2212, 20253.353, 20366.3264, 20463.22, },
+            { 2954, 3003.4782, 3053.3568, 3104.3666, 3155.324, 3206.9598, 3259.648, 3312.539, 3366.1474, 3420.2576, 3474.8376, 3530.6076, 3586.451, 3643.38, 3700.4104, 3757.5638, 3815.9676, 3875.193, 3934.838, 3994.8548, 4055.018, 4117.1742, 4178.4482, 4241.1294, 4304.4776, 4367.4044, 4431.8724, 4496.3732, 4561.4304, 4627.5326, 4693.949, 4761.5532, 4828.7256, 4897.6182, 4965.5186, 5034.4528, 5104.865, 5174.7164, 5244.6828, 5316.6708, 5387.8312, 5459.9036, 5532.476, 5604.8652, 5679.6718, 5753.757, 5830.2072, 5905.2828, 5980.0434, 6056.6264, 6134.3192, 6211.5746, 6290.0816, 6367.1176, 6447.9796, 6526.5576, 6606.1858, 6686.9144, 6766.1142, 6847.0818, 6927.9664, 7010.9096, 7091.0816, 7175.3962, 7260.3454, 7344.018, 7426.4214, 7511.3106, 7596.0686, 7679.8094, 7765.818, 7852.4248, 7936.834, 8022.363, 8109.5066, 8200.4554, 8288.5832, 8373.366, 8463.4808, 8549.7682, 8642.0522, 8728.3288, 8820.9528, 8907.727, 9001.0794, 9091.2522, 9179.988, 9269.852, 9362.6394, 9453.642, 9546.9024, 9640.6616,
+                    9732.6622, 9824.3254, 9917.7484, 10007.9392, 10106.7508, 10196.2152, 10289.8114, 10383.5494, 10482.3064, 10576.8734, 10668.7872, 10764.7156, 10862.0196, 10952.793, 11049.9748, 11146.0702, 11241.4492, 11339.2772, 11434.2336, 11530.741, 11627.6136, 11726.311, 11821.5964, 11918.837, 12015.3724, 12113.0162, 12213.0424, 12306.9804, 12408.4518, 12504.8968, 12604.586, 12700.9332, 12798.705, 12898.5142, 12997.0488, 13094.788, 13198.475, 13292.7764, 13392.9698, 13486.8574, 13590.1616, 13686.5838, 13783.6264, 13887.2638, 13992.0978, 14081.0844, 14189.9956, 14280.0912, 14382.4956, 14486.4384, 14588.1082, 14686.2392, 14782.276, 14888.0284, 14985.1864, 15088.8596, 15187.0998, 15285.027, 15383.6694, 15495.8266, 15591.3736, 15694.2008, 15790.3246, 15898.4116, 15997.4522, 16095.5014, 16198.8514, 16291.7492, 16402.6424, 16499.1266, 16606.2436, 16697.7186, 16796.3946, 16902.3376, 17005.7672, 17100.814, 17206.8282, 17305.8262, 17416.0744, 17508.4092, 17617.0178, 17715.4554,
+                    17816.758, 17920.1748, 18012.9236, 18119.7984, 18223.2248, 18324.2482, 18426.6276, 18525.0932, 18629.8976, 18733.2588, 18831.0466, 18940.1366, 19032.2696, 19131.729, 19243.4864, 19349.6932, 19442.866, 19547.9448, 19653.2798, 19754.4034, 19854.0692, 19965.1224, 20065.1774, 20158.2212, 20253.353, 20366.3264, 20463.22, },
             // precision 13
-            { 5908.5052, 6007.2672, 6107.347, 6208.5794, 6311.2622, 6414.5514, 6519.3376, 6625.6952, 6732.5988,
-                    6841.3552, 6950.5972, 7061.3082, 7173.5646, 7287.109, 7401.8216, 7516.4344, 7633.3802, 7751.2962,
-                    7870.3784, 7990.292, 8110.79, 8233.4574, 8356.6036, 8482.2712, 8607.7708, 8735.099, 8863.1858,
-                    8993.4746, 9123.8496, 9255.6794, 9388.5448, 9522.7516, 9657.3106, 9792.6094, 9930.5642, 10068.794,
-                    10206.7256, 10347.81, 10490.3196, 10632.0778, 10775.9916, 10920.4662, 11066.124, 11213.073,
-                    11358.0362, 11508.1006, 11659.1716, 11808.7514, 11959.4884, 12112.1314, 12265.037, 12420.3756,
-                    12578.933, 12734.311, 12890.0006, 13047.2144, 13207.3096, 13368.5144, 13528.024, 13689.847,
-                    13852.7528, 14018.3168, 14180.5372, 14346.9668, 14513.5074, 14677.867, 14846.2186, 15017.4186,
-                    15184.9716, 15356.339, 15529.2972, 15697.3578, 15871.8686, 16042.187, 16216.4094, 16389.4188,
-                    16565.9126, 16742.3272, 16919.0042, 17094.7592, 17273.965, 17451.8342, 17634.4254, 17810.5984,
-                    17988.9242, 18171.051, 18354.7938, 18539.466, 18721.0408, 18904.9972, 19081.867, 19271.9118,
-                    19451.8694, 19637.9816, 19821.2922, 20013.1292, 20199.3858, 20387.8726, 20572.9514, 20770.7764,
-                    20955.1714, 21144.751, 21329.9952, 21520.709, 21712.7016, 21906.3868, 22096.2626, 22286.0524,
-                    22475.051, 22665.5098, 22862.8492, 23055.5294, 23249.6138, 23437.848, 23636.273, 23826.093,
-                    24020.3296, 24213.3896, 24411.7392, 24602.9614, 24805.7952, 24998.1552, 25193.9588, 25389.0166,
-                    25585.8392, 25780.6976, 25981.2728, 26175.977, 26376.5252, 26570.1964, 26773.387, 26962.9812,
-                    27163.0586, 27368.164, 27565.0534, 27758.7428, 27961.1276, 28163.2324, 28362.3816, 28565.7668,
-                    28758.644, 28956.9768, 29163.4722, 29354.7026, 29561.1186, 29767.9948, 29959.9986, 30164.0492,
-                    30366.9818, 30562.5338, 30762.9928, 30976.1592, 31166.274, 31376.722, 31570.3734, 31770.809,
-                    31974.8934, 32179.5286, 32387.5442, 32582.3504, 32794.076, 32989.9528, 33191.842, 33392.4684,
-                    33595.659, 33801.8672, 34000.3414, 34200.0922, 34402.6792, 34610.0638, 34804.0084, 35011.13,
-                    35218.669, 35418.6634, 35619.0792, 35830.6534, 36028.4966, 36229.7902, 36438.6422, 36630.7764,
-                    36833.3102, 37048.6728, 37247.3916, 37453.5904, 37669.3614, 37854.5526, 38059.305, 38268.0936,
-                    38470.2516, 38674.7064, 38876.167, 39068.3794, 39281.9144, 39492.8566, 39684.8628, 39898.4108,
-                    40093.1836, 40297.6858, 40489.7086, 40717.2424, },
+            { 5908.5052, 6007.2672, 6107.347, 6208.5794, 6311.2622, 6414.5514, 6519.3376, 6625.6952, 6732.5988, 6841.3552, 6950.5972, 7061.3082, 7173.5646, 7287.109, 7401.8216, 7516.4344, 7633.3802, 7751.2962, 7870.3784, 7990.292, 8110.79, 8233.4574, 8356.6036, 8482.2712, 8607.7708, 8735.099, 8863.1858, 8993.4746, 9123.8496, 9255.6794, 9388.5448, 9522.7516, 9657.3106, 9792.6094, 9930.5642, 10068.794, 10206.7256, 10347.81, 10490.3196, 10632.0778, 10775.9916, 10920.4662, 11066.124, 11213.073, 11358.0362, 11508.1006, 11659.1716, 11808.7514, 11959.4884, 12112.1314, 12265.037, 12420.3756, 12578.933, 12734.311, 12890.0006, 13047.2144, 13207.3096, 13368.5144, 13528.024, 13689.847, 13852.7528, 14018.3168, 14180.5372, 14346.9668, 14513.5074, 14677.867, 14846.2186, 15017.4186, 15184.9716, 15356.339, 15529.2972, 15697.3578, 15871.8686, 16042.187, 16216.4094, 16389.4188, 16565.9126, 16742.3272, 16919.0042, 17094.7592, 17273.965, 17451.8342, 17634.4254, 17810.5984, 17988.9242, 18171.051,
+                    18354.7938, 18539.466, 18721.0408, 18904.9972, 19081.867, 19271.9118, 19451.8694, 19637.9816, 19821.2922, 20013.1292, 20199.3858, 20387.8726, 20572.9514, 20770.7764, 20955.1714, 21144.751, 21329.9952, 21520.709, 21712.7016, 21906.3868, 22096.2626, 22286.0524, 22475.051, 22665.5098, 22862.8492, 23055.5294, 23249.6138, 23437.848, 23636.273, 23826.093, 24020.3296, 24213.3896, 24411.7392, 24602.9614, 24805.7952, 24998.1552, 25193.9588, 25389.0166, 25585.8392, 25780.6976, 25981.2728, 26175.977, 26376.5252, 26570.1964, 26773.387, 26962.9812, 27163.0586, 27368.164, 27565.0534, 27758.7428, 27961.1276, 28163.2324, 28362.3816, 28565.7668, 28758.644, 28956.9768, 29163.4722, 29354.7026, 29561.1186, 29767.9948, 29959.9986, 30164.0492, 30366.9818, 30562.5338, 30762.9928, 30976.1592, 31166.274, 31376.722, 31570.3734, 31770.809, 31974.8934, 32179.5286, 32387.5442, 32582.3504, 32794.076, 32989.9528, 33191.842, 33392.4684, 33595.659, 33801.8672, 34000.3414, 34200.0922, 34402.6792,
+                    34610.0638, 34804.0084, 35011.13, 35218.669, 35418.6634, 35619.0792, 35830.6534, 36028.4966, 36229.7902, 36438.6422, 36630.7764, 36833.3102, 37048.6728, 37247.3916, 37453.5904, 37669.3614, 37854.5526, 38059.305, 38268.0936, 38470.2516, 38674.7064, 38876.167, 39068.3794, 39281.9144, 39492.8566, 39684.8628, 39898.4108, 40093.1836, 40297.6858, 40489.7086, 40717.2424, },
             // precision 14
-            { 11817.475, 12015.0046, 12215.3792, 12417.7504, 12623.1814, 12830.0086, 13040.0072, 13252.503, 13466.178,
-                    13683.2738, 13902.0344, 14123.9798, 14347.394, 14573.7784, 14802.6894, 15033.6824, 15266.9134,
-                    15502.8624, 15741.4944, 15980.7956, 16223.8916, 16468.6316, 16715.733, 16965.5726, 17217.204,
-                    17470.666, 17727.8516, 17986.7886, 18247.6902, 18510.9632, 18775.304, 19044.7486, 19314.4408,
-                    19587.202, 19862.2576, 20135.924, 20417.0324, 20697.9788, 20979.6112, 21265.0274, 21550.723,
-                    21841.6906, 22132.162, 22428.1406, 22722.127, 23020.5606, 23319.7394, 23620.4014, 23925.2728,
-                    24226.9224, 24535.581, 24845.505, 25155.9618, 25470.3828, 25785.9702, 26103.7764, 26420.4132,
-                    26742.0186, 27062.8852, 27388.415, 27714.6024, 28042.296, 28365.4494, 28701.1526, 29031.8008,
-                    29364.2156, 29704.497, 30037.1458, 30380.111, 30723.8168, 31059.5114, 31404.9498, 31751.6752,
-                    32095.2686, 32444.7792, 32794.767, 33145.204, 33498.4226, 33847.6502, 34209.006, 34560.849,
-                    34919.4838, 35274.9778, 35635.1322, 35996.3266, 36359.1394, 36722.8266, 37082.8516, 37447.7354,
-                    37815.9606, 38191.0692, 38559.4106, 38924.8112, 39294.6726, 39663.973, 40042.261, 40416.2036,
-                    40779.2036, 41161.6436, 41540.9014, 41921.1998, 42294.7698, 42678.5264, 43061.3464, 43432.375,
-                    43818.432, 44198.6598, 44583.0138, 44970.4794, 45353.924, 45729.858, 46118.2224, 46511.5724,
-                    46900.7386, 47280.6964, 47668.1472, 48055.6796, 48446.9436, 48838.7146, 49217.7296, 49613.7796,
-                    50010.7508, 50410.0208, 50793.7886, 51190.2456, 51583.1882, 51971.0796, 52376.5338, 52763.319,
-                    53165.5534, 53556.5594, 53948.2702, 54346.352, 54748.7914, 55138.577, 55543.4824, 55941.1748,
-                    56333.7746, 56745.1552, 57142.7944, 57545.2236, 57935.9956, 58348.5268, 58737.5474, 59158.5962,
-                    59542.6896, 59958.8004, 60349.3788, 60755.0212, 61147.6144, 61548.194, 61946.0696, 62348.6042,
-                    62763.603, 63162.781, 63560.635, 63974.3482, 64366.4908, 64771.5876, 65176.7346, 65597.3916,
-                    65995.915, 66394.0384, 66822.9396, 67203.6336, 67612.2032, 68019.0078, 68420.0388, 68821.22,
-                    69235.8388, 69640.0724, 70055.155, 70466.357, 70863.4266, 71276.2482, 71677.0306, 72080.2006,
-                    72493.0214, 72893.5952, 73314.5856, 73714.9852, 74125.3022, 74521.2122, 74933.6814, 75341.5904,
-                    75743.0244, 76166.0278, 76572.1322, 76973.1028, 77381.6284, 77800.6092, 78189.328, 78607.0962,
-                    79012.2508, 79407.8358, 79825.725, 80238.701, 80646.891, 81035.6436, 81460.0448, 81876.3884, },
+            { 11817.475, 12015.0046, 12215.3792, 12417.7504, 12623.1814, 12830.0086, 13040.0072, 13252.503, 13466.178, 13683.2738, 13902.0344, 14123.9798, 14347.394, 14573.7784, 14802.6894, 15033.6824, 15266.9134, 15502.8624, 15741.4944, 15980.7956, 16223.8916, 16468.6316, 16715.733, 16965.5726, 17217.204, 17470.666, 17727.8516, 17986.7886, 18247.6902, 18510.9632, 18775.304, 19044.7486, 19314.4408, 19587.202, 19862.2576, 20135.924, 20417.0324, 20697.9788, 20979.6112, 21265.0274, 21550.723, 21841.6906, 22132.162, 22428.1406, 22722.127, 23020.5606, 23319.7394, 23620.4014, 23925.2728, 24226.9224, 24535.581, 24845.505, 25155.9618, 25470.3828, 25785.9702, 26103.7764, 26420.4132, 26742.0186, 27062.8852, 27388.415, 27714.6024, 28042.296, 28365.4494, 28701.1526, 29031.8008, 29364.2156, 29704.497, 30037.1458, 30380.111, 30723.8168, 31059.5114, 31404.9498, 31751.6752, 32095.2686, 32444.7792, 32794.767, 33145.204, 33498.4226, 33847.6502, 34209.006, 34560.849, 34919.4838, 35274.9778, 35635.1322,
+                    35996.3266, 36359.1394, 36722.8266, 37082.8516, 37447.7354, 37815.9606, 38191.0692, 38559.4106, 38924.8112, 39294.6726, 39663.973, 40042.261, 40416.2036, 40779.2036, 41161.6436, 41540.9014, 41921.1998, 42294.7698, 42678.5264, 43061.3464, 43432.375, 43818.432, 44198.6598, 44583.0138, 44970.4794, 45353.924, 45729.858, 46118.2224, 46511.5724, 46900.7386, 47280.6964, 47668.1472, 48055.6796, 48446.9436, 48838.7146, 49217.7296, 49613.7796, 50010.7508, 50410.0208, 50793.7886, 51190.2456, 51583.1882, 51971.0796, 52376.5338, 52763.319, 53165.5534, 53556.5594, 53948.2702, 54346.352, 54748.7914, 55138.577, 55543.4824, 55941.1748, 56333.7746, 56745.1552, 57142.7944, 57545.2236, 57935.9956, 58348.5268, 58737.5474, 59158.5962, 59542.6896, 59958.8004, 60349.3788, 60755.0212, 61147.6144, 61548.194, 61946.0696, 62348.6042, 62763.603, 63162.781, 63560.635, 63974.3482, 64366.4908, 64771.5876, 65176.7346, 65597.3916, 65995.915, 66394.0384, 66822.9396, 67203.6336, 67612.2032,
+                    68019.0078, 68420.0388, 68821.22, 69235.8388, 69640.0724, 70055.155, 70466.357, 70863.4266, 71276.2482, 71677.0306, 72080.2006, 72493.0214, 72893.5952, 73314.5856, 73714.9852, 74125.3022, 74521.2122, 74933.6814, 75341.5904, 75743.0244, 76166.0278, 76572.1322, 76973.1028, 77381.6284, 77800.6092, 78189.328, 78607.0962, 79012.2508, 79407.8358, 79825.725, 80238.701, 80646.891, 81035.6436, 81460.0448, 81876.3884, },
             // precision 15
-            { 23635.0036, 24030.8034, 24431.4744, 24837.1524, 25246.7928, 25661.326, 26081.3532, 26505.2806, 26933.9892,
-                    27367.7098, 27805.318, 28248.799, 28696.4382, 29148.8244, 29605.5138, 30066.8668, 30534.2344,
-                    31006.32, 31480.778, 31962.2418, 32447.3324, 32938.0232, 33432.731, 33930.728, 34433.9896,
-                    34944.1402, 35457.5588, 35974.5958, 36497.3296, 37021.9096, 37554.326, 38088.0826, 38628.8816,
-                    39171.3192, 39723.2326, 40274.5554, 40832.3142, 41390.613, 41959.5908, 42532.5466, 43102.0344,
-                    43683.5072, 44266.694, 44851.2822, 45440.7862, 46038.0586, 46640.3164, 47241.064, 47846.155,
-                    48454.7396, 49076.9168, 49692.542, 50317.4778, 50939.65, 51572.5596, 52210.2906, 52843.7396,
-                    53481.3996, 54127.236, 54770.406, 55422.6598, 56078.7958, 56736.7174, 57397.6784, 58064.5784,
-                    58730.308, 59404.9784, 60077.0864, 60751.9158, 61444.1386, 62115.817, 62808.7742, 63501.4774,
-                    64187.5454, 64883.6622, 65582.7468, 66274.5318, 66976.9276, 67688.7764, 68402.138, 69109.6274,
-                    69822.9706, 70543.6108, 71265.5202, 71983.3848, 72708.4656, 73433.384, 74158.4664, 74896.4868,
-                    75620.9564, 76362.1434, 77098.3204, 77835.7662, 78582.6114, 79323.9902, 80067.8658, 80814.9246,
-                    81567.0136, 82310.8536, 83061.9952, 83821.4096, 84580.8608, 85335.547, 86092.5802, 86851.6506,
-                    87612.311, 88381.2016, 89146.3296, 89907.8974, 90676.846, 91451.4152, 92224.5518, 92995.8686,
-                    93763.5066, 94551.2796, 95315.1944, 96096.1806, 96881.0918, 97665.679, 98442.68, 99229.3002,
-                    100011.0994, 100790.6386, 101580.1564, 102377.7484, 103152.1392, 103944.2712, 104730.216,
-                    105528.6336, 106324.9398, 107117.6706, 107890.3988, 108695.2266, 109485.238, 110294.7876,
-                    111075.0958, 111878.0496, 112695.2864, 113464.5486, 114270.0474, 115068.608, 115884.3626,
-                    116673.2588, 117483.3716, 118275.097, 119085.4092, 119879.2808, 120687.5868, 121499.9944,
-                    122284.916, 123095.9254, 123912.5038, 124709.0454, 125503.7182, 126323.259, 127138.9412,
-                    127943.8294, 128755.646, 129556.5354, 130375.3298, 131161.4734, 131971.1962, 132787.5458,
-                    133588.1056, 134431.351, 135220.2906, 136023.398, 136846.6558, 137667.0004, 138463.663, 139283.7154,
-                    140074.6146, 140901.3072, 141721.8548, 142543.2322, 143356.1096, 144173.7412, 144973.0948,
-                    145794.3162, 146609.5714, 147420.003, 148237.9784, 149050.5696, 149854.761, 150663.1966,
-                    151494.0754, 152313.1416, 153112.6902, 153935.7206, 154746.9262, 155559.547, 156401.9746,
-                    157228.7036, 158008.7254, 158820.75, 159646.9184, 160470.4458, 161279.5348, 162093.3114, 162918.542,
-                    163729.2842, },
+            { 23635.0036, 24030.8034, 24431.4744, 24837.1524, 25246.7928, 25661.326, 26081.3532, 26505.2806, 26933.9892, 27367.7098, 27805.318, 28248.799, 28696.4382, 29148.8244, 29605.5138, 30066.8668, 30534.2344, 31006.32, 31480.778, 31962.2418, 32447.3324, 32938.0232, 33432.731, 33930.728, 34433.9896, 34944.1402, 35457.5588, 35974.5958, 36497.3296, 37021.9096, 37554.326, 38088.0826, 38628.8816, 39171.3192, 39723.2326, 40274.5554, 40832.3142, 41390.613, 41959.5908, 42532.5466, 43102.0344, 43683.5072, 44266.694, 44851.2822, 45440.7862, 46038.0586, 46640.3164, 47241.064, 47846.155, 48454.7396, 49076.9168, 49692.542, 50317.4778, 50939.65, 51572.5596, 52210.2906, 52843.7396, 53481.3996, 54127.236, 54770.406, 55422.6598, 56078.7958, 56736.7174, 57397.6784, 58064.5784, 58730.308, 59404.9784, 60077.0864, 60751.9158, 61444.1386, 62115.817, 62808.7742, 63501.4774, 64187.5454, 64883.6622, 65582.7468, 66274.5318, 66976.9276, 67688.7764, 68402.138, 69109.6274, 69822.9706, 70543.6108,
+                    71265.5202, 71983.3848, 72708.4656, 73433.384, 74158.4664, 74896.4868, 75620.9564, 76362.1434, 77098.3204, 77835.7662, 78582.6114, 79323.9902, 80067.8658, 80814.9246, 81567.0136, 82310.8536, 83061.9952, 83821.4096, 84580.8608, 85335.547, 86092.5802, 86851.6506, 87612.311, 88381.2016, 89146.3296, 89907.8974, 90676.846, 91451.4152, 92224.5518, 92995.8686, 93763.5066, 94551.2796, 95315.1944, 96096.1806, 96881.0918, 97665.679, 98442.68, 99229.3002, 100011.0994, 100790.6386, 101580.1564, 102377.7484, 103152.1392, 103944.2712, 104730.216, 105528.6336, 106324.9398, 107117.6706, 107890.3988, 108695.2266, 109485.238, 110294.7876, 111075.0958, 111878.0496, 112695.2864, 113464.5486, 114270.0474, 115068.608, 115884.3626, 116673.2588, 117483.3716, 118275.097, 119085.4092, 119879.2808, 120687.5868, 121499.9944, 122284.916, 123095.9254, 123912.5038, 124709.0454, 125503.7182, 126323.259, 127138.9412, 127943.8294, 128755.646, 129556.5354, 130375.3298, 131161.4734, 131971.1962,
+                    132787.5458, 133588.1056, 134431.351, 135220.2906, 136023.398, 136846.6558, 137667.0004, 138463.663, 139283.7154, 140074.6146, 140901.3072, 141721.8548, 142543.2322, 143356.1096, 144173.7412, 144973.0948, 145794.3162, 146609.5714, 147420.003, 148237.9784, 149050.5696, 149854.761, 150663.1966, 151494.0754, 152313.1416, 153112.6902, 153935.7206, 154746.9262, 155559.547, 156401.9746, 157228.7036, 158008.7254, 158820.75, 159646.9184, 160470.4458, 161279.5348, 162093.3114, 162918.542, 163729.2842, },
             // precision 16
-            { 47271, 48062.3584, 48862.7074, 49673.152, 50492.8416, 51322.9514, 52161.03, 53009.407, 53867.6348,
-                    54734.206, 55610.5144, 56496.2096, 57390.795, 58297.268, 59210.6448, 60134.665, 61068.0248,
-                    62010.4472, 62962.5204, 63923.5742, 64895.0194, 65876.4182, 66862.6136, 67862.6968, 68868.8908,
-                    69882.8544, 70911.271, 71944.0924, 72990.0326, 74040.692, 75100.6336, 76174.7826, 77252.5998,
-                    78340.2974, 79438.2572, 80545.4976, 81657.2796, 82784.6336, 83915.515, 85059.7362, 86205.9368,
-                    87364.4424, 88530.3358, 89707.3744, 90885.9638, 92080.197, 93275.5738, 94479.391, 95695.918,
-                    96919.2236, 98148.4602, 99382.3474, 100625.6974, 101878.0284, 103141.6278, 104409.4588, 105686.2882,
-                    106967.5402, 108261.6032, 109548.1578, 110852.0728, 112162.231, 113479.0072, 114806.2626,
-                    116137.9072, 117469.5048, 118813.5186, 120165.4876, 121516.2556, 122875.766, 124250.5444,
-                    125621.2222, 127003.2352, 128387.848, 129775.2644, 131181.7776, 132577.3086, 133979.9458,
-                    135394.1132, 136800.9078, 138233.217, 139668.5308, 141085.212, 142535.2122, 143969.0684,
-                    145420.2872, 146878.1542, 148332.7572, 149800.3202, 151269.66, 152743.6104, 154213.0948, 155690.288,
-                    157169.4246, 158672.1756, 160160.059, 161650.6854, 163145.7772, 164645.6726, 166159.1952,
-                    167682.1578, 169177.3328, 170700.0118, 172228.8964, 173732.6664, 175265.5556, 176787.799,
-                    178317.111, 179856.6914, 181400.865, 182943.4612, 184486.742, 186033.4698, 187583.7886, 189148.1868,
-                    190688.4526, 192250.1926, 193810.9042, 195354.2972, 196938.7682, 198493.5898, 200079.2824,
-                    201618.912, 203205.5492, 204765.5798, 206356.1124, 207929.3064, 209498.7196, 211086.229,
-                    212675.1324, 214256.7892, 215826.2392, 217412.8474, 218995.6724, 220618.6038, 222207.1166,
-                    223781.0364, 225387.4332, 227005.7928, 228590.4336, 230217.8738, 231805.1054, 233408.9, 234995.3432,
-                    236601.4956, 238190.7904, 239817.2548, 241411.2832, 243002.4066, 244640.1884, 246255.3128,
-                    247849.3508, 249479.9734, 251106.8822, 252705.027, 254332.9242, 255935.129, 257526.9014, 259154.772,
-                    260777.625, 262390.253, 264004.4906, 265643.59, 267255.4076, 268873.426, 270470.7252, 272106.4804,
-                    273722.4456, 275337.794, 276945.7038, 278592.9154, 280204.3726, 281841.1606, 283489.171,
-                    285130.1716, 286735.3362, 288364.7164, 289961.1814, 291595.5524, 293285.683, 294899.6668,
-                    296499.3434, 298128.0462, 299761.8946, 301394.2424, 302997.6748, 304615.1478, 306269.7724,
-                    307886.114, 309543.1028, 311153.2862, 312782.8546, 314421.2008, 316033.2438, 317692.9636,
-                    319305.2648, 320948.7406, 322566.3364, 324228.4224, 325847.1542, },
+            { 47271, 48062.3584, 48862.7074, 49673.152, 50492.8416, 51322.9514, 52161.03, 53009.407, 53867.6348, 54734.206, 55610.5144, 56496.2096, 57390.795, 58297.268, 59210.6448, 60134.665, 61068.0248, 62010.4472, 62962.5204, 63923.5742, 64895.0194, 65876.4182, 66862.6136, 67862.6968, 68868.8908, 69882.8544, 70911.271, 71944.0924, 72990.0326, 74040.692, 75100.6336, 76174.7826, 77252.5998, 78340.2974, 79438.2572, 80545.4976, 81657.2796, 82784.6336, 83915.515, 85059.7362, 86205.9368, 87364.4424, 88530.3358, 89707.3744, 90885.9638, 92080.197, 93275.5738, 94479.391, 95695.918, 96919.2236, 98148.4602, 99382.3474, 100625.6974, 101878.0284, 103141.6278, 104409.4588, 105686.2882, 106967.5402, 108261.6032, 109548.1578, 110852.0728, 112162.231, 113479.0072, 114806.2626, 116137.9072, 117469.5048, 118813.5186, 120165.4876, 121516.2556, 122875.766, 124250.5444, 125621.2222, 127003.2352, 128387.848, 129775.2644, 131181.7776, 132577.3086, 133979.9458, 135394.1132, 136800.9078, 138233.217,
+                    139668.5308, 141085.212, 142535.2122, 143969.0684, 145420.2872, 146878.1542, 148332.7572, 149800.3202, 151269.66, 152743.6104, 154213.0948, 155690.288, 157169.4246, 158672.1756, 160160.059, 161650.6854, 163145.7772, 164645.6726, 166159.1952, 167682.1578, 169177.3328, 170700.0118, 172228.8964, 173732.6664, 175265.5556, 176787.799, 178317.111, 179856.6914, 181400.865, 182943.4612, 184486.742, 186033.4698, 187583.7886, 189148.1868, 190688.4526, 192250.1926, 193810.9042, 195354.2972, 196938.7682, 198493.5898, 200079.2824, 201618.912, 203205.5492, 204765.5798, 206356.1124, 207929.3064, 209498.7196, 211086.229, 212675.1324, 214256.7892, 215826.2392, 217412.8474, 218995.6724, 220618.6038, 222207.1166, 223781.0364, 225387.4332, 227005.7928, 228590.4336, 230217.8738, 231805.1054, 233408.9, 234995.3432, 236601.4956, 238190.7904, 239817.2548, 241411.2832, 243002.4066, 244640.1884, 246255.3128, 247849.3508, 249479.9734, 251106.8822, 252705.027, 254332.9242, 255935.129,
+                    257526.9014, 259154.772, 260777.625, 262390.253, 264004.4906, 265643.59, 267255.4076, 268873.426, 270470.7252, 272106.4804, 273722.4456, 275337.794, 276945.7038, 278592.9154, 280204.3726, 281841.1606, 283489.171, 285130.1716, 286735.3362, 288364.7164, 289961.1814, 291595.5524, 293285.683, 294899.6668, 296499.3434, 298128.0462, 299761.8946, 301394.2424, 302997.6748, 304615.1478, 306269.7724, 307886.114, 309543.1028, 311153.2862, 312782.8546, 314421.2008, 316033.2438, 317692.9636, 319305.2648, 320948.7406, 322566.3364, 324228.4224, 325847.1542, },
             // precision 17
-            { 94542, 96125.811, 97728.019, 99348.558, 100987.9705, 102646.7565, 104324.5125, 106021.7435, 107736.7865,
-                    109469.272, 111223.9465, 112995.219, 114787.432, 116593.152, 118422.71, 120267.2345, 122134.6765,
-                    124020.937, 125927.2705, 127851.255, 129788.9485, 131751.016, 133726.8225, 135722.592, 137736.789,
-                    139770.568, 141821.518, 143891.343, 145982.1415, 148095.387, 150207.526, 152355.649, 154515.6415,
-                    156696.05, 158887.7575, 161098.159, 163329.852, 165569.053, 167837.4005, 170121.6165, 172420.4595,
-                    174732.6265, 177062.77, 179412.502, 181774.035, 184151.939, 186551.6895, 188965.691, 191402.8095,
-                    193857.949, 196305.0775, 198774.6715, 201271.2585, 203764.78, 206299.3695, 208818.1365, 211373.115,
-                    213946.7465, 216532.076, 219105.541, 221714.5375, 224337.5135, 226977.5125, 229613.0655,
-                    232270.2685, 234952.2065, 237645.3555, 240331.1925, 243034.517, 245756.0725, 248517.6865,
-                    251232.737, 254011.3955, 256785.995, 259556.44, 262368.335, 265156.911, 267965.266, 270785.583,
-                    273616.0495, 276487.4835, 279346.639, 282202.509, 285074.3885, 287942.2855, 290856.018, 293774.0345,
-                    296678.5145, 299603.6355, 302552.6575, 305492.9785, 308466.8605, 311392.581, 314347.538,
-                    317319.4295, 320285.9785, 323301.7325, 326298.3235, 329301.3105, 332301.987, 335309.791, 338370.762,
-                    341382.923, 344431.1265, 347464.1545, 350507.28, 353619.2345, 356631.2005, 359685.203, 362776.7845,
-                    365886.488, 368958.2255, 372060.6825, 375165.4335, 378237.935, 381328.311, 384430.5225, 387576.425,
-                    390683.242, 393839.648, 396977.8425, 400101.9805, 403271.296, 406409.8425, 409529.5485, 412678.7,
-                    415847.423, 419020.8035, 422157.081, 425337.749, 428479.6165, 431700.902, 434893.1915, 438049.582,
-                    441210.5415, 444379.2545, 447577.356, 450741.931, 453959.548, 457137.0935, 460329.846, 463537.4815,
-                    466732.3345, 469960.5615, 473164.681, 476347.6345, 479496.173, 482813.1645, 486025.6995,
-                    489249.4885, 492460.1945, 495675.8805, 498908.0075, 502131.802, 505374.3855, 508550.9915,
-                    511806.7305, 515026.776, 518217.0005, 521523.9855, 524705.9855, 527950.997, 531210.0265, 534472.497,
-                    537750.7315, 540926.922, 544207.094, 547429.4345, 550666.3745, 553975.3475, 557150.7185,
-                    560399.6165, 563662.697, 566916.7395, 570146.1215, 573447.425, 576689.6245, 579874.5745, 583202.337,
-                    586503.0255, 589715.635, 592910.161, 596214.3885, 599488.035, 602740.92, 605983.0685, 609248.67,
-                    612491.3605, 615787.912, 619107.5245, 622307.9555, 625577.333, 628840.4385, 632085.2155,
-                    635317.6135, 638691.7195, 641887.467, 645139.9405, 648441.546, 651666.252, 654941.845, },
+            { 94542, 96125.811, 97728.019, 99348.558, 100987.9705, 102646.7565, 104324.5125, 106021.7435, 107736.7865, 109469.272, 111223.9465, 112995.219, 114787.432, 116593.152, 118422.71, 120267.2345, 122134.6765, 124020.937, 125927.2705, 127851.255, 129788.9485, 131751.016, 133726.8225, 135722.592, 137736.789, 139770.568, 141821.518, 143891.343, 145982.1415, 148095.387, 150207.526, 152355.649, 154515.6415, 156696.05, 158887.7575, 161098.159, 163329.852, 165569.053, 167837.4005, 170121.6165, 172420.4595, 174732.6265, 177062.77, 179412.502, 181774.035, 184151.939, 186551.6895, 188965.691, 191402.8095, 193857.949, 196305.0775, 198774.6715, 201271.2585, 203764.78, 206299.3695, 208818.1365, 211373.115, 213946.7465, 216532.076, 219105.541, 221714.5375, 224337.5135, 226977.5125, 229613.0655, 232270.2685, 234952.2065, 237645.3555, 240331.1925, 243034.517, 245756.0725, 248517.6865, 251232.737, 254011.3955, 256785.995, 259556.44, 262368.335, 265156.911, 267965.266, 270785.583, 273616.0495,
+                    276487.4835, 279346.639, 282202.509, 285074.3885, 287942.2855, 290856.018, 293774.0345, 296678.5145, 299603.6355, 302552.6575, 305492.9785, 308466.8605, 311392.581, 314347.538, 317319.4295, 320285.9785, 323301.7325, 326298.3235, 329301.3105, 332301.987, 335309.791, 338370.762, 341382.923, 344431.1265, 347464.1545, 350507.28, 353619.2345, 356631.2005, 359685.203, 362776.7845, 365886.488, 368958.2255, 372060.6825, 375165.4335, 378237.935, 381328.311, 384430.5225, 387576.425, 390683.242, 393839.648, 396977.8425, 400101.9805, 403271.296, 406409.8425, 409529.5485, 412678.7, 415847.423, 419020.8035, 422157.081, 425337.749, 428479.6165, 431700.902, 434893.1915, 438049.582, 441210.5415, 444379.2545, 447577.356, 450741.931, 453959.548, 457137.0935, 460329.846, 463537.4815, 466732.3345, 469960.5615, 473164.681, 476347.6345, 479496.173, 482813.1645, 486025.6995, 489249.4885, 492460.1945, 495675.8805, 498908.0075, 502131.802, 505374.3855, 508550.9915, 511806.7305, 515026.776,
+                    518217.0005, 521523.9855, 524705.9855, 527950.997, 531210.0265, 534472.497, 537750.7315, 540926.922, 544207.094, 547429.4345, 550666.3745, 553975.3475, 557150.7185, 560399.6165, 563662.697, 566916.7395, 570146.1215, 573447.425, 576689.6245, 579874.5745, 583202.337, 586503.0255, 589715.635, 592910.161, 596214.3885, 599488.035, 602740.92, 605983.0685, 609248.67, 612491.3605, 615787.912, 619107.5245, 622307.9555, 625577.333, 628840.4385, 632085.2155, 635317.6135, 638691.7195, 641887.467, 645139.9405, 648441.546, 651666.252, 654941.845, },
             // precision 18
-            { 189084, 192250.913, 195456.774, 198696.946, 201977.762, 205294.444, 208651.754, 212042.099, 215472.269,
-                    218941.91, 222443.912, 225996.845, 229568.199, 233193.568, 236844.457, 240543.233, 244279.475,
-                    248044.27, 251854.588, 255693.2, 259583.619, 263494.621, 267445.385, 271454.061, 275468.769,
-                    279549.456, 283646.446, 287788.198, 291966.099, 296181.164, 300431.469, 304718.618, 309024.004,
-                    313393.508, 317760.803, 322209.731, 326675.061, 331160.627, 335654.47, 340241.442, 344841.833,
-                    349467.132, 354130.629, 358819.432, 363574.626, 368296.587, 373118.482, 377914.93, 382782.301,
-                    387680.669, 392601.981, 397544.323, 402529.115, 407546.018, 412593.658, 417638.657, 422762.865,
-                    427886.169, 433017.167, 438213.273, 443441.254, 448692.421, 453937.533, 459239.049, 464529.569,
-                    469910.083, 475274.03, 480684.473, 486070.26, 491515.237, 496995.651, 502476.617, 507973.609,
-                    513497.19, 519083.233, 524726.509, 530305.505, 535945.728, 541584.404, 547274.055, 552967.236,
-                    558667.862, 564360.216, 570128.148, 575965.08, 581701.952, 587532.523, 593361.144, 599246.128,
-                    605033.418, 610958.779, 616837.117, 622772.818, 628672.04, 634675.369, 640574.831, 646585.739,
-                    652574.547, 658611.217, 664642.684, 670713.914, 676737.681, 682797.313, 688837.897, 694917.874,
-                    701009.882, 707173.648, 713257.254, 719415.392, 725636.761, 731710.697, 737906.209, 744103.074,
-                    750313.39, 756504.185, 762712.579, 768876.985, 775167.859, 781359, 787615.959, 793863.597,
-                    800245.477, 806464.582, 812785.294, 819005.925, 825403.057, 831676.197, 837936.284, 844266.968,
-                    850642.711, 856959.756, 863322.774, 869699.931, 876102.478, 882355.787, 888694.463, 895159.952,
-                    901536.143, 907872.631, 914293.672, 920615.14, 927130.974, 933409.404, 939922.178, 946331.47,
-                    952745.93, 959209.264, 965590.224, 972077.284, 978501.961, 984953.19, 991413.271, 997817.479,
-                    1004222.658, 1010725.676, 1017177.138, 1023612.529, 1030098.236, 1036493.719, 1043112.207,
-                    1049537.036, 1056008.096, 1062476.184, 1068942.337, 1075524.95, 1081932.864, 1088426.025,
-                    1094776.005, 1101327.448, 1107901.673, 1114423.639, 1120884.602, 1127324.923, 1133794.24,
-                    1140328.886, 1146849.376, 1153346.682, 1159836.502, 1166478.703, 1172953.304, 1179391.502,
-                    1185950.982, 1192544.052, 1198913.41, 1205430.994, 1212015.525, 1218674.042, 1225121.683,
-                    1231551.101, 1238126.379, 1244673.795, 1251260.649, 1257697.86, 1264320.983, 1270736.319,
-                    1277274.694, 1283804.95, 1290211.514, 1296858.568, 1303455.691, } };
+            { 189084, 192250.913, 195456.774, 198696.946, 201977.762, 205294.444, 208651.754, 212042.099, 215472.269, 218941.91, 222443.912, 225996.845, 229568.199, 233193.568, 236844.457, 240543.233, 244279.475, 248044.27, 251854.588, 255693.2, 259583.619, 263494.621, 267445.385, 271454.061, 275468.769, 279549.456, 283646.446, 287788.198, 291966.099, 296181.164, 300431.469, 304718.618, 309024.004, 313393.508, 317760.803, 322209.731, 326675.061, 331160.627, 335654.47, 340241.442, 344841.833, 349467.132, 354130.629, 358819.432, 363574.626, 368296.587, 373118.482, 377914.93, 382782.301, 387680.669, 392601.981, 397544.323, 402529.115, 407546.018, 412593.658, 417638.657, 422762.865, 427886.169, 433017.167, 438213.273, 443441.254, 448692.421, 453937.533, 459239.049, 464529.569, 469910.083, 475274.03, 480684.473, 486070.26, 491515.237, 496995.651, 502476.617, 507973.609, 513497.19, 519083.233, 524726.509, 530305.505, 535945.728, 541584.404, 547274.055, 552967.236, 558667.862, 564360.216,
+                    570128.148, 575965.08, 581701.952, 587532.523, 593361.144, 599246.128, 605033.418, 610958.779, 616837.117, 622772.818, 628672.04, 634675.369, 640574.831, 646585.739, 652574.547, 658611.217, 664642.684, 670713.914, 676737.681, 682797.313, 688837.897, 694917.874, 701009.882, 707173.648, 713257.254, 719415.392, 725636.761, 731710.697, 737906.209, 744103.074, 750313.39, 756504.185, 762712.579, 768876.985, 775167.859, 781359, 787615.959, 793863.597, 800245.477, 806464.582, 812785.294, 819005.925, 825403.057, 831676.197, 837936.284, 844266.968, 850642.711, 856959.756, 863322.774, 869699.931, 876102.478, 882355.787, 888694.463, 895159.952, 901536.143, 907872.631, 914293.672, 920615.14, 927130.974, 933409.404, 939922.178, 946331.47, 952745.93, 959209.264, 965590.224, 972077.284, 978501.961, 984953.19, 991413.271, 997817.479, 1004222.658, 1010725.676, 1017177.138, 1023612.529, 1030098.236, 1036493.719, 1043112.207, 1049537.036, 1056008.096, 1062476.184, 1068942.337,
+                    1075524.95, 1081932.864, 1088426.025, 1094776.005, 1101327.448, 1107901.673, 1114423.639, 1120884.602, 1127324.923, 1133794.24, 1140328.886, 1146849.376, 1153346.682, 1159836.502, 1166478.703, 1172953.304, 1179391.502, 1185950.982, 1192544.052, 1198913.41, 1205430.994, 1212015.525, 1218674.042, 1225121.683, 1231551.101, 1238126.379, 1244673.795, 1251260.649, 1257697.86, 1264320.983, 1270736.319, 1277274.694, 1283804.95, 1290211.514, 1296858.568, 1303455.691, } };
 
     public static final double[][] biasData = {
             // precision 4
-            { 10, 9.717, 9.207, 8.7896, 8.2882, 7.8204, 7.3772, 6.9342, 6.5202, 6.161, 5.7722, 5.4636, 5.0396, 4.6766,
-                    4.3566, 4.0454, 3.7936, 3.4856, 3.2666, 2.9946, 2.766, 2.4692, 2.3638, 2.0764, 1.7864, 1.7602,
-                    1.4814, 1.433, 1.2926, 1.0664, 0.999600000000001, 0.7956, 0.5366, 0.589399999999998,
-                    0.573799999999999, 0.269799999999996, 0.368200000000002, 0.0544000000000011, 0.234200000000001,
-                    0.0108000000000033, -0.203400000000002, -0.0701999999999998, -0.129600000000003, -0.364199999999997,
-                    -0.480600000000003, -0.226999999999997, -0.322800000000001, -0.382599999999996, -0.511200000000002,
-                    -0.669600000000003, -0.749400000000001, -0.500399999999999, -0.617600000000003, -0.6922,
-                    -0.601599999999998, -0.416200000000003, -0.338200000000001, -0.782600000000002, -0.648600000000002,
-                    -0.919800000000002, -0.851799999999997, -0.962400000000002, -0.6402, -1.1922, -1.0256, -1.086,
-                    -1.21899999999999, -0.819400000000002, -0.940600000000003, -1.1554, -1.2072, -1.1752,
-                    -1.16759999999999, -1.14019999999999, -1.3754, -1.29859999999999, -1.607, -1.3292, -1.7606, },
+            { 10, 9.717, 9.207, 8.7896, 8.2882, 7.8204, 7.3772, 6.9342, 6.5202, 6.161, 5.7722, 5.4636, 5.0396, 4.6766, 4.3566, 4.0454, 3.7936, 3.4856, 3.2666, 2.9946, 2.766, 2.4692, 2.3638, 2.0764, 1.7864, 1.7602, 1.4814, 1.433, 1.2926, 1.0664, 0.999600000000001, 0.7956, 0.5366, 0.589399999999998, 0.573799999999999, 0.269799999999996, 0.368200000000002, 0.0544000000000011, 0.234200000000001, 0.0108000000000033, -0.203400000000002, -0.0701999999999998, -0.129600000000003, -0.364199999999997, -0.480600000000003, -0.226999999999997, -0.322800000000001, -0.382599999999996, -0.511200000000002, -0.669600000000003, -0.749400000000001, -0.500399999999999, -0.617600000000003, -0.6922, -0.601599999999998, -0.416200000000003, -0.338200000000001, -0.782600000000002, -0.648600000000002, -0.919800000000002, -0.851799999999997, -0.962400000000002, -0.6402, -1.1922, -1.0256, -1.086, -1.21899999999999, -0.819400000000002, -0.940600000000003, -1.1554, -1.2072, -1.1752, -1.16759999999999,
+                    -1.14019999999999, -1.3754, -1.29859999999999, -1.607, -1.3292, -1.7606, },
             // precision 5
-            { 22, 21.1194, 20.8208, 20.2318, 19.77, 19.2436, 18.7774, 18.2848, 17.8224, 17.3742, 16.9336, 16.503,
-                    16.0494, 15.6292, 15.2124, 14.798, 14.367, 13.9728, 13.5944, 13.217, 12.8438, 12.3696, 12.0956,
-                    11.7044, 11.324, 11.0668, 10.6698, 10.3644, 10.049, 9.6918, 9.4146, 9.082, 8.687, 8.5398, 8.2462,
-                    7.857, 7.6606, 7.4168, 7.1248, 6.9222, 6.6804, 6.447, 6.3454, 5.9594, 5.7636, 5.5776, 5.331, 5.19,
-                    4.9676, 4.7564, 4.5314, 4.4442, 4.3708, 3.9774, 3.9624, 3.8796, 3.755, 3.472, 3.2076, 3.1024,
-                    2.8908, 2.7338, 2.7728, 2.629, 2.413, 2.3266, 2.1524, 2.2642, 2.1806, 2.0566, 1.9192, 1.7598,
-                    1.3516, 1.5802, 1.43859999999999, 1.49160000000001, 1.1524, 1.1892, 0.841399999999993,
-                    0.879800000000003, 0.837599999999995, 0.469800000000006, 0.765600000000006, 0.331000000000003,
-                    0.591399999999993, 0.601200000000006, 0.701599999999999, 0.558199999999999, 0.339399999999998,
-                    0.354399999999998, 0.491200000000006, 0.308000000000007, 0.355199999999996, -0.0254000000000048,
-                    0.205200000000005, -0.272999999999996, 0.132199999999997, 0.394400000000005, -0.241200000000006,
-                    0.242000000000004, 0.191400000000002, 0.253799999999998, -0.122399999999999, -0.370800000000003,
-                    0.193200000000004, -0.0848000000000013, 0.0867999999999967, -0.327200000000005, -0.285600000000002,
-                    0.311400000000006, -0.128399999999999, -0.754999999999995, -0.209199999999996, -0.293599999999998,
-                    -0.364000000000004, -0.253600000000006, -0.821200000000005, -0.253600000000006, -0.510400000000004,
-                    -0.383399999999995, -0.491799999999998, -0.220200000000006, -0.0972000000000008, -0.557400000000001,
-                    -0.114599999999996, -0.295000000000002, -0.534800000000004, 0.346399999999988, -0.65379999999999,
-                    0.0398000000000138, 0.0341999999999985, -0.995800000000003, -0.523400000000009, -0.489000000000004,
-                    -0.274799999999999, -0.574999999999989, -0.482799999999997, 0.0571999999999946, -0.330600000000004,
-                    -0.628800000000012, -0.140199999999993, -0.540600000000012, -0.445999999999998, -0.599400000000003,
-                    -0.262599999999992, 0.163399999999996, -0.100599999999986, -0.39500000000001, -1.06960000000001,
-                    -0.836399999999998, -0.753199999999993, -0.412399999999991, -0.790400000000005, -0.29679999999999,
-                    -0.28540000000001, -0.193000000000012, -0.0772000000000048, -0.962799999999987,
-                    -0.414800000000014, },
+            { 22, 21.1194, 20.8208, 20.2318, 19.77, 19.2436, 18.7774, 18.2848, 17.8224, 17.3742, 16.9336, 16.503, 16.0494, 15.6292, 15.2124, 14.798, 14.367, 13.9728, 13.5944, 13.217, 12.8438, 12.3696, 12.0956, 11.7044, 11.324, 11.0668, 10.6698, 10.3644, 10.049, 9.6918, 9.4146, 9.082, 8.687, 8.5398, 8.2462, 7.857, 7.6606, 7.4168, 7.1248, 6.9222, 6.6804, 6.447, 6.3454, 5.9594, 5.7636, 5.5776, 5.331, 5.19, 4.9676, 4.7564, 4.5314, 4.4442, 4.3708, 3.9774, 3.9624, 3.8796, 3.755, 3.472, 3.2076, 3.1024, 2.8908, 2.7338, 2.7728, 2.629, 2.413, 2.3266, 2.1524, 2.2642, 2.1806, 2.0566, 1.9192, 1.7598, 1.3516, 1.5802, 1.43859999999999, 1.49160000000001, 1.1524, 1.1892, 0.841399999999993, 0.879800000000003, 0.837599999999995, 0.469800000000006, 0.765600000000006, 0.331000000000003, 0.591399999999993, 0.601200000000006, 0.701599999999999, 0.558199999999999, 0.339399999999998, 0.354399999999998, 0.491200000000006, 0.308000000000007, 0.355199999999996, -0.0254000000000048, 0.205200000000005,
+                    -0.272999999999996, 0.132199999999997, 0.394400000000005, -0.241200000000006, 0.242000000000004, 0.191400000000002, 0.253799999999998, -0.122399999999999, -0.370800000000003, 0.193200000000004, -0.0848000000000013, 0.0867999999999967, -0.327200000000005, -0.285600000000002, 0.311400000000006, -0.128399999999999, -0.754999999999995, -0.209199999999996, -0.293599999999998, -0.364000000000004, -0.253600000000006, -0.821200000000005, -0.253600000000006, -0.510400000000004, -0.383399999999995, -0.491799999999998, -0.220200000000006, -0.0972000000000008, -0.557400000000001, -0.114599999999996, -0.295000000000002, -0.534800000000004, 0.346399999999988, -0.65379999999999, 0.0398000000000138, 0.0341999999999985, -0.995800000000003, -0.523400000000009, -0.489000000000004, -0.274799999999999, -0.574999999999989, -0.482799999999997, 0.0571999999999946, -0.330600000000004, -0.628800000000012, -0.140199999999993, -0.540600000000012, -0.445999999999998, -0.599400000000003,
+                    -0.262599999999992, 0.163399999999996, -0.100599999999986, -0.39500000000001, -1.06960000000001, -0.836399999999998, -0.753199999999993, -0.412399999999991, -0.790400000000005, -0.29679999999999, -0.28540000000001, -0.193000000000012, -0.0772000000000048, -0.962799999999987, -0.414800000000014, },
             // precision 6
-            { 45, 44.1902, 43.271, 42.8358, 41.8142, 41.2854, 40.317, 39.354, 38.8924, 37.9436, 37.4596, 36.5262,
-                    35.6248, 35.1574, 34.2822, 33.837, 32.9636, 32.074, 31.7042, 30.7976, 30.4772, 29.6564, 28.7942,
-                    28.5004, 27.686, 27.291, 26.5672, 25.8556, 25.4982, 24.8204, 24.4252, 23.7744, 23.0786, 22.8344,
-                    22.0294, 21.8098, 21.0794, 20.5732, 20.1878, 19.5648, 19.2902, 18.6784, 18.3352, 17.8946, 17.3712,
-                    17.0852, 16.499, 16.2686, 15.6844, 15.2234, 14.9732, 14.3356, 14.2286, 13.7262, 13.3284, 13.1048,
-                    12.5962, 12.3562, 12.1272, 11.4184, 11.4974, 11.0822, 10.856, 10.48, 10.2834, 10.0208, 9.637,
-                    9.51739999999999, 9.05759999999999, 8.74760000000001, 8.42700000000001, 8.1326, 8.2372, 8.2788,
-                    7.6776, 7.79259999999999, 7.1952, 6.9564, 6.6454, 6.87, 6.5428, 6.19999999999999, 6.02940000000001,
-                    5.62780000000001, 5.6782, 5.792, 5.35159999999999, 5.28319999999999, 5.0394, 5.07480000000001,
-                    4.49119999999999, 4.84899999999999, 4.696, 4.54040000000001, 4.07300000000001, 4.37139999999999,
-                    3.7216, 3.7328, 3.42080000000001, 3.41839999999999, 3.94239999999999, 3.27719999999999, 3.411,
-                    3.13079999999999, 2.76900000000001, 2.92580000000001, 2.68279999999999, 2.75020000000001,
-                    2.70599999999999, 2.3886, 3.01859999999999, 2.45179999999999, 2.92699999999999, 2.41720000000001,
-                    2.41139999999999, 2.03299999999999, 2.51240000000001, 2.5564, 2.60079999999999, 2.41720000000001,
-                    1.80439999999999, 1.99700000000001, 2.45480000000001, 1.8948, 2.2346, 2.30860000000001,
-                    2.15479999999999, 1.88419999999999, 1.6508, 0.677199999999999, 1.72540000000001, 1.4752,
-                    1.72280000000001, 1.66139999999999, 1.16759999999999, 1.79300000000001, 1.00059999999999,
-                    0.905200000000008, 0.659999999999997, 1.55879999999999, 1.1636, 0.688199999999995,
-                    0.712600000000009, 0.450199999999995, 1.1978, 0.975599999999986, 0.165400000000005, 1.727,
-                    1.19739999999999, -0.252600000000001, 1.13460000000001, 1.3048, 1.19479999999999, 0.313400000000001,
-                    0.878999999999991, 1.12039999999999, 0.853000000000009, 1.67920000000001, 0.856999999999999,
-                    0.448599999999999, 1.2362, 0.953399999999988, 1.02859999999998, 0.563199999999995,
-                    0.663000000000011, 0.723000000000013, 0.756599999999992, 0.256599999999992, -0.837600000000009,
-                    0.620000000000005, 0.821599999999989, 0.216600000000028, 0.205600000000004, 0.220199999999977,
-                    0.372599999999977, 0.334400000000016, 0.928400000000011, 0.972800000000007, 0.192400000000021,
-                    0.487199999999973, -0.413000000000011, 0.807000000000016, 0.120600000000024, 0.769000000000005,
-                    0.870799999999974, 0.66500000000002, 0.118200000000002, 0.401200000000017, 0.635199999999998,
-                    0.135400000000004, 0.175599999999974, 1.16059999999999, 0.34620000000001, 0.521400000000028,
-                    -0.586599999999976, -1.16480000000001, 0.968399999999974, 0.836999999999989, 0.779600000000016,
-                    0.985799999999983, },
+            { 45, 44.1902, 43.271, 42.8358, 41.8142, 41.2854, 40.317, 39.354, 38.8924, 37.9436, 37.4596, 36.5262, 35.6248, 35.1574, 34.2822, 33.837, 32.9636, 32.074, 31.7042, 30.7976, 30.4772, 29.6564, 28.7942, 28.5004, 27.686, 27.291, 26.5672, 25.8556, 25.4982, 24.8204, 24.4252, 23.7744, 23.0786, 22.8344, 22.0294, 21.8098, 21.0794, 20.5732, 20.1878, 19.5648, 19.2902, 18.6784, 18.3352, 17.8946, 17.3712, 17.0852, 16.499, 16.2686, 15.6844, 15.2234, 14.9732, 14.3356, 14.2286, 13.7262, 13.3284, 13.1048, 12.5962, 12.3562, 12.1272, 11.4184, 11.4974, 11.0822, 10.856, 10.48, 10.2834, 10.0208, 9.637, 9.51739999999999, 9.05759999999999, 8.74760000000001, 8.42700000000001, 8.1326, 8.2372, 8.2788, 7.6776, 7.79259999999999, 7.1952, 6.9564, 6.6454, 6.87, 6.5428, 6.19999999999999, 6.02940000000001, 5.62780000000001, 5.6782, 5.792, 5.35159999999999, 5.28319999999999, 5.0394, 5.07480000000001, 4.49119999999999, 4.84899999999999, 4.696, 4.54040000000001, 4.07300000000001, 4.37139999999999, 3.7216,
+                    3.7328, 3.42080000000001, 3.41839999999999, 3.94239999999999, 3.27719999999999, 3.411, 3.13079999999999, 2.76900000000001, 2.92580000000001, 2.68279999999999, 2.75020000000001, 2.70599999999999, 2.3886, 3.01859999999999, 2.45179999999999, 2.92699999999999, 2.41720000000001, 2.41139999999999, 2.03299999999999, 2.51240000000001, 2.5564, 2.60079999999999, 2.41720000000001, 1.80439999999999, 1.99700000000001, 2.45480000000001, 1.8948, 2.2346, 2.30860000000001, 2.15479999999999, 1.88419999999999, 1.6508, 0.677199999999999, 1.72540000000001, 1.4752, 1.72280000000001, 1.66139999999999, 1.16759999999999, 1.79300000000001, 1.00059999999999, 0.905200000000008, 0.659999999999997, 1.55879999999999, 1.1636, 0.688199999999995, 0.712600000000009, 0.450199999999995, 1.1978, 0.975599999999986, 0.165400000000005, 1.727, 1.19739999999999, -0.252600000000001, 1.13460000000001, 1.3048, 1.19479999999999, 0.313400000000001, 0.878999999999991, 1.12039999999999, 0.853000000000009,
+                    1.67920000000001, 0.856999999999999, 0.448599999999999, 1.2362, 0.953399999999988, 1.02859999999998, 0.563199999999995, 0.663000000000011, 0.723000000000013, 0.756599999999992, 0.256599999999992, -0.837600000000009, 0.620000000000005, 0.821599999999989, 0.216600000000028, 0.205600000000004, 0.220199999999977, 0.372599999999977, 0.334400000000016, 0.928400000000011, 0.972800000000007, 0.192400000000021, 0.487199999999973, -0.413000000000011, 0.807000000000016, 0.120600000000024, 0.769000000000005, 0.870799999999974, 0.66500000000002, 0.118200000000002, 0.401200000000017, 0.635199999999998, 0.135400000000004, 0.175599999999974, 1.16059999999999, 0.34620000000001, 0.521400000000028, -0.586599999999976, -1.16480000000001, 0.968399999999974, 0.836999999999989, 0.779600000000016, 0.985799999999983, },
             // precision 7
-            { 91, 89.4934, 87.9758, 86.4574, 84.9718, 83.4954, 81.5302, 80.0756, 78.6374, 77.1782, 75.7888, 73.9522,
-                    72.592, 71.2532, 69.9086, 68.5938, 66.9474, 65.6796, 64.4394, 63.2176, 61.9768, 60.4214, 59.2528,
-                    58.0102, 56.8658, 55.7278, 54.3044, 53.1316, 52.093, 51.0032, 49.9092, 48.6306, 47.5294, 46.5756,
-                    45.6508, 44.662, 43.552, 42.3724, 41.617, 40.5754, 39.7872, 38.8444, 37.7988, 36.8606, 36.2118,
-                    35.3566, 34.4476, 33.5882, 32.6816, 32.0824, 31.0258, 30.6048, 29.4436, 28.7274, 27.957, 27.147,
-                    26.4364, 25.7592, 25.3386, 24.781, 23.8028, 23.656, 22.6544, 21.996, 21.4718, 21.1544, 20.6098,
-                    19.5956, 19.0616, 18.5758, 18.4878, 17.5244, 17.2146, 16.724, 15.8722, 15.5198, 15.0414, 14.941,
-                    14.9048, 13.87, 13.4304, 13.028, 12.4708, 12.37, 12.0624, 11.4668, 11.5532, 11.4352, 11.2564,
-                    10.2744, 10.2118, 9.74720000000002, 10.1456, 9.2928, 8.75040000000001, 8.55279999999999,
-                    8.97899999999998, 8.21019999999999, 8.18340000000001, 7.3494, 7.32499999999999, 7.66140000000001,
-                    6.90300000000002, 7.25439999999998, 6.9042, 7.21499999999997, 6.28640000000001, 6.08139999999997,
-                    6.6764, 6.30099999999999, 5.13900000000001, 5.65800000000002, 5.17320000000001, 4.59019999999998,
-                    4.9538, 5.08280000000002, 4.92200000000003, 4.99020000000002, 4.7328, 5.4538, 4.11360000000002,
-                    4.22340000000003, 4.08780000000002, 3.70800000000003, 4.15559999999999, 4.18520000000001,
-                    3.63720000000001, 3.68220000000002, 3.77960000000002, 3.6078, 2.49160000000001, 3.13099999999997,
-                    2.5376, 3.19880000000001, 3.21100000000001, 2.4502, 3.52820000000003, 2.91199999999998,
-                    3.04480000000001, 2.7432, 2.85239999999999, 2.79880000000003, 2.78579999999999, 1.88679999999999,
-                    2.98860000000002, 2.50639999999999, 1.91239999999999, 2.66160000000002, 2.46820000000002,
-                    1.58199999999999, 1.30399999999997, 2.27379999999999, 2.68939999999998, 1.32900000000001,
-                    3.10599999999999, 1.69080000000002, 2.13740000000001, 2.53219999999999, 1.88479999999998,
-                    1.33240000000001, 1.45119999999997, 1.17899999999997, 2.44119999999998, 1.60659999999996,
-                    2.16700000000003, 0.77940000000001, 2.37900000000002, 2.06700000000001, 1.46000000000004,
-                    2.91160000000002, 1.69200000000001, 0.954600000000028, 2.49300000000005, 2.2722, 1.33500000000004,
-                    2.44899999999996, 1.20140000000004, 3.07380000000001, 2.09739999999999, 2.85640000000001,
-                    2.29960000000005, 2.40899999999999, 1.97040000000004, 0.809799999999996, 1.65279999999996,
-                    2.59979999999996, 0.95799999999997, 2.06799999999998, 2.32780000000002, 4.20159999999998,
-                    1.96320000000003, 1.86400000000003, 1.42999999999995, 3.77940000000001, 1.27200000000005,
-                    1.86440000000005, 2.20600000000002, 3.21900000000005, 1.5154, 2.61019999999996, },
+            { 91, 89.4934, 87.9758, 86.4574, 84.9718, 83.4954, 81.5302, 80.0756, 78.6374, 77.1782, 75.7888, 73.9522, 72.592, 71.2532, 69.9086, 68.5938, 66.9474, 65.6796, 64.4394, 63.2176, 61.9768, 60.4214, 59.2528, 58.0102, 56.8658, 55.7278, 54.3044, 53.1316, 52.093, 51.0032, 49.9092, 48.6306, 47.5294, 46.5756, 45.6508, 44.662, 43.552, 42.3724, 41.617, 40.5754, 39.7872, 38.8444, 37.7988, 36.8606, 36.2118, 35.3566, 34.4476, 33.5882, 32.6816, 32.0824, 31.0258, 30.6048, 29.4436, 28.7274, 27.957, 27.147, 26.4364, 25.7592, 25.3386, 24.781, 23.8028, 23.656, 22.6544, 21.996, 21.4718, 21.1544, 20.6098, 19.5956, 19.0616, 18.5758, 18.4878, 17.5244, 17.2146, 16.724, 15.8722, 15.5198, 15.0414, 14.941, 14.9048, 13.87, 13.4304, 13.028, 12.4708, 12.37, 12.0624, 11.4668, 11.5532, 11.4352, 11.2564, 10.2744, 10.2118, 9.747200000

<TRUNCATED>

[50/67] [abbrv] kylin git commit: KYLIN-2535 Use ResourceStore to manage ACL and saved queries

Posted by li...@apache.org.
KYLIN-2535 Use ResourceStore to manage ACL and saved queries


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/afaa95a0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/afaa95a0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/afaa95a0

Branch: refs/heads/master
Commit: afaa95a099d69105198b73fbf92622975dbf766c
Parents: 0523823
Author: Yang Li <li...@apache.org>
Authored: Tue May 30 00:40:50 2017 +0800
Committer: nichunen <zj...@sjtu.org>
Committed: Tue May 30 13:20:11 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    |  32 +-
 .../kylin/common/persistence/ResourceStore.java |   7 +-
 .../apache/kylin/common/util/ZooKeeperUtil.java |  59 +++
 .../common/persistence/ResourceStoreTest.java   |   3 +-
 .../hbase/ITAclTableMigrationToolTest.java      |  15 +-
 .../storage/hbase/ITHBaseResourceStoreTest.java |   8 +-
 .../rest/job/StorageCleanJobHbaseUtil.java      | 127 +++++++
 .../kylin/rest/job/StorageCleanupJob.java       | 113 +-----
 .../apache/kylin/rest/security/AclConstant.java |  47 +++
 .../kylin/rest/security/AclHBaseStorage.java    |  42 ---
 .../rest/security/MockAclHBaseStorage.java      |  83 -----
 .../rest/security/RealAclHBaseStorage.java      |  71 ----
 .../rest/service/AclTableMigrationTool.java     |  48 +--
 .../apache/kylin/rest/service/CubeService.java  |  32 +-
 .../kylin/rest/service/HBaseInfoUtil.java       |  58 +++
 .../kylin/rest/service/LegacyAclService.java    | 368 -------------------
 .../kylin/rest/service/LegacyUserService.java   | 237 ------------
 .../apache/kylin/rest/service/QueryService.java | 142 +++----
 .../src/main/resources/applicationContext.xml   |   7 -
 .../kylin/storage/hbase/HBaseResourceStore.java |  26 +-
 .../storage/hbase/util/ZookeeperJobLock.java    |   5 +-
 .../kylin/storage/hbase/util/ZookeeperUtil.java |  32 +-
 .../kylin/storage/hdfs/HDFSResourceStore.java   |   7 +-
 .../org/apache/kylin/tool/DiagnosisInfoCLI.java |  10 +-
 24 files changed, 500 insertions(+), 1079 deletions(-)
----------------------------------------------------------------------

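In short, this commit drops the dedicated HBase ACL/user storage path (AclHBaseStorage, LegacyAclService, LegacyUserService are removed) and keeps ACL entries and saved queries as ordinary metadata records behind the ResourceStore abstraction, so any ResourceStore backend can serve them. A minimal sketch of the idea follows; the resource root, the per-user path layout, and the listSavedQueryOwners helper are illustrative assumptions, not the layout the commit actually uses:

    import java.io.IOException;
    import java.util.NavigableSet;

    import org.apache.kylin.common.persistence.ResourceStore;

    public class SavedQueryPathsSketch {
        // hypothetical layout: one resource per user under a common root
        static final String QUERY_ROOT = "/saved_queries";

        static String pathForUser(String userName) {
            return QUERY_ROOT + "/" + userName;
        }

        // listing relies on the ResourceStore folder listing touched by this commit;
        // per the javadoc added below, it may return null when the folder does not exist
        static NavigableSet<String> listSavedQueryOwners(ResourceStore store) throws IOException {
            return store.listResources(QUERY_ROOT);
        }
    }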

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 7f366d8..77c2987 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.lock.DistributedLockFactory;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.CliCommandExecutor;
+import org.apache.kylin.common.util.ZooKeeperUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -188,16 +189,16 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     private String cachedHdfsWorkingDirectory;
-    
+
     public String getHdfsWorkingDirectory() {
         if (cachedHdfsWorkingDirectory != null)
             return cachedHdfsWorkingDirectory;
-        
+
         String root = getRequired("kylin.env.hdfs-working-dir");
         Path path = new Path(root);
         if (!path.isAbsolute())
             throw new IllegalArgumentException("kylin.env.hdfs-working-dir must be absolute, but got " + root);
-        
+
         // make sure path is qualified
         try {
             FileSystem fs = path.getFileSystem(new Configuration());
@@ -205,19 +206,34 @@ abstract public class KylinConfigBase implements Serializable {
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
-        
+
         // append metadata-url prefix
         root = new Path(path, StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).toString();
-        
+
         if (!root.endsWith("/"))
             root += "/";
-        
+
         cachedHdfsWorkingDirectory = root;
         if (cachedHdfsWorkingDirectory.startsWith("file:")) {
             cachedHdfsWorkingDirectory = cachedHdfsWorkingDirectory.replace("file:", "file://");
         }
         return cachedHdfsWorkingDirectory;
     }
+    
+    /**
+     * A comma separated list of host:port pairs, each corresponding to a ZooKeeper server
+     */
+    public String getZookeeperConnectString() {
+        String str = getOptional("kylin.env.zookeeper-connect-string");
+        if (str != null)
+            return str;
+        
+        str = ZooKeeperUtil.getZKConnectStringFromHBase();
+        if (str != null)
+            return str;
+        
+        throw new RuntimeException("Please set 'kylin.env.zookeeper-connect-string' in kylin.properties");
+    }
 
     // ============================================================================
     // METADATA
@@ -962,13 +978,13 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public boolean isAdhocEnabled() {
-        return StringUtils.isNotEmpty(getAdHocRunnerClassName()); 
+        return StringUtils.isNotEmpty(getAdHocRunnerClassName());
     }
 
     public String getAdHocRunnerClassName() {
         return getOptional("kylin.query.ad-hoc.runner.class-name", "");
     }
-    
+
     public String getAdHocConverterClassName() {
         return getOptional("kylin.query.ad-hoc.converter.class-name", "org.apache.kylin.storage.adhoc.HiveAdhocConverter");
     }

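The getZookeeperConnectString() method added above resolves the ZooKeeper quorum in two steps: an explicit kylin.env.zookeeper-connect-string property wins, otherwise the value is derived from the HBase configuration through the new ZooKeeperUtil, and a RuntimeException is thrown if neither source yields a value. A minimal usage sketch, assuming a running Kylin environment so KylinConfig.getInstanceFromEnv() works (the demo class itself is hypothetical):

    import org.apache.kylin.common.KylinConfig;

    public class ZkConnectStringDemo {
        public static void main(String[] args) {
            KylinConfig config = KylinConfig.getInstanceFromEnv();
            // property first, then hbase.zookeeper.quorum + clientPort as fallback
            String zk = config.getZookeeperConnectString();
            System.out.println("ZooKeeper quorum: " + zk);
        }
    }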
http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
index 7fb93e7..8a84968 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
@@ -119,6 +119,9 @@ abstract public class ResourceStore {
         return listResourcesImpl(path);
     }
 
+    /**
+     * return null if given path is not a folder or not exists
+     */
     abstract protected NavigableSet<String> listResourcesImpl(String folderPath) throws IOException;
 
     public String createMetaStoreUUID() throws IOException {
@@ -201,6 +204,9 @@ abstract public class ResourceStore {
         }
     }
 
+    /**
+     * return empty list if given path is not a folder or not exists
+     */
     abstract protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive) throws IOException;
 
     /**
@@ -452,5 +458,4 @@ abstract public class ResourceStore {
 
         return metaDirURI;
     }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/core-common/src/main/java/org/apache/kylin/common/util/ZooKeeperUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ZooKeeperUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/ZooKeeperUtil.java
new file mode 100644
index 0000000..66fed31
--- /dev/null
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ZooKeeperUtil.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.common.util;
+
+import java.util.Arrays;
+
+import javax.annotation.Nullable;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
+
+/**
+ * Use reflection to get zookeeper connect string from HBase configuration.
+ */
+public class ZooKeeperUtil {
+    private static final Logger logger = LoggerFactory.getLogger(ZooKeeperUtil.class);
+
+    public static String getZKConnectStringFromHBase() {
+        Configuration hconf = null;
+        try {
+            Class<? extends Object> hbaseConnClz = ClassUtil.forName("org.apache.kylin.storage.hbase.HBaseConnection", Object.class);
+            hconf = (Configuration) hbaseConnClz.getMethod("getCurrentHBaseConfiguration").invoke(null);
+        } catch (Throwable ex) {
+            logger.warn("Failed to get zookeeper connect string from HBase configuration", ex);
+            return null;
+        }
+        
+        final String serverList = hconf.get("hbase.zookeeper.quorum");
+        final String port = hconf.get("hbase.zookeeper.property.clientPort");
+        return StringUtils.join(Iterables.transform(Arrays.asList(serverList.split(",")), new Function<String, String>() {
+            @Nullable
+            @Override
+            public String apply(String input) {
+                return input + ":" + port;
+            }
+        }), ",");
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java b/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
index 91a9dfd..f183e7c 100644
--- a/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
@@ -55,7 +55,8 @@ public class ResourceStoreTest {
     }
 
     public static String mockUrl(String tag, KylinConfig kylinConfig) {
-        return kylinConfig.getMetadataUrlPrefix() + "@" + tag;
+        String str = kylinConfig.getMetadataUrlPrefix() + "@" + tag;
+        return str;
     }
 
     private static void testAStore(ResourceStore store) throws IOException {

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
index 65d5b52..2cb671e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITAclTableMigrationToolTest.java
@@ -41,10 +41,9 @@ import org.apache.kylin.common.persistence.ResourceStoreTest;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.rest.security.AclHBaseStorage;
+import org.apache.kylin.rest.security.AclConstant;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.AclTableMigrationTool;
-import org.apache.kylin.rest.service.LegacyUserService;
 import org.apache.kylin.rest.service.UserGrantedAuthority;
 import org.apache.kylin.rest.service.UserService;
 import org.apache.kylin.rest.util.Serializer;
@@ -67,9 +66,9 @@ public class ITAclTableMigrationToolTest extends HBaseMetadataTestCase {
 
     private Logger logger = LoggerFactory.getLogger(ITAclTableMigrationToolTest.class);
 
-    private TableName aclTable = TableName.valueOf(STORE_WITH_OLD_TABLE + AclHBaseStorage.ACL_TABLE_NAME);
+    private TableName aclTable = TableName.valueOf(STORE_WITH_OLD_TABLE + AclConstant.ACL_TABLE_NAME);
 
-    private TableName userTable = TableName.valueOf(STORE_WITH_OLD_TABLE + AclHBaseStorage.USER_TABLE_NAME);
+    private TableName userTable = TableName.valueOf(STORE_WITH_OLD_TABLE + AclConstant.USER_TABLE_NAME);
 
     private Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(UserGrantedAuthority[].class);
 
@@ -127,15 +126,15 @@ public class ITAclTableMigrationToolTest extends HBaseMetadataTestCase {
     private void createTestHTables() throws IOException {
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
         Admin hbaseAdmin = new HBaseAdmin(conf);
-        creatTable(hbaseAdmin, conf, aclTable, new String[] { AclHBaseStorage.ACL_INFO_FAMILY, AclHBaseStorage.ACL_ACES_FAMILY });
-        creatTable(hbaseAdmin, conf, userTable, new String[] { AclHBaseStorage.USER_AUTHORITY_FAMILY });
+        creatTable(hbaseAdmin, conf, aclTable, new String[] { AclConstant.ACL_INFO_FAMILY, AclConstant.ACL_ACES_FAMILY });
+        creatTable(hbaseAdmin, conf, userTable, new String[] { AclConstant.USER_AUTHORITY_FAMILY });
     }
 
     private void addRecordsToTable() throws Exception {
         Table htable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(userTable);
         Pair<byte[], byte[]> pair = getRandomUserRecord();
         Put put = new Put(pair.getKey());
-        put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+        put.addColumn(Bytes.toBytes(AclConstant.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclConstant.USER_AUTHORITY_COLUMN), pair.getSecond());
         htable.put(put);
     }
 
@@ -196,7 +195,7 @@ public class ITAclTableMigrationToolTest extends HBaseMetadataTestCase {
         UserGrantedAuthority[] serializing = new UserGrantedAuthority[authorities.size() + 1];
 
         // password is stored as the [0] authority
-        serializing[0] = new UserGrantedAuthority(LegacyUserService.PWD_PREFIX + "password");
+        serializing[0] = new UserGrantedAuthority(AclConstant.PWD_PREFIX + "password");
         int i = 1;
         for (GrantedAuthority a : authorities) {
             serializing[i++] = new UserGrantedAuthority(a.getAuthority());

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
index fd91397..d879fac 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hbase/ITHBaseResourceStoreTest.java
@@ -18,6 +18,9 @@
 
 package org.apache.kylin.storage.hbase;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -28,11 +31,10 @@ import org.apache.kylin.common.persistence.StringEntity;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
+@Ignore
 public class ITHBaseResourceStoreTest extends HBaseMetadataTestCase {
 
     private KylinConfig kylinConfig;

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanJobHbaseUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanJobHbaseUtil.java b/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanJobHbaseUtil.java
new file mode 100644
index 0000000..3728ea1
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanJobHbaseUtil.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.job;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class StorageCleanJobHbaseUtil {
+
+    protected static final Logger logger = LoggerFactory.getLogger(StorageCleanJobHbaseUtil.class);
+
+    public static void cleanUnusedHBaseTables(boolean delete, int deleteTimeout) throws IOException {
+        Configuration conf = HBaseConfiguration.create();
+        CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
+        // get all kylin hbase tables
+        try (HBaseAdmin hbaseAdmin = new HBaseAdmin(conf)) {
+            String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
+            HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
+            List<String> allTablesNeedToBeDropped = new ArrayList<String>();
+            for (HTableDescriptor desc : tableDescriptors) {
+                String host = desc.getValue(IRealizationConstants.HTableTag);
+                if (KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix().equalsIgnoreCase(host)) {
+                    //only take care htables that belongs to self, and created more than 2 days
+                    allTablesNeedToBeDropped.add(desc.getTableName().getNameAsString());
+                }
+            }
+
+            // remove every segment htable from drop list
+            for (CubeInstance cube : cubeMgr.listAllCubes()) {
+                for (CubeSegment seg : cube.getSegments()) {
+                    String tablename = seg.getStorageLocationIdentifier();
+                    if (allTablesNeedToBeDropped.contains(tablename)) {
+                        allTablesNeedToBeDropped.remove(tablename);
+                        logger.info("Exclude table " + tablename + " from drop list, as the table belongs to cube " + cube.getName() + " with status " + cube.getStatus());
+                    }
+                }
+            }
+
+            if (delete == true) {
+                // drop tables
+                ExecutorService executorService = Executors.newSingleThreadExecutor();
+                for (String htableName : allTablesNeedToBeDropped) {
+                    FutureTask futureTask = new FutureTask(new DeleteHTableRunnable(hbaseAdmin, htableName));
+                    executorService.execute(futureTask);
+                    try {
+                        futureTask.get(deleteTimeout, TimeUnit.MINUTES);
+                    } catch (TimeoutException e) {
+                        logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + deleteTimeout + " minutes!");
+                        futureTask.cancel(true);
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                        futureTask.cancel(true);
+                    }
+                }
+                executorService.shutdown();
+            } else {
+                System.out.println("--------------- Tables To Be Dropped ---------------");
+                for (String htableName : allTablesNeedToBeDropped) {
+                    System.out.println(htableName);
+                }
+                System.out.println("----------------------------------------------------");
+            }
+        } catch (IOException e) {
+            throw new IOException(e);
+        }
+    }
+
+    static class DeleteHTableRunnable implements Callable {
+        HBaseAdmin hbaseAdmin;
+        String htableName;
+
+        DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
+            this.hbaseAdmin = hbaseAdmin;
+            this.htableName = htableName;
+        }
+
+        public Object call() throws Exception {
+            logger.info("Deleting HBase table " + htableName);
+            if (hbaseAdmin.tableExists(htableName)) {
+                if (hbaseAdmin.isTableEnabled(htableName)) {
+                    hbaseAdmin.disableTable(htableName);
+                }
+
+                hbaseAdmin.deleteTable(htableName);
+                logger.info("Deleted HBase table " + htableName);
+            } else {
+                logger.info("HBase table" + htableName + " does not exist");
+            }
+            return null;
+        }
+    }
+}

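The drop loop in the new StorageCleanJobHbaseUtil above bounds each table deletion with a FutureTask: the delete runs on a single-thread executor, the caller waits at most deleteTimeout minutes, and a timed-out task is cancelled rather than blocking the whole cleanup. A standalone sketch of that pattern, with a sleeping placeholder task instead of a real HBaseAdmin call:

    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.FutureTask;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public class TimeBoundedTaskDemo {
        public static void main(String[] args) throws InterruptedException {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            // placeholder for the per-table delete; sleeps to simulate a slow drop
            FutureTask<Void> task = new FutureTask<>(() -> {
                Thread.sleep(5000);
                return null;
            });
            executor.execute(task);
            try {
                task.get(1, TimeUnit.SECONDS); // same idea as futureTask.get(deleteTimeout, TimeUnit.MINUTES)
            } catch (TimeoutException e) {
                task.cancel(true); // give up on the slow delete instead of blocking the cleanup
            } catch (ExecutionException e) {
                task.cancel(true);
            }
            executor.shutdown();
        }
    }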
http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java b/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
index d92107b..448e3c6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
@@ -19,15 +19,10 @@
 package org.apache.kylin.rest.job;
 
 import java.io.IOException;
+import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
 import java.util.regex.Pattern;
 
 import javax.annotation.Nullable;
@@ -39,9 +34,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
@@ -56,9 +48,8 @@ import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
-import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.ISourceMetadataExplorer;
+import org.apache.kylin.source.SourceFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -79,55 +70,16 @@ public class StorageCleanupJob extends AbstractApplication {
     protected boolean force = false;
     protected static ExecutableManager executableManager = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
 
-    private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
-        CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
-        // get all kylin hbase tables
-        try (HBaseAdmin hbaseAdmin = new HBaseAdmin(conf)) {
-            String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
-            HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
-            List<String> allTablesNeedToBeDropped = new ArrayList<String>();
-            for (HTableDescriptor desc : tableDescriptors) {
-                String host = desc.getValue(IRealizationConstants.HTableTag);
-                if (KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix().equalsIgnoreCase(host)) {
-                    //only take care htables that belongs to self, and created more than 2 days
-                    allTablesNeedToBeDropped.add(desc.getTableName().getNameAsString());
-                }
-            }
-
-            // remove every segment htable from drop list
-            for (CubeInstance cube : cubeMgr.listAllCubes()) {
-                for (CubeSegment seg : cube.getSegments()) {
-                    String tablename = seg.getStorageLocationIdentifier();
-                    if (allTablesNeedToBeDropped.contains(tablename)) {
-                        allTablesNeedToBeDropped.remove(tablename);
-                        logger.info("Exclude table " + tablename + " from drop list, as the table belongs to cube " + cube.getName() + " with status " + cube.getStatus());
-                    }
-                }
-            }
-
-            if (delete == true) {
-                // drop tables
-                ExecutorService executorService = Executors.newSingleThreadExecutor();
-                for (String htableName : allTablesNeedToBeDropped) {
-                    FutureTask futureTask = new FutureTask(new DeleteHTableRunnable(hbaseAdmin, htableName));
-                    executorService.execute(futureTask);
-                    try {
-                        futureTask.get(deleteTimeout, TimeUnit.MINUTES);
-                    } catch (TimeoutException e) {
-                        logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + deleteTimeout + " minutes!");
-                        futureTask.cancel(true);
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                        futureTask.cancel(true);
-                    }
-                }
-                executorService.shutdown();
-            } else {
-                System.out.println("--------------- Tables To Be Dropped ---------------");
-                for (String htableName : allTablesNeedToBeDropped) {
-                    System.out.println(htableName);
-                }
-                System.out.println("----------------------------------------------------");
+    protected void cleanUnusedHBaseTables() throws IOException {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        if ("hbase".equals(config.getMetadataUrl().getScheme())) {
+            try {
+                // use reflection to isolate NoClassDef errors when HBase is not available
+                Class hbaseCleanUpUtil = Class.forName("org.apache.kylin.rest.job.StorageCleanJobHbaseUtil");
+                Method cleanUnusedHBaseTables = hbaseCleanUpUtil.getDeclaredMethod("cleanUnusedHBaseTables", boolean.class, int.class);
+                cleanUnusedHBaseTables.invoke(hbaseCleanUpUtil, delete, deleteTimeout);
+            } catch (Throwable e) {
+                throw new IOException(e);
             }
         }
     }
@@ -147,41 +99,13 @@ public class StorageCleanupJob extends AbstractApplication {
         logger.info("force option value: '" + optionsHelper.getOptionValue(OPTION_FORCE) + "'");
         delete = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_DELETE));
         force = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_FORCE));
-
-        Configuration conf = HBaseConfiguration.create();
-
-        cleanUnusedIntermediateHiveTable(conf);
-        cleanUnusedHdfsFiles(conf);
-        cleanUnusedHBaseTables(conf);
-
-    }
-
-    class DeleteHTableRunnable implements Callable {
-        HBaseAdmin hbaseAdmin;
-        String htableName;
-
-        DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
-            this.hbaseAdmin = hbaseAdmin;
-            this.htableName = htableName;
-        }
-
-        public Object call() throws Exception {
-            logger.info("Deleting HBase table " + htableName);
-            if (hbaseAdmin.tableExists(htableName)) {
-                if (hbaseAdmin.isTableEnabled(htableName)) {
-                    hbaseAdmin.disableTable(htableName);
-                }
-
-                hbaseAdmin.deleteTable(htableName);
-                logger.info("Deleted HBase table " + htableName);
-            } else {
-                logger.info("HBase table" + htableName + " does not exist");
-            }
-            return null;
-        }
+        cleanUnusedIntermediateHiveTable();
+        cleanUnusedHdfsFiles();
+        cleanUnusedHBaseTables();
     }
 
-    private void cleanUnusedHdfsFiles(Configuration conf) throws IOException {
+    private void cleanUnusedHdfsFiles() throws IOException {
+        Configuration conf = HadoopUtil.getCurrentConfiguration();
         JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
 
@@ -245,7 +169,8 @@ public class StorageCleanupJob extends AbstractApplication {
         }
     }
 
-    private void cleanUnusedIntermediateHiveTable(Configuration conf) throws Exception {
+    private void cleanUnusedIntermediateHiveTable() throws Exception {
+        Configuration conf = HadoopUtil.getCurrentConfiguration();
         final KylinConfig config = KylinConfig.getInstanceFromEnv();
         JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
         final CliCommandExecutor cmdExec = config.getCliCommandExecutor();
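The hunk above replaces the direct HBaseAdmin-based cleanup with a call made through
reflection, so StorageCleanupJob no longer needs HBase classes at compile time and a
missing HBase jar can no longer trigger NoClassDefFoundError. A minimal sketch of the
pattern, using a hypothetical helper class name (the real one is
org.apache.kylin.rest.job.StorageCleanJobHbaseUtil, as shown in the diff):

import java.io.IOException;
import java.lang.reflect.Method;

public class ReflectiveCleanupSketch {

    // Only reach for the HBase-specific helper when metadata actually lives in HBase.
    public static void cleanIfHBase(String metadataScheme, boolean delete, int deleteTimeout) throws IOException {
        if (!"hbase".equals(metadataScheme)) {
            return; // nothing to clean when metadata is not stored in HBase
        }
        try {
            // Resolve the helper by name so this class has no compile-time HBase dependency.
            Class<?> helper = Class.forName("com.example.HBaseCleanupHelper"); // hypothetical name
            Method m = helper.getDeclaredMethod("cleanUnusedHBaseTables", boolean.class, int.class);
            m.invoke(null, delete, deleteTimeout); // static method: the receiver argument is ignored
        } catch (Throwable e) {
            // Linkage, reflection and invocation failures all surface uniformly as IOException.
            throw new IOException(e);
        }
    }
}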

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/security/AclConstant.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclConstant.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclConstant.java
new file mode 100644
index 0000000..7b959e1
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclConstant.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.security;
+
+/**
+ * Created by xiefan on 17-5-3.
+ */
+public interface AclConstant {
+
+    static final String ACL_INFO_FAMILY = "i";
+
+    static final String ACL_ACES_FAMILY = "a";
+
+    static final String ACL_TABLE_NAME = "_acl";
+
+    static final String USER_AUTHORITY_FAMILY = "a";
+
+    static final String USER_TABLE_NAME = "_user";
+
+    static final String USER_AUTHORITY_COLUMN = "c";
+
+    static String ACL_INFO_FAMILY_TYPE_COLUMN = "t";
+
+    static String ACL_INFO_FAMILY_OWNER_COLUMN = "o";
+
+    static String ACL_INFO_FAMILY_PARENT_COLUMN = "p";
+
+    static String ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN = "i";
+
+    static final String PWD_PREFIX = "PWD:";
+}
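AclConstant above collects the column families and qualifiers of the legacy "_acl" and
"_user" HBase tables in one place (interface fields are implicitly public static final,
so the explicit modifiers are redundant but harmless). A small sketch of how a reader of
a legacy ACL row might use these constants, assuming the Result comes from an HBase Get
or Scan against the "<metadata-prefix>_acl" table:

import org.apache.hadoop.hbase.client.Result;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.rest.security.AclConstant;

class AclRowSketch {
    // Reads the ACL type qualifier ("t") from the info family ("i") of one row.
    static String readAclType(Result row) {
        byte[] value = row.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY),
                Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_TYPE_COLUMN));
        return value == null ? null : Bytes.toString(value);
    }
}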

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
deleted file mode 100644
index b595c72..0000000
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.security;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.client.Table;
-
-/**
- */
-@Deprecated  //use ResourceStore interface instead.
-public interface AclHBaseStorage {
-
-    String ACL_INFO_FAMILY = "i";
-    String ACL_ACES_FAMILY = "a";
-    String ACL_TABLE_NAME = "_acl";
-
-    String USER_AUTHORITY_FAMILY = "a";
-    String USER_TABLE_NAME = "_user";
-    String USER_AUTHORITY_COLUMN = "c";
-
-    String prepareHBaseTable(Class<?> clazz) throws IOException;
-
-    Table getTable(String tableName) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
deleted file mode 100644
index ca49641..0000000
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.security;
-
-import java.io.IOException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.StorageURL;
-import org.apache.kylin.rest.service.LegacyAclService;
-import org.apache.kylin.rest.service.LegacyUserService;
-import org.apache.kylin.rest.service.QueryService;
-
-/**
- */
-@Deprecated
-public class MockAclHBaseStorage implements AclHBaseStorage {
-
-    private static final String aclTableName = "MOCK-ACL-TABLE";
-    private static final String userTableName = "MOCK-USER-TABLE";
-
-    private Table mockedAclTable;
-    private Table mockedUserTable;
-    private RealAclHBaseStorage realAcl;
-
-    public MockAclHBaseStorage() {
-        StorageURL metadataUrl = KylinConfig.getInstanceFromEnv().getMetadataUrl();
-        if (metadataUrl.getScheme().endsWith("hbase")) {
-            // hbase must be available since metadata is on it
-            // in this case, let us use a real ACL instead of mockup
-            realAcl = new RealAclHBaseStorage();
-        }
-    }
-
-    @Override
-    public String prepareHBaseTable(Class<?> clazz) throws IOException {
-        if (realAcl != null) {
-            return realAcl.prepareHBaseTable(clazz);
-        }
-
-        if (clazz == LegacyAclService.class) {
-            mockedAclTable = new MockHTable(aclTableName, ACL_INFO_FAMILY, ACL_ACES_FAMILY);
-            return aclTableName;
-        } else if (clazz == LegacyUserService.class) {
-            mockedUserTable = new MockHTable(userTableName, USER_AUTHORITY_FAMILY, QueryService.USER_QUERY_FAMILY);
-            return userTableName;
-        } else {
-            throw new IllegalStateException("prepareHBaseTable for unknown class: " + clazz);
-        }
-    }
-
-    @Override
-    public Table getTable(String tableName) throws IOException {
-        if (realAcl != null) {
-            return realAcl.getTable(tableName);
-        }
-
-        if (StringUtils.equals(tableName, aclTableName)) {
-            return mockedAclTable;
-        } else if (StringUtils.equals(tableName, userTableName)) {
-            return mockedUserTable;
-        } else {
-            throw new IllegalStateException("getTable failed" + tableName);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
deleted file mode 100644
index 98cef3a..0000000
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.security;
-
-import java.io.IOException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.StorageURL;
-import org.apache.kylin.rest.service.LegacyAclService;
-import org.apache.kylin.rest.service.LegacyUserService;
-import org.apache.kylin.rest.service.QueryService;
-import org.apache.kylin.storage.hbase.HBaseConnection;
-
-/**
- */
-@Deprecated
-public class RealAclHBaseStorage implements AclHBaseStorage {
-
-    private StorageURL hbaseUrl;
-    private String aclTableName;
-    private String userTableName;
-
-    @Override
-    public String prepareHBaseTable(Class<?> clazz) throws IOException {
-        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        hbaseUrl = kylinConfig.getMetadataUrl();
-        String tableNameBase = hbaseUrl.getIdentifier();
-
-        if (clazz == LegacyAclService.class) {
-            aclTableName = tableNameBase + ACL_TABLE_NAME;
-            HBaseConnection.createHTableIfNeeded(hbaseUrl, aclTableName, ACL_INFO_FAMILY, ACL_ACES_FAMILY);
-            return aclTableName;
-        } else if (clazz == LegacyUserService.class) {
-            userTableName = tableNameBase + USER_TABLE_NAME;
-            HBaseConnection.createHTableIfNeeded(hbaseUrl, userTableName, USER_AUTHORITY_FAMILY, QueryService.USER_QUERY_FAMILY);
-            return userTableName;
-        } else {
-            throw new IllegalStateException("prepareHBaseTable for unknown class: " + clazz);
-        }
-    }
-
-    @Override
-    public Table getTable(String tableName) throws IOException {
-        if (StringUtils.equals(tableName, aclTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
-        } else if (StringUtils.equals(tableName, userTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
-        } else {
-            throw new IllegalStateException("getTable failed" + tableName);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
index d88ca74..e22e0dc 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
@@ -28,19 +28,18 @@ import java.util.Map;
 import java.util.NavigableMap;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.StorageURL;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.StringEntity;
 import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.rest.security.AclHBaseStorage;
+import org.apache.kylin.rest.security.AclConstant;
 import org.apache.kylin.rest.util.Serializer;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.HBaseResourceStore;
@@ -74,16 +73,16 @@ public class AclTableMigrationTool {
             }
             logger.info("Start to migrate acl table data");
             ResourceStore store = ResourceStore.getStore(kylinConfig);
-            String userTableName = kylinConfig.getMetadataUrlPrefix() + AclHBaseStorage.USER_TABLE_NAME;
+            String userTableName = kylinConfig.getMetadataUrlPrefix() + AclConstant.USER_TABLE_NAME;
             //System.out.println("user table name : " + userTableName);
-            String aclTableName = kylinConfig.getMetadataUrlPrefix() + AclHBaseStorage.ACL_TABLE_NAME;
+            String aclTableName = kylinConfig.getMetadataUrlPrefix() + AclConstant.ACL_TABLE_NAME;
             if (needMigrateTable(aclTableName, store)) {
                 logger.info("Migrate table : {}", aclTableName);
-                migrate(store, AclHBaseStorage.ACL_TABLE_NAME, kylinConfig);
+                migrate(store, AclConstant.ACL_TABLE_NAME, kylinConfig);
             }
             if (needMigrateTable(userTableName, store)) {
                 logger.info("Migrate table : {}", userTableName);
-                migrate(store, AclHBaseStorage.USER_TABLE_NAME, kylinConfig);
+                migrate(store, AclConstant.USER_TABLE_NAME, kylinConfig);
             }
         }
     }
@@ -95,8 +94,8 @@ public class AclTableMigrationTool {
             return false;
         }
 
-        String userTableName = kylinConfig.getMetadataUrlPrefix() + AclHBaseStorage.USER_TABLE_NAME;
-        String aclTableName = kylinConfig.getMetadataUrlPrefix() + AclHBaseStorage.ACL_TABLE_NAME;
+        String userTableName = kylinConfig.getMetadataUrlPrefix() + AclConstant.USER_TABLE_NAME;
+        String aclTableName = kylinConfig.getMetadataUrlPrefix() + AclConstant.ACL_TABLE_NAME;
         if (needMigrateTable(aclTableName, store) || needMigrateTable(userTableName, store))
             return true;
         return false;
@@ -112,8 +111,8 @@ public class AclTableMigrationTool {
     private void migrate(ResourceStore store, String tableType, KylinConfig kylinConfig) throws IOException {
 
         switch (tableType) {
-        case AclHBaseStorage.ACL_TABLE_NAME:
-            String aclTableName = kylinConfig.getMetadataUrlPrefix() + AclHBaseStorage.ACL_TABLE_NAME;
+        case AclConstant.ACL_TABLE_NAME:
+            String aclTableName = kylinConfig.getMetadataUrlPrefix() + AclConstant.ACL_TABLE_NAME;
             convertToResourceStore(kylinConfig, aclTableName, store, new ResultConverter() {
                 @Override
                 public void convertResult(ResultScanner rs, ResourceStore store) throws IOException {
@@ -135,8 +134,8 @@ public class AclTableMigrationTool {
                 }
             });
             break;
-        case AclHBaseStorage.USER_TABLE_NAME:
-            String userTableName = kylinConfig.getMetadataUrlPrefix() + AclHBaseStorage.USER_TABLE_NAME;
+        case AclConstant.USER_TABLE_NAME:
+            String userTableName = kylinConfig.getMetadataUrlPrefix() + AclConstant.USER_TABLE_NAME;
 
             convertToResourceStore(kylinConfig, userTableName, store, new ResultConverter() {
                 @Override
@@ -162,9 +161,10 @@ public class AclTableMigrationTool {
     }
 
     private boolean checkTableExist(String tableName) throws IOException {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        Admin hbaseAdmin = new HBaseAdmin(conf);
-        return hbaseAdmin.tableExists(TableName.valueOf(tableName));
+        StorageURL metadataUrl = KylinConfig.getInstanceFromEnv().getMetadataUrl();
+        try (Admin admin = HBaseConnection.get(metadataUrl).getAdmin()) {
+            return admin.tableExists(TableName.valueOf(tableName));
+        }
     }
 
     private boolean isTableAlreadyMigrate(ResourceStore store, String tableName) throws IOException {
@@ -189,7 +189,7 @@ public class AclTableMigrationTool {
     }
 
     private DomainObjectInfo getDomainObjectInfoFromRs(Result result) {
-        String type = String.valueOf(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_TYPE_COLUMN)));
+        String type = String.valueOf(result.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY), Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_TYPE_COLUMN)));
         String id = String.valueOf(result.getRow());
         DomainObjectInfo newInfo = new DomainObjectInfo();
         newInfo.setId(id);
@@ -198,23 +198,23 @@ public class AclTableMigrationTool {
     }
 
     private DomainObjectInfo getParentDomainObjectInfoFromRs(Result result) throws IOException {
-        DomainObjectInfo parentInfo = domainObjSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_PARENT_COLUMN)));
+        DomainObjectInfo parentInfo = domainObjSerializer.deserialize(result.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY), Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_PARENT_COLUMN)));
         return parentInfo;
     }
 
     private boolean getInheriting(Result result) {
-        boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
+        boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY), Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
         return entriesInheriting;
     }
 
     private SidInfo getOwnerSidInfo(Result result) throws IOException {
-        SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(LegacyAclService.ACL_INFO_FAMILY_OWNER_COLUMN)));
+        SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY), Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_OWNER_COLUMN)));
         return owner;
     }
 
     private Map<String, AceInfo> getAllAceInfo(Result result) throws IOException {
         Map<String, AceInfo> allAceInfoMap = new HashMap<>();
-        NavigableMap<byte[], byte[]> familyMap = result.getFamilyMap(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY));
+        NavigableMap<byte[], byte[]> familyMap = result.getFamilyMap(Bytes.toBytes(AclConstant.ACL_ACES_FAMILY));
         for (Map.Entry<byte[], byte[]> entry : familyMap.entrySet()) {
             String sid = String.valueOf(entry.getKey());
             AceInfo aceInfo = aceSerializer.deserialize(familyMap.get(entry.getValue()));
@@ -245,7 +245,7 @@ public class AclTableMigrationTool {
 
         String username = Bytes.toString(result.getRow());
 
-        byte[] valueBytes = result.getValue(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
+        byte[] valueBytes = result.getValue(Bytes.toBytes(AclConstant.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclConstant.USER_AUTHORITY_COLUMN));
         UserGrantedAuthority[] deserialized = ugaSerializer.deserialize(valueBytes);
 
         String password = "";
@@ -253,8 +253,8 @@ public class AclTableMigrationTool {
 
         // password is stored at [0] of authorities for backward compatibility
         if (deserialized != null) {
-            if (deserialized.length > 0 && deserialized[0].getAuthority().startsWith(LegacyUserService.PWD_PREFIX)) {
-                password = deserialized[0].getAuthority().substring(LegacyUserService.PWD_PREFIX.length());
+            if (deserialized.length > 0 && deserialized[0].getAuthority().startsWith(AclConstant.PWD_PREFIX)) {
+                password = deserialized[0].getAuthority().substring(AclConstant.PWD_PREFIX.length());
                 authorities = Arrays.asList(deserialized).subList(1, deserialized.length);
             } else {
                 authorities = Arrays.asList(deserialized);
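The rewritten checkTableExist above obtains an Admin from the long-lived Connection held
by HBaseConnection and closes it with try-with-resources, instead of constructing a
deprecated HBaseAdmin from a bare Configuration. A minimal sketch of that pattern, with
the Connection assumed to be created and cached elsewhere:

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

class TableExistsSketch {
    // The Admin is cheap and closed per call; the Connection stays open and shared.
    static boolean tableExists(Connection conn, String tableName) throws IOException {
        try (Admin admin = conn.getAdmin()) {
            return admin.tableExists(TableName.valueOf(tableName));
        }
    }
}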

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 22baabe..1eaa31c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -26,12 +26,10 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Map;
 import java.util.UUID;
 import java.util.WeakHashMap;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
@@ -61,8 +59,6 @@ import org.apache.kylin.rest.request.MetricsRequest;
 import org.apache.kylin.rest.response.HBaseResponse;
 import org.apache.kylin.rest.response.MetricsResponse;
 import org.apache.kylin.rest.security.AclPermission;
-import org.apache.kylin.storage.hbase.HBaseConnection;
-import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -405,33 +401,27 @@ public class CubeService extends BasicService {
      *
      * @param tableName The table name.
      * @return The HBaseResponse object contains table size, region count. null
-     * if error happens.
+     * if error happens
      * @throws IOException Exception when HTable resource is not closed correctly.
      */
     public HBaseResponse getHTableInfo(String tableName) throws IOException {
         if (htableInfoCache.containsKey(tableName)) {
             return htableInfoCache.get(tableName);
         }
-        Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
-        HBaseResponse hr = null;
-        long tableSize = 0;
-        int regionCount = 0;
 
-        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
-        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
-
-        for (long s : sizeMap.values()) {
-            tableSize += s;
+        HBaseResponse hr = new HBaseResponse();
+        if ("hbase".equals(getConfig().getMetadataUrl().getScheme())) {
+            try {
+                // use reflection to isolate NoClassDef errors when HBase is not available
+                hr = (HBaseResponse) Class.forName("org.apache.kylin.rest.service.HBaseInfoUtil")//
+                        .getMethod("getHBaseInfo", new Class[] { String.class, String.class })//
+                        .invoke(null, new Object[] { tableName, this.getConfig().getStorageUrl() });
+            } catch (Throwable e) {
+                throw new IOException(e);
+            }
         }
 
-        regionCount = sizeMap.size();
-
-        // Set response.
-        hr = new HBaseResponse();
-        hr.setTableSize(tableSize);
-        hr.setRegionCount(regionCount);
         htableInfoCache.put(tableName, hr);
-
         return hr;
     }
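getHTableInfo above keeps its per-table cache but hands all HBase work to a helper
resolved by reflection, so CubeService itself no longer links against HBase classes.
Note that getMethod must be given the exact parameter types declared on the target
method, otherwise it fails with NoSuchMethodException. A sketch of the caching plus
reflective dispatch, using a hypothetical helper name and signature:

import java.io.IOException;
import java.util.Map;
import java.util.WeakHashMap;

class HTableInfoLookupSketch {
    private final Map<String, Object> cache = new WeakHashMap<>();

    Object getInfo(String tableName, String metadataScheme, String storageUrl) throws IOException {
        Object cached = cache.get(tableName);
        if (cached != null) {
            return cached;
        }
        Object info = null;
        if ("hbase".equals(metadataScheme)) {
            try {
                // Hypothetical helper; only resolved when HBase is actually the storage.
                info = Class.forName("com.example.HBaseInfoHelper")
                        .getMethod("getInfo", String.class, String.class)
                        .invoke(null, tableName, storageUrl);
            } catch (Throwable e) {
                throw new IOException(e);
            }
        }
        cache.put(tableName, info);
        return info;
    }
}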
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/service/HBaseInfoUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/HBaseInfoUtil.java b/server-base/src/main/java/org/apache/kylin/rest/service/HBaseInfoUtil.java
new file mode 100644
index 0000000..3f0b2b5
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/HBaseInfoUtil.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.service;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.rest.response.HBaseResponse;
+import org.apache.kylin.storage.hbase.HBaseConnection;
+import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
+
+/**
+ * Created by xiefan on 17-5-5.
+ */
+public class HBaseInfoUtil {
+    public static HBaseResponse getHBaseInfo(String tableName, KylinConfig config) throws IOException {
+        if (!config.getStorageUrl().getScheme().equals("hbase"))
+            return null;
+        
+        Connection conn = HBaseConnection.get(config.getStorageUrl());
+        HBaseResponse hr = null;
+        long tableSize = 0;
+        int regionCount = 0;
+
+        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+
+        for (long s : sizeMap.values()) {
+            tableSize += s;
+        }
+
+        regionCount = sizeMap.size();
+
+        // Set response.
+        hr = new HBaseResponse();
+        hr.setTableSize(tableSize);
+        hr.setRegionCount(regionCount);
+        return hr;
+    }
+}
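HBaseInfoUtil.getHBaseInfo above sums the per-region sizes reported by
HBaseRegionSizeCalculator into a single HBaseResponse, and returns null when the storage
scheme is not "hbase". A sketch of how a caller might consume it; the getter names are
assumed to mirror the setters shown above:

import java.io.IOException;

import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.rest.response.HBaseResponse;
import org.apache.kylin.rest.service.HBaseInfoUtil;

class HTableInfoPrinter {
    static void printInfo(String tableName) throws IOException {
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        HBaseResponse hr = HBaseInfoUtil.getHBaseInfo(tableName, config);
        if (hr == null) {
            System.out.println(tableName + ": storage is not HBase, no table info available");
            return;
        }
        // getRegionCount()/getTableSize() are assumed getters matching the setters used above.
        System.out.println(tableName + ": " + hr.getRegionCount() + " regions, " + hr.getTableSize() + " bytes");
    }
}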

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
deleted file mode 100644
index 8ab6ebe..0000000
--- a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyAclService.java
+++ /dev/null
@@ -1,368 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.rest.service;
-
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-
-import javax.annotation.PostConstruct;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.rest.security.AclHBaseStorage;
-import org.apache.kylin.rest.util.Serializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.security.acls.domain.AccessControlEntryImpl;
-import org.springframework.security.acls.domain.AclAuthorizationStrategy;
-import org.springframework.security.acls.domain.AclImpl;
-import org.springframework.security.acls.domain.AuditLogger;
-import org.springframework.security.acls.domain.GrantedAuthoritySid;
-import org.springframework.security.acls.domain.ObjectIdentityImpl;
-import org.springframework.security.acls.domain.PermissionFactory;
-import org.springframework.security.acls.domain.PrincipalSid;
-import org.springframework.security.acls.model.AccessControlEntry;
-import org.springframework.security.acls.model.Acl;
-import org.springframework.security.acls.model.AlreadyExistsException;
-import org.springframework.security.acls.model.ChildrenExistException;
-import org.springframework.security.acls.model.MutableAcl;
-import org.springframework.security.acls.model.MutableAclService;
-import org.springframework.security.acls.model.NotFoundException;
-import org.springframework.security.acls.model.ObjectIdentity;
-import org.springframework.security.acls.model.PermissionGrantingStrategy;
-import org.springframework.security.acls.model.Sid;
-import org.springframework.security.core.Authentication;
-import org.springframework.security.core.context.SecurityContextHolder;
-import org.springframework.security.util.FieldUtils;
-import org.springframework.util.Assert;
-
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonMappingException;
-
-/**
- * @author xduo
- */
-//@Component("aclService")
-@Deprecated
-public class LegacyAclService implements MutableAclService {
-
-    private static final Logger logger = LoggerFactory.getLogger(LegacyAclService.class);
-
-    public static String ACL_INFO_FAMILY_TYPE_COLUMN = "t";
-    public static String ACL_INFO_FAMILY_OWNER_COLUMN = "o";
-    public static String ACL_INFO_FAMILY_PARENT_COLUMN = "p";
-    public static String ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN = "i";
-
-    private Serializer<SidInfo> sidSerializer = new Serializer<SidInfo>(SidInfo.class);
-    private Serializer<DomainObjectInfo> domainObjSerializer = new Serializer<DomainObjectInfo>(DomainObjectInfo.class);
-    private Serializer<AceInfo> aceSerializer = new Serializer<AceInfo>(AceInfo.class);
-
-    private String aclTableName = null;
-
-    private final Field fieldAces = FieldUtils.getField(AclImpl.class, "aces");
-
-    private final Field fieldAcl = FieldUtils.getField(AccessControlEntryImpl.class, "acl");
-
-    @Autowired
-    protected PermissionGrantingStrategy permissionGrantingStrategy;
-
-    @Autowired
-    protected PermissionFactory aclPermissionFactory;
-
-    @Autowired
-    protected AclAuthorizationStrategy aclAuthorizationStrategy;
-
-    @Autowired
-    protected AuditLogger auditLogger;
-
-    @Autowired
-    protected AclHBaseStorage aclHBaseStorage;
-
-    public LegacyAclService() throws IOException {
-        fieldAces.setAccessible(true);
-        fieldAcl.setAccessible(true);
-    }
-
-    @PostConstruct
-    public void init() throws IOException {
-        aclTableName = aclHBaseStorage.prepareHBaseTable(LegacyAclService.class);
-    }
-
-    @Override
-    public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
-        List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(aclTableName);
-
-            Scan scan = new Scan();
-            SingleColumnValueFilter parentFilter = new SingleColumnValueFilter(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), CompareOp.EQUAL, domainObjSerializer.serialize(new DomainObjectInfo(parentIdentity)));
-            parentFilter.setFilterIfMissing(true);
-            scan.setFilter(parentFilter);
-
-            ResultScanner scanner = htable.getScanner(scan);
-            for (Result result = scanner.next(); result != null; result = scanner.next()) {
-                String id = Bytes.toString(result.getRow());
-                String type = Bytes.toString(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN)));
-
-                oids.add(new ObjectIdentityImpl(type, id));
-            }
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-
-        return oids;
-    }
-
-    @Override
-    public Acl readAclById(ObjectIdentity object) throws NotFoundException {
-        Map<ObjectIdentity, Acl> aclsMap = readAclsById(Arrays.asList(object), null);
-        //        Assert.isTrue(aclsMap.containsKey(object), "There should have been an Acl entry for ObjectIdentity " + object);
-
-        return aclsMap.get(object);
-    }
-
-    @Override
-    public Acl readAclById(ObjectIdentity object, List<Sid> sids) throws NotFoundException {
-        Map<ObjectIdentity, Acl> aclsMap = readAclsById(Arrays.asList(object), sids);
-        Assert.isTrue(aclsMap.containsKey(object), "There should have been an Acl entry for ObjectIdentity " + object);
-
-        return aclsMap.get(object);
-    }
-
-    @Override
-    public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> objects) throws NotFoundException {
-        return readAclsById(objects, null);
-    }
-
-    @Override
-    public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
-        Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
-        Table htable = null;
-        Result result = null;
-        try {
-            htable = aclHBaseStorage.getTable(aclTableName);
-
-            for (ObjectIdentity oid : oids) {
-                result = htable.get(new Get(Bytes.toBytes(String.valueOf(oid.getIdentifier()))));
-
-                if (null != result && !result.isEmpty()) {
-                    SidInfo owner = sidSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN)));
-                    Sid ownerSid = (null == owner) ? null : (owner.isPrincipal() ? new PrincipalSid(owner.getSid()) : new GrantedAuthoritySid(owner.getSid()));
-                    boolean entriesInheriting = Bytes.toBoolean(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN)));
-
-                    Acl parentAcl = null;
-                    DomainObjectInfo parentInfo = domainObjSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN)));
-                    if (null != parentInfo) {
-                        ObjectIdentity parentObj = new ObjectIdentityImpl(parentInfo.getType(), parentInfo.getId());
-                        parentAcl = readAclById(parentObj, null);
-                    }
-
-                    AclImpl acl = new AclImpl(oid, oid.getIdentifier(), aclAuthorizationStrategy, permissionGrantingStrategy, parentAcl, null, entriesInheriting, ownerSid);
-                    genAces(sids, result, acl);
-
-                    aclMaps.put(oid, acl);
-                } else {
-                    throw new NotFoundException("Unable to find ACL information for object identity '" + oid + "'");
-                }
-            }
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-
-        return aclMaps;
-    }
-
-    @Override
-    public MutableAcl createAcl(ObjectIdentity objectIdentity) throws AlreadyExistsException {
-        Acl acl = null;
-
-        try {
-            acl = readAclById(objectIdentity);
-        } catch (NotFoundException e) {
-            //do nothing?
-        }
-        if (null != acl) {
-            throw new AlreadyExistsException("ACL of " + objectIdentity + " exists!");
-        }
-
-        Authentication auth = SecurityContextHolder.getContext().getAuthentication();
-        PrincipalSid sid = new PrincipalSid(auth);
-
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(aclTableName);
-
-            Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
-
-            htable.put(put);
-
-            logger.debug("ACL of " + objectIdentity + " created successfully.");
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-
-        return (MutableAcl) readAclById(objectIdentity);
-    }
-
-    @Override
-    public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(aclTableName);
-
-            Delete delete = new Delete(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-
-            List<ObjectIdentity> children = findChildren(objectIdentity);
-            if (!deleteChildren && children.size() > 0) {
-                throw new ChildrenExistException("Children exists for " + objectIdentity);
-            }
-
-            for (ObjectIdentity oid : children) {
-                deleteAcl(oid, deleteChildren);
-            }
-
-            htable.delete(delete);
-
-            logger.debug("ACL of " + objectIdentity + " deleted successfully.");
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-    }
-
-    @Override
-    public MutableAcl updateAcl(MutableAcl acl) throws NotFoundException {
-        try {
-            readAclById(acl.getObjectIdentity());
-        } catch (NotFoundException e) {
-            throw e;
-        }
-
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(aclTableName);
-
-            Delete delete = new Delete(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
-            delete.deleteFamily(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY));
-            htable.delete(delete);
-
-            Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
-
-            if (null != acl.getParentAcl()) {
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
-            }
-
-            for (AccessControlEntry ace : acl.getEntries()) {
-                AceInfo aceInfo = new AceInfo(ace);
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
-            }
-
-            if (!put.isEmpty()) {
-                htable.put(put);
-
-                logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
-            }
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-
-        return (MutableAcl) readAclById(acl.getObjectIdentity());
-    }
-
-    private void genAces(List<Sid> sids, Result result, AclImpl acl) throws JsonParseException, JsonMappingException, IOException {
-        List<AceInfo> aceInfos = new ArrayList<AceInfo>();
-        if (null != sids) {
-            // Just return aces in sids
-            for (Sid sid : sids) {
-                String sidName = null;
-                if (sid instanceof PrincipalSid) {
-                    sidName = ((PrincipalSid) sid).getPrincipal();
-                } else if (sid instanceof GrantedAuthoritySid) {
-                    sidName = ((GrantedAuthoritySid) sid).getGrantedAuthority();
-                }
-
-                AceInfo aceInfo = aceSerializer.deserialize(result.getValue(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(sidName)));
-                if (null != aceInfo) {
-                    aceInfos.add(aceInfo);
-                }
-            }
-        } else {
-            NavigableMap<byte[], byte[]> familyMap = result.getFamilyMap(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY));
-            for (byte[] qualifier : familyMap.keySet()) {
-                AceInfo aceInfo = aceSerializer.deserialize(familyMap.get(qualifier));
-
-                if (null != aceInfo) {
-                    aceInfos.add(aceInfo);
-                }
-            }
-        }
-
-        List<AccessControlEntry> newAces = new ArrayList<AccessControlEntry>();
-        for (int i = 0; i < aceInfos.size(); i++) {
-            AceInfo aceInfo = aceInfos.get(i);
-
-            if (null != aceInfo) {
-                Sid sid = aceInfo.getSidInfo().isPrincipal() ? new PrincipalSid(aceInfo.getSidInfo().getSid()) : new GrantedAuthoritySid(aceInfo.getSidInfo().getSid());
-                AccessControlEntry ace = new AccessControlEntryImpl(Long.valueOf(i), acl, sid, aclPermissionFactory.buildFromMask(aceInfo.getPermissionMask()), true, false, false);
-                newAces.add(ace);
-            }
-        }
-
-        this.setAces(acl, newAces);
-    }
-
-    private void setAces(AclImpl acl, List<AccessControlEntry> aces) {
-        try {
-            fieldAces.set(acl, aces);
-        } catch (IllegalAccessException e) {
-            throw new IllegalStateException("Could not set AclImpl entries", e);
-        }
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
deleted file mode 100644
index b8f3700..0000000
--- a/server-base/src/main/java/org/apache/kylin/rest/service/LegacyUserService.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.rest.service;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-import javax.annotation.PostConstruct;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.rest.security.AclHBaseStorage;
-import org.apache.kylin.rest.util.Serializer;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.security.core.GrantedAuthority;
-import org.springframework.security.core.userdetails.User;
-import org.springframework.security.core.userdetails.UserDetails;
-import org.springframework.security.core.userdetails.UsernameNotFoundException;
-import org.springframework.security.provisioning.UserDetailsManager;
-
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonMappingException;
-
-/**
- */
-//@Component("userService")
-@Deprecated
-public class LegacyUserService implements UserDetailsManager {
-
-    public static final String PWD_PREFIX = "PWD:";
-
-    private Serializer<UserGrantedAuthority[]> ugaSerializer = new Serializer<UserGrantedAuthority[]>(UserGrantedAuthority[].class);
-
-    private String userTableName = null;
-
-    @Autowired
-    protected AclHBaseStorage aclHBaseStorage;
-
-    @PostConstruct
-    public void init() throws IOException {
-        userTableName = aclHBaseStorage.prepareHBaseTable(LegacyUserService.class);
-    }
-
-    @Override
-    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(userTableName);
-
-            Get get = new Get(Bytes.toBytes(username));
-            get.addFamily(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY));
-            Result result = htable.get(get);
-
-            User user = hbaseRowToUser(result);
-            if (user == null)
-                throw new UsernameNotFoundException("User " + username + " not found.");
-
-            return user;
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-    }
-
-    private User hbaseRowToUser(Result result) throws JsonParseException, JsonMappingException, IOException {
-        if (null == result || result.isEmpty())
-            return null;
-
-        String username = Bytes.toString(result.getRow());
-
-        byte[] valueBytes = result.getValue(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
-        UserGrantedAuthority[] deserialized = ugaSerializer.deserialize(valueBytes);
-
-        String password = "";
-        List<UserGrantedAuthority> authorities = Collections.emptyList();
-
-        // password is stored at [0] of authorities for backward compatibility
-        if (deserialized != null) {
-            if (deserialized.length > 0 && deserialized[0].getAuthority().startsWith(PWD_PREFIX)) {
-                password = deserialized[0].getAuthority().substring(PWD_PREFIX.length());
-                authorities = Arrays.asList(deserialized).subList(1, deserialized.length);
-            } else {
-                authorities = Arrays.asList(deserialized);
-            }
-        }
-
-        return new User(username, password, authorities);
-    }
-
-    private Pair<byte[], byte[]> userToHBaseRow(UserDetails user) throws JsonProcessingException {
-        byte[] key = Bytes.toBytes(user.getUsername());
-
-        Collection<? extends GrantedAuthority> authorities = user.getAuthorities();
-        if (authorities == null)
-            authorities = Collections.emptyList();
-
-        UserGrantedAuthority[] serializing = new UserGrantedAuthority[authorities.size() + 1];
-
-        // password is stored as the [0] authority
-        serializing[0] = new UserGrantedAuthority(PWD_PREFIX + user.getPassword());
-        int i = 1;
-        for (GrantedAuthority a : authorities) {
-            serializing[i++] = new UserGrantedAuthority(a.getAuthority());
-        }
-
-        byte[] value = ugaSerializer.serialize(serializing);
-        return Pair.newPair(key, value);
-    }
-
-    @Override
-    public void createUser(UserDetails user) {
-        updateUser(user);
-    }
-
-    @Override
-    public void updateUser(UserDetails user) {
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(userTableName);
-
-            Pair<byte[], byte[]> pair = userToHBaseRow(user);
-            Put put = new Put(pair.getKey());
-
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
-
-            htable.put(put);
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-    }
-
-    @Override
-    public void deleteUser(String username) {
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(userTableName);
-
-            Delete delete = new Delete(Bytes.toBytes(username));
-
-            htable.delete(delete);
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-    }
-
-    @Override
-    public void changePassword(String oldPassword, String newPassword) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public boolean userExists(String username) {
-        Table htable = null;
-        try {
-            htable = aclHBaseStorage.getTable(userTableName);
-
-            Result result = htable.get(new Get(Bytes.toBytes(username)));
-
-            return null != result && !result.isEmpty();
-        } catch (IOException e) {
-            throw new RuntimeException(e.getMessage(), e);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
-    }
-
-    public List<String> listUserAuthorities() {
-        List<String> all = new ArrayList<String>();
-        for (UserDetails user : listUsers()) {
-            for (GrantedAuthority auth : user.getAuthorities()) {
-                if (!all.contains(auth.getAuthority())) {
-                    all.add(auth.getAuthority());
-                }
-            }
-        }
-        return all;
-    }
-
-    public List<UserDetails> listUsers() {
-        Scan s = new Scan();
-        s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
-
-        List<UserDetails> all = new ArrayList<UserDetails>();
-        Table htable = null;
-        ResultScanner scanner = null;
-        try {
-            htable = aclHBaseStorage.getTable(userTableName);
-            scanner = htable.getScanner(s);
-
-            for (Result result = scanner.next(); result != null; result = scanner.next()) {
-                User user = hbaseRowToUser(result);
-                all.add(user);
-            }
-        } catch (IOException e) {
-            throw new RuntimeException("Failed to scan users", e);
-        } finally {
-            IOUtils.closeQuietly(scanner);
-            IOUtils.closeQuietly(htable);
-        }
-        return all;
-    }
-
-}


[23/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/upgrade/entry/CubeMetadataUpgradeEntry_v_1_5_1.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/entry/CubeMetadataUpgradeEntry_v_1_5_1.java b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/entry/CubeMetadataUpgradeEntry_v_1_5_1.java
index f8ed82b..75036c0 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/entry/CubeMetadataUpgradeEntry_v_1_5_1.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/entry/CubeMetadataUpgradeEntry_v_1_5_1.java
@@ -29,23 +29,18 @@ public class CubeMetadataUpgradeEntry_v_1_5_1 {
     public static void main(String[] args) {
         if (!(args != null && (args.length == 1))) {
             System.out.println("Usage: java CubeMetadataUpgradeEntry_v_1_5_1 <metadata_export_folder>");
-            System.out.println(
-                    ", where metadata_export_folder is the folder containing your current metadata's dump (Upgrade program will not modify it directly, relax.");
+            System.out.println(", where metadata_export_folder is the folder containing your current metadata's dump (Upgrade program will not modify it directly, relax.");
             return;
         }
 
         try {
             CubeMetadataUpgrade_v_1_4_0.upgradeOrVerify(CubeMetadataUpgrade_v_1_4_0.class, args, true, false);
-            CubeMetadataUpgrade_v_1_5_1.upgradeOrVerify(CubeMetadataUpgrade_v_1_5_1.class,
-                    new String[] { args[0] + "_workspace" }, false, true);
+            CubeMetadataUpgrade_v_1_5_1.upgradeOrVerify(CubeMetadataUpgrade_v_1_5_1.class, new String[] { args[0] + "_workspace" }, false, true);
         } catch (Exception e) {
-            logger.error(
-                    "something went wrong when upgrading, don't override your metadata store with this workspace folder yet!",
-                    e);
+            logger.error("something went wrong when upgrading, don't override your metadata store with this workspace folder yet!", e);
             return;
         }
 
-        logger.info(
-                "The metadata upgrade is complete locally. You need to upload the metadata to you actual metadata store to verify locally. You need to upload the metadata to you actual metadata store to verify.");
+        logger.info("The metadata upgrade is complete locally. You need to upload the metadata to you actual metadata store to verify locally. You need to upload the metadata to you actual metadata store to verify.");
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/upgrade/v1_4_0/CubeMetadataUpgrade_v_1_4_0.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/v1_4_0/CubeMetadataUpgrade_v_1_4_0.java b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/v1_4_0/CubeMetadataUpgrade_v_1_4_0.java
index 02ba81a..587842a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/upgrade/v1_4_0/CubeMetadataUpgrade_v_1_4_0.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/upgrade/v1_4_0/CubeMetadataUpgrade_v_1_4_0.java
@@ -112,8 +112,7 @@ public class CubeMetadataUpgrade_v_1_4_0 extends CubeMetadataUpgrade {
                         if (!modelDimMap.containsKey(cubeDimDesc.getTable())) {
                             modelDimMap.put(cubeDimDesc.getTable(), new HashSet<String>());
                         }
-                        modelDimMap.get(cubeDimDesc.getTable()).addAll(Lists.newArrayList(
-                                cubeDimDesc.getDerived() != null ? cubeDimDesc.getDerived() : cubeDimDesc.getColumn()));
+                        modelDimMap.get(cubeDimDesc.getTable()).addAll(Lists.newArrayList(cubeDimDesc.getDerived() != null ? cubeDimDesc.getDerived() : cubeDimDesc.getColumn()));
                     }
 
                     List<ModelDimensionDesc> modelDimDescList = Lists.newArrayListWithCapacity(modelDimMap.size());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java b/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
index 78fbcd4..b1b6bce 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
@@ -59,8 +59,7 @@ public class CubingUtils {
 
     private static Logger logger = LoggerFactory.getLogger(CubingUtils.class);
 
-    public static Map<Long, HLLCounter> sampling(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDescIn,
-            Iterable<List<String>> streams) {
+    public static Map<Long, HLLCounter> sampling(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDescIn, Iterable<List<String>> streams) {
         final CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(flatDescIn, cubeDesc);
         final int rowkeyLength = cubeDesc.getRowkey().getRowKeyColumns().length;
         final List<Long> allCuboidIds = new CuboidScheduler(cubeDesc).getAllCuboidIds();
@@ -133,10 +132,8 @@ public class CubingUtils {
         return result;
     }
 
-    public static Map<TblColRef, Dictionary<String>> buildDictionary(final CubeInstance cubeInstance,
-            Iterable<List<String>> recordList) throws IOException {
-        final List<TblColRef> columnsNeedToBuildDictionary = cubeInstance.getDescriptor()
-                .listDimensionColumnsExcludingDerived(true);
+    public static Map<TblColRef, Dictionary<String>> buildDictionary(final CubeInstance cubeInstance, Iterable<List<String>> recordList) throws IOException {
+        final List<TblColRef> columnsNeedToBuildDictionary = cubeInstance.getDescriptor().listDimensionColumnsExcludingDerived(true);
         final HashMap<Integer, TblColRef> tblColRefMap = Maps.newHashMap();
         int index = 0;
         for (TblColRef column : columnsNeedToBuildDictionary) {
@@ -156,16 +153,14 @@ public class CubingUtils {
         }
         for (TblColRef tblColRef : valueMap.keySet()) {
             Set<String> values = valueMap.get(tblColRef);
-            Dictionary<String> dict = DictionaryGenerator.buildDictionary(tblColRef.getType(),
-                    new IterableDictionaryValueEnumerator(values));
+            Dictionary<String> dict = DictionaryGenerator.buildDictionary(tblColRef.getType(), new IterableDictionaryValueEnumerator(values));
             result.put(tblColRef, dict);
         }
         return result;
     }
 
     @SuppressWarnings("unchecked")
-    public static Map<TblColRef, Dictionary<String>> writeDictionary(CubeSegment cubeSegment,
-            Map<TblColRef, Dictionary<String>> dictionaryMap, long startOffset, long endOffset) {
+    public static Map<TblColRef, Dictionary<String>> writeDictionary(CubeSegment cubeSegment, Map<TblColRef, Dictionary<String>> dictionaryMap, long startOffset, long endOffset) {
         Map<TblColRef, Dictionary<String>> realDictMap = Maps.newHashMap();
 
         for (Map.Entry<TblColRef, Dictionary<String>> entry : dictionaryMap.entrySet()) {
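
Editor's note: the CubingUtils hunks sit inside buildDictionary, which first collects the distinct values seen for every dimension column that needs a dictionary, then encodes each value set. The collection step is essentially a column-index-to-value-set map; a plain-Java sketch of it follows (record layout and names are illustrative, not Kylin's API):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    // Illustrative only: gathers the distinct values per dictionary-encoded column,
    // which is the step buildDictionary performs before handing each set to the encoder.
    public class DistinctValueCollector {

        /** columnIndexes: positions (within each record) of the columns that need a dictionary. */
        static Map<Integer, Set<String>> collect(Iterable<List<String>> records, int[] columnIndexes) {
            Map<Integer, Set<String>> valuesPerColumn = new HashMap<>();
            for (int idx : columnIndexes) {
                valuesPerColumn.put(idx, new HashSet<String>());
            }
            for (List<String> record : records) {
                for (int idx : columnIndexes) {
                    String value = record.get(idx);
                    if (value != null) {
                        valuesPerColumn.get(idx).add(value);   // de-duplicate per column
                    }
                }
            }
            return valuesPerColumn;
        }
    }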

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
index 2ebec04..92d0fac 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
@@ -272,8 +272,7 @@ public class GTAggregateScanner implements IGTScanner {
                 final long estMemSize = estimatedMemSize();
                 if (spillThreshold > 0 && estMemSize > spillThreshold) {
                     if (!spillEnabled) {
-                        throw new ResourceLimitExceededException("aggregation's memory consumption " + estMemSize
-                                + " exceeds threshold " + spillThreshold);
+                        throw new ResourceLimitExceededException("aggregation's memory consumption " + estMemSize + " exceeds threshold " + spillThreshold);
                     }
                     spillBuffMap(estMemSize); // spill to disk
                     aggBufMap = createBuffMap();
@@ -357,8 +356,7 @@ public class GTAggregateScanner implements IGTScanner {
 
                 final ReturningRecord returningRecord = new ReturningRecord();
                 Entry<byte[], MeasureAggregator[]> returningEntry = null;
-                final HavingFilterChecker havingFilterChecker = (havingFilter == null) ? null
-                        : new HavingFilterChecker();
+                final HavingFilterChecker havingFilterChecker = (havingFilter == null) ? null : new HavingFilterChecker();
 
                 @Override
                 public boolean hasNext() {
@@ -532,8 +530,7 @@ public class GTAggregateScanner implements IGTScanner {
             public Iterator<Pair<byte[], byte[]>> iterator() {
                 try {
                     if (dumpedFile == null || !dumpedFile.exists()) {
-                        throw new RuntimeException("Dumped file cannot be found at: "
-                                + (dumpedFile == null ? "<null>" : dumpedFile.getAbsolutePath()));
+                        throw new RuntimeException("Dumped file cannot be found at: " + (dumpedFile == null ? "<null>" : dumpedFile.getAbsolutePath()));
                     }
 
                     dis = new DataInputStream(new FileInputStream(dumpedFile));
@@ -558,8 +555,7 @@ public class GTAggregateScanner implements IGTScanner {
                                 dis.read(value);
                                 return new Pair<>(key, value);
                             } catch (Exception e) {
-                                throw new RuntimeException(
-                                        "Cannot read AggregationCache from dumped file: " + e.getMessage());
+                                throw new RuntimeException("Cannot read AggregationCache from dumped file: " + e.getMessage());
                             }
                         }
 
@@ -574,8 +570,7 @@ public class GTAggregateScanner implements IGTScanner {
             }
 
             public void flush() throws IOException {
-                logger.info("AggregationCache(size={} est_mem_size={} threshold={}) will spill to {}", buffMap.size(),
-                        estMemSize, spillThreshold, dumpedFile.getAbsolutePath());
+                logger.info("AggregationCache(size={} est_mem_size={} threshold={}) will spill to {}", buffMap.size(), estMemSize, spillThreshold, dumpedFile.getAbsolutePath());
 
                 if (buffMap != null) {
                     DataOutputStream dos = null;
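
Editor's note: the GTAggregateScanner hunks concern the aggregation cache's spill path: when the estimated in-memory size crosses the configured threshold, the cache either fails fast (if spilling is disabled) or dumps the buffered entries to a local file and starts a fresh buffer. A compact sketch of that decision, using only the JDK and hypothetical names for the buffer and measures (not Kylin's actual class):

    import java.io.DataOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.util.Map;
    import java.util.TreeMap;

    // Hypothetical, trimmed-down version of the spill decision shown in the diff above.
    public class SpillingAggregationCache {
        private final long spillThreshold;   // bytes; <= 0 means "never spill"
        private final boolean spillEnabled;
        private Map<String, long[]> buffer = new TreeMap<>();

        SpillingAggregationCache(long spillThreshold, boolean spillEnabled) {
            this.spillThreshold = spillThreshold;
            this.spillEnabled = spillEnabled;
        }

        /** Called after aggregating a row; mirrors the threshold check in the diff. */
        void maybeSpill(long estimatedMemSize) throws IOException {
            if (spillThreshold > 0 && estimatedMemSize > spillThreshold) {
                if (!spillEnabled) {
                    throw new RuntimeException("aggregation's memory consumption " + estimatedMemSize
                            + " exceeds threshold " + spillThreshold);
                }
                spillToDisk();                  // dump the buffered entries to a local file
                buffer = new TreeMap<>();       // and continue with an empty buffer
            }
        }

        private void spillToDisk() throws IOException {
            File dumpedFile = File.createTempFile("agg-cache-", ".tmp");
            try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(dumpedFile))) {
                for (Map.Entry<String, long[]> e : buffer.entrySet()) {
                    dos.writeUTF(e.getKey());               // key
                    dos.writeInt(e.getValue().length);      // number of measure values
                    for (long v : e.getValue()) {
                        dos.writeLong(v);                   // measure values
                    }
                }
            }
        }
    }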

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTInfo.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTInfo.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTInfo.java
index df9a234..ba62af3 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTInfo.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTInfo.java
@@ -294,8 +294,7 @@ public class GTInfo {
                 BytesUtil.writeAsciiString(value.codeSystem.getClass().getCanonicalName(), out);
                 BytesSerializer<IGTCodeSystem> serializer = null;
                 try {
-                    serializer = (BytesSerializer<IGTCodeSystem>) value.codeSystem.getClass().getField("serializer")
-                            .get(null);
+                    serializer = (BytesSerializer<IGTCodeSystem>) value.codeSystem.getClass().getField("serializer").get(null);
                 } catch (IllegalAccessException | NoSuchFieldException e) {
                     throw new RuntimeException("failed to get serializer for " + value.codeSystem.getClass(), e);
                 }
@@ -327,8 +326,7 @@ public class GTInfo {
             } else {
                 try {
                     Class clazz = Class.forName(codeSystemType);
-                    BytesSerializer<IGTCodeSystem> serializer = (BytesSerializer<IGTCodeSystem>) clazz
-                            .getField("serializer").get(null);
+                    BytesSerializer<IGTCodeSystem> serializer = (BytesSerializer<IGTCodeSystem>) clazz.getField("serializer").get(null);
                     codeSystem = serializer.deserialize(in);
                 } catch (Exception e) {
                     throw new RuntimeException("Failed to deserialize IGTCodeSystem " + codeSystemType, e);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTRecord.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTRecord.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTRecord.java
index 97d9f53..3e62ea7 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTRecord.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTRecord.java
@@ -45,7 +45,7 @@ public class GTRecord implements Comparable<GTRecord>, Cloneable {
         }
         this.info = info;
     }
-
+    
     @Override
     public GTRecord clone() { // deep copy
         ByteArray[] cols = new ByteArray[this.cols.length];
@@ -114,6 +114,7 @@ public class GTRecord implements Comparable<GTRecord>, Cloneable {
         return result;
     }
 
+
     /** decode and return the values of this record */
     public Object[] getValues(int[] selectedColumns, Object[] result) {
         assert selectedColumns.length <= result.length;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTRowBlock.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTRowBlock.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTRowBlock.java
index 804d06d..2cbbd8d 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTRowBlock.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTRowBlock.java
@@ -94,8 +94,7 @@ public class GTRowBlock {
             primaryKey.copyFrom(other.primaryKey);
             for (int i = 0; i < info.colBlocks.length; i++) {
                 cellBlockBuffers[i].clear();
-                cellBlockBuffers[i].put(other.cellBlocks[i].array(), other.cellBlocks[i].offset(),
-                        other.cellBlocks[i].length());
+                cellBlockBuffers[i].put(other.cellBlocks[i].array(), other.cellBlocks[i].offset(), other.cellBlocks[i].length());
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index 1a89471..ffaa8bd 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -78,8 +78,7 @@ public class GTScanRequest {
     private transient boolean doingStorageAggregation = false;
 
     GTScanRequest(GTInfo info, List<GTScanRange> ranges, ImmutableBitSet dimensions, ImmutableBitSet aggrGroupBy, //
-            ImmutableBitSet aggrMetrics, String[] aggrMetricsFuncs, TupleFilter filterPushDown,
-            TupleFilter havingFilterPushDown, // 
+            ImmutableBitSet aggrMetrics, String[] aggrMetricsFuncs, TupleFilter filterPushDown, TupleFilter havingFilterPushDown, // 
             boolean allowStorageAggregation, double aggCacheMemThreshold, int storageScanRowNumThreshold, //
             int storagePushDownLimit, String storageBehavior, long startTime, long timeout) {
         this.info = info;
@@ -173,16 +172,14 @@ public class GTScanRequest {
      * 
      * Refer to CoprocessorBehavior for explanation
      */
-    public IGTScanner decorateScanner(IGTScanner scanner, boolean filterToggledOn, boolean aggrToggledOn)
-            throws IOException {
+    public IGTScanner decorateScanner(IGTScanner scanner, boolean filterToggledOn, boolean aggrToggledOn) throws IOException {
         return decorateScanner(scanner, filterToggledOn, aggrToggledOn, false, true);
     }
 
     /**
      * hasPreFiltered indicate the data has been filtered before scanning
      */
-    public IGTScanner decorateScanner(IGTScanner scanner, boolean filterToggledOn, boolean aggrToggledOn,
-            boolean hasPreFiltered, boolean spillEnabled) throws IOException {
+    public IGTScanner decorateScanner(IGTScanner scanner, boolean filterToggledOn, boolean aggrToggledOn, boolean hasPreFiltered, boolean spillEnabled) throws IOException {
         IGTScanner result = scanner;
         if (!filterToggledOn) { //Skip reading this section if you're not profiling! 
             lookAndForget(result);
@@ -283,7 +280,7 @@ public class GTScanRequest {
     public TupleFilter getHavingFilterPushDown() {
         return havingFilterPushDown;
     }
-
+    
     public ImmutableBitSet getDimensions() {
         return this.getColumns().andNot(this.getAggrMetrics());
     }
@@ -337,9 +334,7 @@ public class GTScanRequest {
 
     @Override
     public String toString() {
-        return "GTScanRequest [range=" + ranges + ", columns=" + columns + ", filterPushDown=" + filterPushDown
-                + ", aggrGroupBy=" + aggrGroupBy + ", aggrMetrics=" + aggrMetrics + ", aggrMetricsFuncs="
-                + Arrays.toString(aggrMetricsFuncs) + "]";
+        return "GTScanRequest [range=" + ranges + ", columns=" + columns + ", filterPushDown=" + filterPushDown + ", aggrGroupBy=" + aggrGroupBy + ", aggrMetrics=" + aggrMetrics + ", aggrMetricsFuncs=" + Arrays.toString(aggrMetricsFuncs) + "]";
     }
 
     public byte[] toByteArray() {
@@ -369,8 +364,7 @@ public class GTScanRequest {
 
             ImmutableBitSet.serializer.serialize(value.columns, out);
             BytesUtil.writeByteArray(GTUtil.serializeGTFilter(value.filterPushDown, value.info), out);
-            BytesUtil.writeByteArray(
-                    TupleFilterSerializer.serialize(value.havingFilterPushDown, StringCodeSystem.INSTANCE), out);
+            BytesUtil.writeByteArray(TupleFilterSerializer.serialize(value.havingFilterPushDown, StringCodeSystem.INSTANCE), out);
 
             ImmutableBitSet.serializer.serialize(value.aggrGroupBy, out);
             ImmutableBitSet.serializer.serialize(value.aggrMetrics, out);
@@ -404,8 +398,7 @@ public class GTScanRequest {
 
             ImmutableBitSet sColumns = ImmutableBitSet.serializer.deserialize(in);
             TupleFilter sGTFilter = GTUtil.deserializeGTFilter(BytesUtil.readByteArray(in), sInfo);
-            TupleFilter sGTHavingFilter = TupleFilterSerializer.deserialize(BytesUtil.readByteArray(in),
-                    StringCodeSystem.INSTANCE);
+            TupleFilter sGTHavingFilter = TupleFilterSerializer.deserialize(BytesUtil.readByteArray(in), StringCodeSystem.INSTANCE);
 
             ImmutableBitSet sAggGroupBy = ImmutableBitSet.serializer.deserialize(in);
             ImmutableBitSet sAggrMetrics = ImmutableBitSet.serializer.deserialize(in);
@@ -420,8 +413,7 @@ public class GTScanRequest {
 
             return new GTScanRequestBuilder().setInfo(sInfo).setRanges(sRanges).setDimensions(sColumns).//
             setAggrGroupBy(sAggGroupBy).setAggrMetrics(sAggrMetrics).setAggrMetricsFuncs(sAggrMetricFuncs).//
-            setFilterPushDown(sGTFilter).setHavingFilterPushDown(sGTHavingFilter)
-                    .setAllowStorageAggregation(sAllowPreAggr).setAggCacheMemThreshold(sAggrCacheGB).//
+            setFilterPushDown(sGTFilter).setHavingFilterPushDown(sGTHavingFilter).setAllowStorageAggregation(sAllowPreAggr).setAggCacheMemThreshold(sAggrCacheGB).//
             setStorageScanRowNumThreshold(storageScanRowNumThreshold).setStoragePushDownLimit(storagePushDownLimit).//
             setStartTime(startTime).setTimeout(timeout).setStorageBehavior(storageBehavior).createGTScanRequest();
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
index fe09061..ba1fdbc 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
@@ -61,7 +61,7 @@ public class GTScanRequestBuilder {
         this.havingFilterPushDown = havingFilterPushDown;
         return this;
     }
-
+    
     public GTScanRequestBuilder setDimensions(ImmutableBitSet dimensions) {
         this.dimensions = dimensions;
         return this;
@@ -131,16 +131,12 @@ public class GTScanRequestBuilder {
         }
 
         if (storageBehavior == null) {
-            storageBehavior = BackdoorToggles.getCoprocessorBehavior() == null
-                    ? StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString()
-                    : BackdoorToggles.getCoprocessorBehavior();
+            storageBehavior = BackdoorToggles.getCoprocessorBehavior() == null ? StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString() : BackdoorToggles.getCoprocessorBehavior();
         }
 
         this.startTime = startTime == -1 ? System.currentTimeMillis() : startTime;
         this.timeout = timeout == -1 ? 300000 : timeout;
 
-        return new GTScanRequest(info, ranges, dimensions, aggrGroupBy, aggrMetrics, aggrMetricsFuncs, filterPushDown,
-                havingFilterPushDown, allowStorageAggregation, aggCacheMemThreshold, storageScanRowNumThreshold,
-                storagePushDownLimit, storageBehavior, startTime, timeout);
+        return new GTScanRequest(info, ranges, dimensions, aggrGroupBy, aggrMetrics, aggrMetricsFuncs, filterPushDown, havingFilterPushDown, allowStorageAggregation, aggCacheMemThreshold, storageScanRowNumThreshold, storagePushDownLimit, storageBehavior, startTime, timeout);
     }
-}
+}
\ No newline at end of file
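
Editor's note: GTScanRequestBuilder fills in defaults (storage behavior, start time, timeout) for anything the caller does not set, so a scan request only needs the grid-table info plus whatever the caller wants to constrain. A usage sketch based on the builder chain visible in the benchmark classes later in this commit; passing null ranges and a null filter means a full scan with no push-down (package paths are taken from the file paths in this patch):

    import org.apache.kylin.common.util.ImmutableBitSet;
    import org.apache.kylin.gridtable.GTInfo;
    import org.apache.kylin.gridtable.GTScanRequest;
    import org.apache.kylin.gridtable.GTScanRequestBuilder;

    public class ScanRequestExample {
        // Builds a full-table scan request over the given dimensions; unset options
        // fall back to the builder's defaults.
        static GTScanRequest fullScan(GTInfo info, ImmutableBitSet dimensions) {
            return new GTScanRequestBuilder()
                    .setInfo(info)
                    .setRanges(null)              // null ranges = scan everything
                    .setDimensions(dimensions)
                    .setFilterPushDown(null)      // no filter push-down
                    .createGTScanRequest();
        }
    }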

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTStreamAggregateScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTStreamAggregateScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTStreamAggregateScanner.java
index 5914d09..4eb5791 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTStreamAggregateScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTStreamAggregateScanner.java
@@ -18,18 +18,17 @@
 
 package org.apache.kylin.gridtable;
 
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.PeekingIterator;
 import org.apache.kylin.GTForwardingScanner;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.measure.BufferedMeasureCodec;
 import org.apache.kylin.measure.MeasureAggregator;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.PeekingIterator;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
 
 /**
  * GTStreamAggregateScanner requires input records to be sorted on group fields.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/GTUtil.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTUtil.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTUtil.java
index 900b9ff..7a7e4e6 100755
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTUtil.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTUtil.java
@@ -81,9 +81,8 @@ public class GTUtil {
             final Set<TblColRef> unevaluatableColumnCollector) {
 
         IFilterCodeSystem<ByteArray> filterCodeSystem = wrap(info.codeSystem.getComparator());
-
-        GTConvertDecorator decorator = new GTConvertDecorator(unevaluatableColumnCollector, colMapping, info,
-                encodeConstants);
+        
+        GTConvertDecorator decorator = new GTConvertDecorator(unevaluatableColumnCollector, colMapping, info, encodeConstants);
 
         byte[] bytes = TupleFilterSerializer.serialize(rootFilter, decorator, filterCodeSystem);
         return TupleFilterSerializer.deserialize(bytes, filterCodeSystem);
@@ -123,15 +122,14 @@ public class GTUtil {
         protected final GTInfo info;
         protected final boolean encodeConstants;
 
-        public GTConvertDecorator(Set<TblColRef> unevaluatableColumnCollector, Map<TblColRef, Integer> colMapping,
-                GTInfo info, boolean encodeConstants) {
+        public GTConvertDecorator(Set<TblColRef> unevaluatableColumnCollector, Map<TblColRef, Integer> colMapping, GTInfo info, boolean encodeConstants) {
             this.unevaluatableColumnCollector = unevaluatableColumnCollector;
             this.colMapping = colMapping;
             this.info = info;
             this.encodeConstants = encodeConstants;
             buf = ByteBuffer.allocate(info.getMaxColumnLength());
         }
-
+        
         protected int mapCol(TblColRef col) {
             Integer i = colMapping.get(col);
             return i == null ? -1 : i;
@@ -145,8 +143,7 @@ public class GTUtil {
             // In case of NOT(unEvaluatableFilter), we should immediately replace it as TRUE,
             // Otherwise, unEvaluatableFilter will later be replace with TRUE and NOT(unEvaluatableFilter)
             // will always return FALSE.
-            if (filter.getOperator() == TupleFilter.FilterOperatorEnum.NOT
-                    && !TupleFilter.isEvaluableRecursively(filter)) {
+            if (filter.getOperator() == TupleFilter.FilterOperatorEnum.NOT && !TupleFilter.isEvaluableRecursively(filter)) {
                 TupleFilter.collectColumns(filter, unevaluatableColumnCollector);
                 return ConstantTupleFilter.TRUE;
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/UnitTestSupport.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/UnitTestSupport.java b/core-cube/src/main/java/org/apache/kylin/gridtable/UnitTestSupport.java
index 286f3df..6190f29 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/UnitTestSupport.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/UnitTestSupport.java
@@ -109,18 +109,12 @@ public class UnitTestSupport {
             result.add(newRec(info, d_01_14, "Luke", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
             result.add(newRec(info, d_01_15, "Xu", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
             result.add(newRec(info, d_01_15, "Dong", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
-            result.add(
-                    newRec(info, d_01_15, "Jason", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
-            result.add(
-                    newRec(info, d_01_16, "Mahone", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
-            result.add(newRec(info, d_01_16, "Shaofeng", "Food", new Long(10), new BigDecimal("10.5"),
-                    new HLLCounter(14)));
-            result.add(
-                    newRec(info, d_01_16, "Qianhao", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
-            result.add(
-                    newRec(info, d_01_16, "George", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
-            result.add(
-                    newRec(info, d_01_17, "Kejia", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
+            result.add(newRec(info, d_01_15, "Jason", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
+            result.add(newRec(info, d_01_16, "Mahone", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
+            result.add(newRec(info, d_01_16, "Shaofeng", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
+            result.add(newRec(info, d_01_16, "Qianhao", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
+            result.add(newRec(info, d_01_16, "George", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
+            result.add(newRec(info, d_01_17, "Kejia", "Food", new Long(10), new BigDecimal("10.5"), new HLLCounter(14)));
         }
         return result;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark.java b/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark.java
index 3fa3cee..589f37c 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark.java
@@ -111,9 +111,7 @@ public class GTScannerBenchmark {
     @SuppressWarnings("unused")
     private void testAggregate(ImmutableBitSet groupBy) throws IOException {
         long t = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(dimensions)
-                .setAggrGroupBy(groupBy).setAggrMetrics(metrics).setAggrMetricsFuncs(aggrFuncs).setFilterPushDown(null)
-                .createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(dimensions).setAggrGroupBy(groupBy).setAggrMetrics(metrics).setAggrMetricsFuncs(aggrFuncs).setFilterPushDown(null).createGTScanRequest();
         IGTScanner scanner = req.decorateScanner(gen.generate(N));
 
         long count = 0;
@@ -157,8 +155,7 @@ public class GTScannerBenchmark {
     @SuppressWarnings("unused")
     private void testFilter(TupleFilter filter) throws IOException {
         long t = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(info.getAllColumns())
-                .setFilterPushDown(filter).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(info.getAllColumns()).setFilterPushDown(filter).createGTScanRequest();
         IGTScanner scanner = req.decorateScanner(gen.generate(N));
 
         long count = 0;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark2.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark2.java b/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark2.java
index 5302949..85d8c37 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark2.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/GTScannerBenchmark2.java
@@ -133,9 +133,7 @@ public class GTScannerBenchmark2 {
     @SuppressWarnings("unused")
     private void testAggregate(ImmutableBitSet groupBy) throws IOException {
         long t = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(dimensions)
-                .setAggrGroupBy(groupBy).setAggrMetrics(metrics).setAggrMetricsFuncs(aggrFuncs).setFilterPushDown(null)
-                .createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(dimensions).setAggrGroupBy(groupBy).setAggrMetrics(metrics).setAggrMetricsFuncs(aggrFuncs).setFilterPushDown(null).createGTScanRequest();
         IGTScanner scanner = req.decorateScanner(gen.generate(N));
 
         long count = 0;
@@ -179,8 +177,7 @@ public class GTScannerBenchmark2 {
     @SuppressWarnings("unused")
     private void testFilter(TupleFilter filter) throws IOException {
         long t = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(info.getAllColumns())
-                .setFilterPushDown(filter).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(info.getAllColumns()).setFilterPushDown(filter).createGTScanRequest();
         IGTScanner scanner = req.decorateScanner(gen.generate(N));
 
         long count = 0;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java b/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
index dd50b1e..d81d366 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
@@ -86,8 +86,7 @@ public class AggregationGroupRuleTest extends LocalFileMetadataTestCase {
             rule.validate(desc, vContext);
             vContext.print(System.out);
             assertTrue(vContext.getResults().length > 0);
-            assertTrue(
-                    vContext.getResults()[0].getMessage().startsWith("Aggregation group 0 has too many combinations"));
+            assertTrue(vContext.getResults()[0].getMessage().startsWith("Aggregation group 0 has too many combinations"));
         }
     }
 
@@ -95,9 +94,7 @@ public class AggregationGroupRuleTest extends LocalFileMetadataTestCase {
     public void testGoodDesc2() throws IOException {
 
         ValidateContext vContext = new ValidateContext();
-        CubeDesc desc = JsonUtil.readValue(new FileInputStream(
-                LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"),
-                CubeDesc.class);
+        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"), CubeDesc.class);
         desc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] { //
                 new String[] { "lstg_format_name", "lstg_site_id", "slr_segment_cd", "CATEG_LVL2_NAME" } };
 
@@ -111,40 +108,30 @@ public class AggregationGroupRuleTest extends LocalFileMetadataTestCase {
     public void testBadDesc1() throws IOException {
 
         ValidateContext vContext = new ValidateContext();
-        CubeDesc desc = JsonUtil.readValue(new FileInputStream(
-                LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"),
-                CubeDesc.class);
-        String[] temp = Arrays.asList(desc.getAggregationGroups().get(0).getIncludes()).subList(0, 3)
-                .toArray(new String[3]);
+        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"), CubeDesc.class);
+        String[] temp = Arrays.asList(desc.getAggregationGroups().get(0).getIncludes()).subList(0, 3).toArray(new String[3]);
 
         desc.getAggregationGroups().get(0).setIncludes(temp);
         IValidatorRule<CubeDesc> rule = getAggregationGroupRule();
         rule.validate(desc, vContext);
         vContext.print(System.out);
         assertEquals(1, vContext.getResults().length);
-        assertEquals(
-                "Aggregation group 0 'includes' dimensions not include all the dimensions:[seller_id, META_CATEG_NAME, lstg_format_name, lstg_site_id, slr_segment_cd]",
-                (vContext.getResults()[0].getMessage()));
+        assertEquals("Aggregation group 0 'includes' dimensions not include all the dimensions:[seller_id, META_CATEG_NAME, lstg_format_name, lstg_site_id, slr_segment_cd]", (vContext.getResults()[0].getMessage()));
     }
 
     @Test
     public void testBadDesc2() throws IOException {
 
         ValidateContext vContext = new ValidateContext();
-        CubeDesc desc = JsonUtil.readValue(new FileInputStream(
-                LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"),
-                CubeDesc.class);
+        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"), CubeDesc.class);
         desc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] { //
-                new String[] { "lstg_format_name", "lstg_site_id", "slr_segment_cd", "META_CATEG_NAME",
-                        "CATEG_LVL2_NAME" } };
+                new String[] { "lstg_format_name", "lstg_site_id", "slr_segment_cd", "META_CATEG_NAME", "CATEG_LVL2_NAME" } };
 
         IValidatorRule<CubeDesc> rule = getAggregationGroupRule();
         rule.validate(desc, vContext);
         vContext.print(System.out);
         assertEquals(1, vContext.getResults().length);
-        assertEquals(
-                "Aggregation group 0 joint dimensions has overlap with more than 1 dimensions in same hierarchy: [CATEG_LVL2_NAME, META_CATEG_NAME]",
-                (vContext.getResults()[0].getMessage()));
+        assertEquals("Aggregation group 0 joint dimensions has overlap with more than 1 dimensions in same hierarchy: [CATEG_LVL2_NAME, META_CATEG_NAME]", (vContext.getResults()[0].getMessage()));
     }
 
     @Test
@@ -157,8 +144,7 @@ public class AggregationGroupRuleTest extends LocalFileMetadataTestCase {
         }
 
         ValidateContext vContext = new ValidateContext();
-        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA
-                + "/cube_desc/ut_cube_desc_combination_int_overflow.json"), CubeDesc.class);
+        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA + "/cube_desc/ut_cube_desc_combination_int_overflow.json"), CubeDesc.class);
 
         IValidatorRule<CubeDesc> rule = getAggregationGroupRule();
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
index c5d89eb..0bbc874 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
@@ -110,7 +110,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         CubeDesc ic = mgr.getCubeDesc("ci_inner_join_cube");
         assertNotNull(lc);
         assertNotNull(ic);
-
+        
         // assert the two CI cubes are identical apart from the left/inner difference
         assertEquals(lc.getDimensions().size(), ic.getDimensions().size());
         for (int i = 0, n = lc.getDimensions().size(); i < n; i++) {
@@ -120,11 +120,11 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
             assertEquals(ld.getColumn(), id.getColumn());
             assertArrayEquals(ld.getDerived(), id.getDerived());
         }
-
+        
         // To enable spark in IT, the inner cube removed the percentile measure, so ignore that particular measure
         List<MeasureDesc> lcMeasures = dropPercentile(lc.getMeasures());
         List<MeasureDesc> icMeasures = ic.getMeasures();
-
+        
         assertEquals(lcMeasures.size(), icMeasures.size());
         for (int i = 0, n = lcMeasures.size(); i < n; i++) {
             MeasureDesc lm = lcMeasures.get(i);
@@ -133,7 +133,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
             assertEquals(lm.getFunction().getFullExpression(), im.getFunction().getFullExpression());
             assertEquals(lm.getFunction().getReturnType(), im.getFunction().getReturnType());
         }
-
+        
         assertEquals(lc.getAggregationGroups().size(), ic.getAggregationGroups().size());
         for (int i = 0, n = lc.getAggregationGroups().size(); i < n; i++) {
             AggregationGroup lag = lc.getAggregationGroups().get(i);
@@ -143,15 +143,15 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
             assertArrayEquals(lag.getSelectRule().hierarchyDims, iag.getSelectRule().hierarchyDims);
             assertArrayEquals(lag.getSelectRule().jointDims, iag.getSelectRule().jointDims);
         }
-
+        
         assertEquals(lc.listAllColumnDescs().size(), ic.listAllColumnDescs().size());
         assertEquals(lc.listAllColumns().size(), ic.listAllColumns().size());
-
+        
         // test KYLIN-2440
         assertTrue(lc.listAllColumns().contains(lc.getModel().findColumn("SELLER_ACCOUNT.ACCOUNT_ID")));
         assertTrue(ic.listAllColumns().contains(ic.getModel().findColumn("SELLER_ACCOUNT.ACCOUNT_ID")));
     }
-
+    
     private List<MeasureDesc> dropPercentile(List<MeasureDesc> measures) {
         ArrayList<MeasureDesc> result = new ArrayList<>();
         for (MeasureDesc m : measures) {
@@ -196,8 +196,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         thrown.expect(IllegalStateException.class);
         thrown.expectMessage("Aggregation group 0 'includes' dimensions not include all the dimensions:");
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        String[] temp = Arrays.asList(cubeDesc.getAggregationGroups().get(0).getIncludes()).subList(0, 3)
-                .toArray(new String[3]);
+        String[] temp = Arrays.asList(cubeDesc.getAggregationGroups().get(0).getIncludes()).subList(0, 3).toArray(new String[3]);
         cubeDesc.getAggregationGroups().get(0).setIncludes(temp);
 
         cubeDesc.init(getTestConfig());
@@ -206,8 +205,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
     @Test
     public void testBadInit4() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage(
-                "Aggregation group 0 has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.");
+        thrown.expectMessage("Aggregation group 0 has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
         try {
@@ -222,8 +220,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
     @Test
     public void testBadInit5() throws Exception {
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().mandatoryDims = new String[] { SELLER_ID,
-                META_CATEG_NAME };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().mandatoryDims = new String[] { SELLER_ID, META_CATEG_NAME };
 
         cubeDesc.init(getTestConfig());
     }
@@ -231,8 +228,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
     @Test
     public void testBadInit6() throws Exception {
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().mandatoryDims = new String[] { SELLER_ID,
-                LSTG_FORMAT_NAME };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().mandatoryDims = new String[] { SELLER_ID, LSTG_FORMAT_NAME };
 
         cubeDesc.init(getTestConfig());
     }
@@ -243,8 +239,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         thrown.expectMessage("Aggregation group 0 require at least 2 dimensions in a joint");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] {
-                new String[] { LSTG_FORMAT_NAME } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] { new String[] { LSTG_FORMAT_NAME } };
 
         cubeDesc.init(getTestConfig());
     }
@@ -253,12 +248,10 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
     public void testBadInit8() throws Exception {
         String[] strs = new String[] { CATEG_LVL2_NAME, META_CATEG_NAME };
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage(
-                "Aggregation group 0 hierarchy dimensions overlap with joint dimensions: " + sortStrs(strs));
+        thrown.expectMessage("Aggregation group 0 hierarchy dimensions overlap with joint dimensions: " + sortStrs(strs));
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] {
-                new String[] { META_CATEG_NAME, CATEG_LVL2_NAME } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] { new String[] { META_CATEG_NAME, CATEG_LVL2_NAME } };
 
         cubeDesc.init(getTestConfig());
     }
@@ -267,14 +260,10 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
     public void testBadInit9() throws Exception {
         String[] strs = new String[] { LSTG_FORMAT_NAME, META_CATEG_NAME };
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage(
-                "Aggregation group 0 hierarchy dimensions overlap with joint dimensions: " + sortStrs(strs));
+        thrown.expectMessage("Aggregation group 0 hierarchy dimensions overlap with joint dimensions: " + sortStrs(strs));
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchyDims = new String[][] {
-                new String[] { META_CATEG_NAME, CATEG_LVL2_NAME, CATEG_LVL3_NAME },
-                new String[] { LSTG_FORMAT_NAME, LSTG_SITE_ID } };
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] {
-                new String[] { META_CATEG_NAME, LSTG_FORMAT_NAME } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchyDims = new String[][] { new String[] { META_CATEG_NAME, CATEG_LVL2_NAME, CATEG_LVL3_NAME }, new String[] { LSTG_FORMAT_NAME, LSTG_SITE_ID } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] { new String[] { META_CATEG_NAME, LSTG_FORMAT_NAME } };
 
         cubeDesc.init(getTestConfig());
     }
@@ -286,9 +275,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         thrown.expectMessage("Aggregation group 0 a dimension exist in more than one joint: " + sortStrs(strs));
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] {
-                new String[] { LSTG_FORMAT_NAME, LSTG_SITE_ID, SLR_SEGMENT_CD },
-                new String[] { LSTG_FORMAT_NAME, LSTG_SITE_ID, LEAF_CATEG_ID } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().jointDims = new String[][] { new String[] { LSTG_FORMAT_NAME, LSTG_SITE_ID, SLR_SEGMENT_CD }, new String[] { LSTG_FORMAT_NAME, LSTG_SITE_ID, LEAF_CATEG_ID } };
 
         cubeDesc.init(getTestConfig());
     }
@@ -299,8 +286,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         thrown.expectMessage("Aggregation group 0 require at least 2 dimensions in a hierarchy.");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchyDims = new String[][] {
-                new String[] { META_CATEG_NAME } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchyDims = new String[][] { new String[] { META_CATEG_NAME } };
 
         cubeDesc.init(getTestConfig());
     }
@@ -312,9 +298,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         thrown.expectMessage("Aggregation group 0 a dimension exist in more than one hierarchy: " + sortStrs(strs));
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
-        cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchyDims = new String[][] {
-                new String[] { META_CATEG_NAME, CATEG_LVL2_NAME, CATEG_LVL3_NAME },
-                new String[] { META_CATEG_NAME, CATEG_LVL2_NAME } };
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchyDims = new String[][] { new String[] { META_CATEG_NAME, CATEG_LVL2_NAME, CATEG_LVL3_NAME }, new String[] { META_CATEG_NAME, CATEG_LVL2_NAME } };
 
         cubeDesc.init(getTestConfig());
     }
@@ -330,8 +314,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
 
         thrown.expect(IllegalStateException.class);
         CubeDescManager.clearCache();
-        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig())
-                .getCubeDesc("ut_cube_desc_combination_int_overflow");
+        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ut_cube_desc_combination_int_overflow");
         cubeDesc.init(getTestConfig());
     }
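
Editor's note: the CubeDescTest hunks rely on JUnit 4's ExpectedException rule: each test declares the expected exception type and a prefix of its message before triggering cubeDesc.init(). For readers unfamiliar with the rule, the skeleton looks roughly like this (the validation message is copied from the test above; the throw statement is a hypothetical stand-in for the real trigger):

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;

    public class ExpectedExceptionSketch {

        @Rule
        public ExpectedException thrown = ExpectedException.none();

        @Test
        public void failsWithAClearValidationMessage() {
            // Expectations must be declared before the code that throws.
            thrown.expect(IllegalStateException.class);
            thrown.expectMessage("Aggregation group 0 require at least 2 dimensions in a joint");

            // Stand-in for cubeDesc.init(getTestConfig()), which raises the validation error.
            throw new IllegalStateException(
                    "Aggregation group 0 require at least 2 dimensions in a joint");
        }
    }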
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
index 3f8e439..3cae37d 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
@@ -81,8 +81,7 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
 
         CubeDescManager cubeDescMgr = getCubeDescManager();
         CubeDesc desc = cubeDescMgr.getCubeDesc("test_kylin_cube_with_slr_desc");
-        CubeInstance createdCube = cubeMgr.createCube("a_whole_new_cube", ProjectInstance.DEFAULT_PROJECT_NAME, desc,
-                null);
+        CubeInstance createdCube = cubeMgr.createCube("a_whole_new_cube", ProjectInstance.DEFAULT_PROJECT_NAME, desc, null);
         assertTrue(createdCube == cubeMgr.getCube("a_whole_new_cube"));
 
         assertTrue(prjMgr.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME).contains(createdCube));
@@ -127,6 +126,7 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
 
     }
 
+
     @Test
     public void testConcurrentBuildAndMerge() throws Exception {
         CubeManager mgr = CubeManager.getInstance(getTestConfig());
@@ -135,30 +135,34 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
         // no segment at first
         assertEquals(0, cube.getSegments().size());
 
-        Map m1 = Maps.newHashMap();
+        Map m1 =  Maps.newHashMap();
         m1.put(1, 1000L);
-        Map m2 = Maps.newHashMap();
+        Map m2 =  Maps.newHashMap();
         m2.put(1, 2000L);
-        Map m3 = Maps.newHashMap();
+        Map m3 =  Maps.newHashMap();
         m3.put(1, 3000L);
-        Map m4 = Maps.newHashMap();
+        Map m4 =  Maps.newHashMap();
         m4.put(1, 4000L);
 
         // append first
         CubeSegment seg1 = mgr.appendSegment(cube, 0, 0, 0, 1000, null, m1);
         seg1.setStatus(SegmentStatusEnum.READY);
 
+
         CubeSegment seg2 = mgr.appendSegment(cube, 0, 0, 1000, 2000, m1, m2);
         seg2.setStatus(SegmentStatusEnum.READY);
 
+
         CubeSegment seg3 = mgr.mergeSegments(cube, 0, 0, 0000, 2000, true);
         seg3.setStatus(SegmentStatusEnum.NEW);
 
+
         CubeSegment seg4 = mgr.appendSegment(cube, 0, 0, 2000, 3000, m2, m3);
         seg4.setStatus(SegmentStatusEnum.NEW);
         seg4.setLastBuildJobID("test");
         seg4.setStorageLocationIdentifier("test");
 
+
         CubeSegment seg5 = mgr.appendSegment(cube, 0, 0, 3000, 4000, m3, m4);
         seg5.setStatus(SegmentStatusEnum.READY);
 
@@ -166,23 +170,20 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
 
         mgr.updateCube(cubeBuilder);
 
+
         mgr.promoteNewlyBuiltSegments(cube, seg4);
 
         assertTrue(cube.getSegments().size() == 5);
 
-        assertTrue(cube.getSegmentById(seg1.getUuid()) != null
-                && cube.getSegmentById(seg1.getUuid()).getStatus() == SegmentStatusEnum.READY);
-        assertTrue(cube.getSegmentById(seg2.getUuid()) != null
-                && cube.getSegmentById(seg2.getUuid()).getStatus() == SegmentStatusEnum.READY);
-        assertTrue(cube.getSegmentById(seg3.getUuid()) != null
-                && cube.getSegmentById(seg3.getUuid()).getStatus() == SegmentStatusEnum.NEW);
-        assertTrue(cube.getSegmentById(seg4.getUuid()) != null
-                && cube.getSegmentById(seg4.getUuid()).getStatus() == SegmentStatusEnum.READY);
-        assertTrue(cube.getSegmentById(seg5.getUuid()) != null
-                && cube.getSegmentById(seg5.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(seg1.getUuid()) != null && cube.getSegmentById(seg1.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(seg2.getUuid()) != null && cube.getSegmentById(seg2.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(seg3.getUuid()) != null && cube.getSegmentById(seg3.getUuid()).getStatus() == SegmentStatusEnum.NEW);
+        assertTrue(cube.getSegmentById(seg4.getUuid()) != null && cube.getSegmentById(seg4.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(seg5.getUuid()) != null && cube.getSegmentById(seg5.getUuid()).getStatus() == SegmentStatusEnum.READY);
 
     }
 
+
     @Test
     public void testConcurrentMergeAndMerge() throws Exception {
         System.setProperty("kylin.cube.max-building-segments", "10");
@@ -191,13 +192,13 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
 
         // no segment at first
         assertEquals(0, cube.getSegments().size());
-        Map m1 = Maps.newHashMap();
+        Map m1 =  Maps.newHashMap();
         m1.put(1, 1000L);
-        Map m2 = Maps.newHashMap();
+        Map m2 =  Maps.newHashMap();
         m2.put(1, 2000L);
-        Map m3 = Maps.newHashMap();
+        Map m3 =  Maps.newHashMap();
         m3.put(1, 3000L);
-        Map m4 = Maps.newHashMap();
+        Map m4 =  Maps.newHashMap();
         m4.put(1, 4000L);
 
         // append first
@@ -213,6 +214,8 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
         CubeSegment seg4 = mgr.appendSegment(cube, 0, 0, 3000, 4000, m3, m4);
         seg4.setStatus(SegmentStatusEnum.READY);
 
+
+
         CubeSegment merge1 = mgr.mergeSegments(cube, 0, 0, 0, 2000, true);
         merge1.setStatus(SegmentStatusEnum.NEW);
         merge1.setLastBuildJobID("test");
@@ -226,20 +229,17 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
         CubeUpdate cubeBuilder = new CubeUpdate(cube);
         mgr.updateCube(cubeBuilder);
 
+
         mgr.promoteNewlyBuiltSegments(cube, merge1);
 
         assertTrue(cube.getSegments().size() == 4);
 
         assertTrue(cube.getSegmentById(seg1.getUuid()) == null);
         assertTrue(cube.getSegmentById(seg2.getUuid()) == null);
-        assertTrue(cube.getSegmentById(merge1.getUuid()) != null
-                && cube.getSegmentById(merge1.getUuid()).getStatus() == SegmentStatusEnum.READY);
-        assertTrue(cube.getSegmentById(seg3.getUuid()) != null
-                && cube.getSegmentById(seg3.getUuid()).getStatus() == SegmentStatusEnum.READY);
-        assertTrue(cube.getSegmentById(seg4.getUuid()) != null
-                && cube.getSegmentById(seg4.getUuid()).getStatus() == SegmentStatusEnum.READY);
-        assertTrue(cube.getSegmentById(merge2.getUuid()) != null
-                && cube.getSegmentById(merge2.getUuid()).getStatus() == SegmentStatusEnum.NEW);
+        assertTrue(cube.getSegmentById(merge1.getUuid()) != null && cube.getSegmentById(merge1.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(seg3.getUuid()) != null && cube.getSegmentById(seg3.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(seg4.getUuid()) != null && cube.getSegmentById(seg4.getUuid()).getStatus() == SegmentStatusEnum.READY);
+        assertTrue(cube.getSegmentById(merge2.getUuid()) != null && cube.getSegmentById(merge2.getUuid()).getStatus() == SegmentStatusEnum.NEW);
 
     }
 
@@ -249,8 +249,7 @@ public class CubeManagerTest extends LocalFileMetadataTestCase {
         final NavigableSet<String> cubePath = store.listResources(ResourceStore.CUBE_RESOURCE_ROOT);
         assertTrue(cubePath.size() > 1);
 
-        final List<CubeInstance> cubes = store.getAllResources(ResourceStore.CUBE_RESOURCE_ROOT, CubeInstance.class,
-                CubeManager.CUBE_SERIALIZER);
+        final List<CubeInstance> cubes = store.getAllResources(ResourceStore.CUBE_RESOURCE_ROOT, CubeInstance.class, CubeManager.CUBE_SERIALIZER);
         assertEquals(cubePath.size(), cubes.size());
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
index 2dddcf5..17c02cc 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
@@ -20,13 +20,13 @@ package org.apache.kylin.cube;
 
 import static org.junit.Assert.assertEquals;
 
-import java.io.IOException;
-
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HotLoadKylinPropertiesTestCase;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.junit.Test;
 
+import java.io.IOException;
+
 public class CubeSpecificConfigTest extends HotLoadKylinPropertiesTestCase {
     @Test
     public void test() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/RowKeyAttrRuleTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/RowKeyAttrRuleTest.java b/core-cube/src/test/java/org/apache/kylin/cube/RowKeyAttrRuleTest.java
index 2836c31..40a458a 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/RowKeyAttrRuleTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/RowKeyAttrRuleTest.java
@@ -49,9 +49,7 @@ public class RowKeyAttrRuleTest {
     @Test
     public void testBadDesc() throws IOException {
         ValidateContext vContext = new ValidateContext();
-        CubeDesc desc = JsonUtil.readValue(new FileInputStream(
-                LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"),
-                CubeDesc.class);
+        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/cube_desc/test_kylin_cube_with_slr_desc.json"), CubeDesc.class);
         desc.getRowkey().getRowKeyColumns()[2].setColumn("");
         IValidatorRule<CubeDesc> rule = new RowKeyAttrRule();
         rule.validate(desc, vContext);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java b/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
index 480bd22..074d973 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
@@ -36,10 +36,8 @@ public class BytesSplitterTest {
         bytesSplitter.split(input, input.length, (byte) 127);
 
         assertEquals(2, bytesSplitter.getBufferSize());
-        assertEquals("2013-02-17",
-                new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
-        assertEquals("Collectibles",
-                new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
+        assertEquals("2013-02-17", new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
+        assertEquals("Collectibles", new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
     }
 
     @Test
@@ -49,11 +47,8 @@ public class BytesSplitterTest {
         bytesSplitter.split(input, input.length, (byte) 127);
 
         assertEquals(3, bytesSplitter.getBufferSize());
-        assertEquals("2013-02-17",
-                new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
-        assertEquals("",
-                new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
-        assertEquals("Collectibles",
-                new String(bytesSplitter.getSplitBuffers()[2].value, 0, bytesSplitter.getSplitBuffers()[2].length));
+        assertEquals("2013-02-17", new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
+        assertEquals("", new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
+        assertEquals("Collectibles", new String(bytesSplitter.getSplitBuffers()[2].value, 0, bytesSplitter.getSplitBuffers()[2].length));
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/common/RowKeySplitterTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/common/RowKeySplitterTest.java b/core-cube/src/test/java/org/apache/kylin/cube/common/RowKeySplitterTest.java
index a738ffb..b41f23b 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/common/RowKeySplitterTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/common/RowKeySplitterTest.java
@@ -48,8 +48,7 @@ public class RowKeySplitterTest extends LocalFileMetadataTestCase {
 
         RowKeySplitter rowKeySplitter = new RowKeySplitter(cube.getFirstSegment(), 11, 20);
         // base cuboid rowkey
-        byte[] input = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 55, 9, 9, 9, 9, 9, 9, 9, 9,
-                9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
+        byte[] input = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 55, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
         rowKeySplitter.split(input);
 
         assertEquals(11, rowKeySplitter.getBufferSize());
@@ -62,8 +61,7 @@ public class RowKeySplitterTest extends LocalFileMetadataTestCase {
 
         RowKeySplitter rowKeySplitter = new RowKeySplitter(cube.getFirstSegment(), 11, 20);
         // base cuboid rowkey
-        byte[] input = { 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9,
-                9, 9, 9, 0, 10, 5 };
+        byte[] input = { 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 };
         rowKeySplitter.split(input);
 
         assertEquals(10, rowKeySplitter.getBufferSize());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CombinationTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CombinationTest.java b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CombinationTest.java
index 313fdea..a69f179 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CombinationTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CombinationTest.java
@@ -30,8 +30,7 @@ public class CombinationTest {
 
     public int findSmallerSibling(long valueBits, long valueMask) {
         if ((valueBits | valueMask) != valueMask) {
-            throw new IllegalArgumentException(
-                    "Dismatch " + Long.toBinaryString(valueBits) + " from " + Long.toBinaryString(valueMask));
+            throw new IllegalArgumentException("Dismatch " + Long.toBinaryString(valueBits) + " from " + Long.toBinaryString(valueMask));
         }
 
         int n = Long.bitCount(valueMask);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
index 0b80f3c..e2a71db 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
@@ -107,14 +107,12 @@ public class CuboidSchedulerTest extends LocalFileMetadataTestCase {
         return getCubeDescManager().getCubeDesc("ci_inner_join_cube");
     }
 
-    private void testSpanningAndGetParent(CuboidScheduler scheduler, CubeDesc cube, long[] cuboidIds,
-            long[] expectChildren) {
+    private void testSpanningAndGetParent(CuboidScheduler scheduler, CubeDesc cube, long[] cuboidIds, long[] expectChildren) {
         Set<Long> totalSpanning = Sets.newHashSet();
         for (long cuboidId : cuboidIds) {
             List<Long> spannings = scheduler.getSpanningCuboid(cuboidId);
             totalSpanning.addAll(spannings);
-            System.out.println("Spanning result for " + cuboidId + "(" + Long.toBinaryString(cuboidId) + "): "
-                    + toString(spannings));
+            System.out.println("Spanning result for " + cuboidId + "(" + Long.toBinaryString(cuboidId) + "): " + toString(spannings));
 
             for (long child : spannings) {
                 assertTrue(Cuboid.isValid(cube, child));
@@ -137,20 +135,16 @@ public class CuboidSchedulerTest extends LocalFileMetadataTestCase {
         testSpanningAndGetParent(scheduler, cube, new long[] { 511 }, new long[] { 504, 447, 503, 383 });
         // generate 7d
         System.out.println("Spanning for 7D Cuboids");
-        testSpanningAndGetParent(scheduler, cube, new long[] { 504, 447, 503, 383 },
-                new long[] { 440, 496, 376, 439, 487, 319, 375 });
+        testSpanningAndGetParent(scheduler, cube, new long[] { 504, 447, 503, 383 }, new long[] { 440, 496, 376, 439, 487, 319, 375 });
         // generate 6d
         System.out.println("Spanning for 6D Cuboids");
-        testSpanningAndGetParent(scheduler, cube, new long[] { 440, 496, 376, 439, 487, 319, 375 },
-                new long[] { 432, 480, 312, 368, 423, 455, 311, 359 });
+        testSpanningAndGetParent(scheduler, cube, new long[] { 440, 496, 376, 439, 487, 319, 375 }, new long[] { 432, 480, 312, 368, 423, 455, 311, 359 });
         // generate 5d
         System.out.println("Spanning for 5D Cuboids");
-        testSpanningAndGetParent(scheduler, cube, new long[] { 432, 480, 312, 368, 423, 455, 311, 359 },
-                new long[] { 416, 448, 304, 352, 391, 295, 327 });
+        testSpanningAndGetParent(scheduler, cube, new long[] { 432, 480, 312, 368, 423, 455, 311, 359 }, new long[] { 416, 448, 304, 352, 391, 295, 327 });
         // generate 4d
         System.out.println("Spanning for 4D Cuboids");
-        testSpanningAndGetParent(scheduler, cube, new long[] { 416, 448, 304, 352, 391, 295, 327 },
-                new long[] { 384, 288, 320, 263 });
+        testSpanningAndGetParent(scheduler, cube, new long[] { 416, 448, 304, 352, 391, 295, 327 }, new long[] { 384, 288, 320, 263 });
         // generate 3d
         System.out.println("Spanning for 3D Cuboids");
         testSpanningAndGetParent(scheduler, cube, new long[] { 384, 288, 320, 263 }, new long[0]);
@@ -169,12 +163,10 @@ public class CuboidSchedulerTest extends LocalFileMetadataTestCase {
         testSpanningAndGetParent(scheduler, cube, new long[] { 255 }, new long[] { 135, 251, 253, 254 });
         // generate 6d
         System.out.println("Spanning for 6D Cuboids");
-        testSpanningAndGetParent(scheduler, cube, new long[] { 135, 251, 253, 254 },
-                new long[] { 131, 133, 134, 249, 250, 252 });
+        testSpanningAndGetParent(scheduler, cube, new long[] { 135, 251, 253, 254 }, new long[] { 131, 133, 134, 249, 250, 252 });
         // generate 5d
         System.out.println("Spanning for 5D Cuboids");
-        testSpanningAndGetParent(scheduler, cube, new long[] { 131, 133, 134, 249, 250, 252 },
-                new long[] { 129, 130, 132, 248 });
+        testSpanningAndGetParent(scheduler, cube, new long[] { 131, 133, 134, 249, 250, 252 }, new long[] { 129, 130, 132, 248 });
         // generate 4d
         System.out.println("Spanning for 4D Cuboids");
         testSpanningAndGetParent(scheduler, cube, new long[] { 129, 130, 132, 248 }, new long[] { 184, 240 });
@@ -332,7 +324,7 @@ public class CuboidSchedulerTest extends LocalFileMetadataTestCase {
         System.out.println("build tree takes: " + (System.currentTimeMillis() - start) + "ms");
     }
 
-    @Test(expected = RuntimeException.class)
+    @Test(expected=RuntimeException.class)
     public void testTooManyCombination() {
         File twentyFile = new File(new File(LocalFileMetadataTestCase.LOCALMETA_TEMP_DATA, "cube_desc"), "twenty_dim");
         twentyFile.renameTo(new File(twentyFile.getPath().substring(0, twentyFile.getPath().length() - 4)));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStoreTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStoreTest.java b/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStoreTest.java
index 7dab04b..0de1406 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStoreTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStoreTest.java
@@ -85,8 +85,7 @@ public class ConcurrentDiskStoreTest extends LocalFileMetadataTestCase {
             t[i] = new Thread() {
                 public void run() {
                     try {
-                        IGTScanner scanner = table.scan(new GTScanRequestBuilder().setInfo(table.getInfo())
-                                .setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
+                        IGTScanner scanner = table.scan(new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
                         int i = 0;
                         for (GTRecord r : scanner) {
                             assertEquals(data.get(i++), r);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/MemDiskStoreTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/MemDiskStoreTest.java b/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/MemDiskStoreTest.java
index dcaea54..a64fc21 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/MemDiskStoreTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/inmemcubing/MemDiskStoreTest.java
@@ -101,8 +101,7 @@ public class MemDiskStoreTest extends LocalFileMetadataTestCase {
         }
         builder.close();
 
-        IGTScanner scanner = table.scan(new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null)
-                .setFilterPushDown(null).createGTScanRequest());
+        IGTScanner scanner = table.scan(new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest());
         int i = 0;
         for (GTRecord r : scanner) {
             assertEquals(data.get(i++), r);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyDecoderTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyDecoderTest.java b/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyDecoderTest.java
index b931f69..1d1d147 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyDecoderTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyDecoderTest.java
@@ -54,13 +54,11 @@ public class RowKeyDecoderTest extends LocalFileMetadataTestCase {
 
         RowKeyDecoder rowKeyDecoder = new RowKeyDecoder(cube.getFirstSegment());
 
-        byte[] key = { 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9,
-                9, 9, 0, 10, 5 };
+        byte[] key = { 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 };
 
         rowKeyDecoder.decode(key);
         List<String> values = rowKeyDecoder.getValues();
-        assertEquals("[" + millis("2012-12-15") + ", 11848, Health & Beauty, Fragrances, Women, FP-GTC, 0, 15]",
-                values.toString());
+        assertEquals("[" + millis("2012-12-15") + ", 11848, Health & Beauty, Fragrances, Women, FP-GTC, 0, 15]", values.toString());
     }
 
     @Ignore
@@ -70,13 +68,11 @@ public class RowKeyDecoderTest extends LocalFileMetadataTestCase {
 
         RowKeyDecoder rowKeyDecoder = new RowKeyDecoder(cube.getFirstSegment());
 
-        byte[] key = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 50, 48, 50, 49, 51, 9, 9, 9, 9,
-                9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
+        byte[] key = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 50, 48, 50, 49, 51, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 };
 
         rowKeyDecoder.decode(key);
         List<String> values = rowKeyDecoder.getValues();
-        assertEquals("[10000000, " + millis("2012-01-02")
-                + ", 20213, Collectibles, Postcards, US StateCities & Towns, ABIN, 0, -99]", values.toString());
+        assertEquals("[10000000, " + millis("2012-01-02") + ", 20213, Collectibles, Postcards, US StateCities & Towns, ABIN, 0, -99]", values.toString());
     }
 
     @Test
@@ -104,8 +100,7 @@ public class RowKeyDecoderTest extends LocalFileMetadataTestCase {
         RowKeyDecoder rowKeyDecoder = new RowKeyDecoder(cube.getFirstSegment());
         rowKeyDecoder.decode(encodedKey);
         List<String> values = rowKeyDecoder.getValues();
-        assertEquals("[" + millis("2012-12-15") + ", 11848, Health & Beauty, Fragrances, Women, 刊登格式, 0, 15]",
-                values.toString());
+        assertEquals("[" + millis("2012-12-15") + ", 11848, Health & Beauty, Fragrances, Women, 刊登格式, 0, 15]", values.toString());
     }
 
     private String millis(String dateStr) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyEncoderTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyEncoderTest.java b/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyEncoderTest.java
index b0c6c96..75e2458 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyEncoderTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/kv/RowKeyEncoderTest.java
@@ -71,12 +71,10 @@ public class RowKeyEncoderTest extends LocalFileMetadataTestCase {
 
         byte[] encodedKey = rowKeyEncoder.encode(data);
         assertEquals(22 + rowKeyEncoder.getHeaderLength(), encodedKey.length);
-        byte[] cuboidId = Arrays.copyOfRange(encodedKey, RowConstants.ROWKEY_SHARDID_LEN,
-                rowKeyEncoder.getHeaderLength());
+        byte[] cuboidId = Arrays.copyOfRange(encodedKey, RowConstants.ROWKEY_SHARDID_LEN, rowKeyEncoder.getHeaderLength());
         byte[] rest = Arrays.copyOfRange(encodedKey, rowKeyEncoder.getHeaderLength(), encodedKey.length);
         assertEquals(255, Bytes.toLong(cuboidId));
-        assertArrayEquals(
-                new byte[] { 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 }, rest);
+        assertArrayEquals(new byte[] { 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 }, rest);
     }
 
     @Ignore
@@ -106,16 +104,13 @@ public class RowKeyEncoderTest extends LocalFileMetadataTestCase {
         assertEquals(43 + rowKeyEncoder.getHeaderLength(), encodedKey.length);
         byte[] shard = Arrays.copyOfRange(encodedKey, 0, RowConstants.ROWKEY_SHARDID_LEN);
         @SuppressWarnings("unused")
-        byte[] sellerId = Arrays.copyOfRange(encodedKey, rowKeyEncoder.getHeaderLength(),
-                4 + rowKeyEncoder.getHeaderLength());
-        byte[] cuboidId = Arrays.copyOfRange(encodedKey, RowConstants.ROWKEY_SHARDID_LEN,
-                rowKeyEncoder.getHeaderLength());
+        byte[] sellerId = Arrays.copyOfRange(encodedKey, rowKeyEncoder.getHeaderLength(), 4 + rowKeyEncoder.getHeaderLength());
+        byte[] cuboidId = Arrays.copyOfRange(encodedKey, RowConstants.ROWKEY_SHARDID_LEN, rowKeyEncoder.getHeaderLength());
         byte[] rest = Arrays.copyOfRange(encodedKey, 4 + rowKeyEncoder.getHeaderLength(), encodedKey.length);
         assertEquals(0, Bytes.toShort(shard));
         //        assertTrue(Bytes.toString(sellerId).startsWith("123456789"));
         assertEquals(511, Bytes.toLong(cuboidId));
-        assertArrayEquals(new byte[] { 11, 55, -13, 49, 49, 56, 52, 56, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 22, 34,
-                121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 }, rest);
+        assertArrayEquals(new byte[] { 11, 55, -13, 49, 49, 56, 52, 56, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 }, rest);
     }
 
     @Ignore
@@ -144,16 +139,13 @@ public class RowKeyEncoderTest extends LocalFileMetadataTestCase {
         byte[] encodedKey = rowKeyEncoder.encode(data);
         assertEquals(43 + rowKeyEncoder.getHeaderLength(), encodedKey.length);
         byte[] shard = Arrays.copyOfRange(encodedKey, 0, RowConstants.ROWKEY_SHARDID_LEN);
-        byte[] cuboidId = Arrays.copyOfRange(encodedKey, RowConstants.ROWKEY_SHARDID_LEN,
-                rowKeyEncoder.getHeaderLength());
+        byte[] cuboidId = Arrays.copyOfRange(encodedKey, RowConstants.ROWKEY_SHARDID_LEN, rowKeyEncoder.getHeaderLength());
         @SuppressWarnings("unused")
-        byte[] sellerId = Arrays.copyOfRange(encodedKey, rowKeyEncoder.getHeaderLength(),
-                18 + rowKeyEncoder.getHeaderLength());
+        byte[] sellerId = Arrays.copyOfRange(encodedKey, rowKeyEncoder.getHeaderLength(), 18 + rowKeyEncoder.getHeaderLength());
         byte[] rest = Arrays.copyOfRange(encodedKey, 4 + rowKeyEncoder.getHeaderLength(), encodedKey.length);
         assertEquals(0, Bytes.toShort(shard));
         //assertTrue(Bytes.toString(sellerId).startsWith("123456789"));
         assertEquals(511, Bytes.toLong(cuboidId));
-        assertArrayEquals(new byte[] { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-                -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, rest);
+        assertArrayEquals(new byte[] { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, rest);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/DictionaryRuleTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/DictionaryRuleTest.java b/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/DictionaryRuleTest.java
index 9cfccb3..0dd9b76 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/DictionaryRuleTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/DictionaryRuleTest.java
@@ -30,6 +30,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.List;
 
+import com.google.common.collect.Lists;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -41,8 +42,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.Lists;
-
 public class DictionaryRuleTest extends LocalFileMetadataTestCase {
     private static KylinConfig config;
 
@@ -75,16 +74,13 @@ public class DictionaryRuleTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testBadDesc() throws IOException {
-        testDictionaryDesc(ERROR_DUPLICATE_DICTIONARY_COLUMN,
-                DictionaryDesc.create("ORDER_ID", null, "FakeBuilderClass"));
-        testDictionaryDesc(ERROR_DUPLICATE_DICTIONARY_COLUMN,
-                DictionaryDesc.create("ORDER_ID", null, GlobalDictionaryBuilder.class.getName()));
+        testDictionaryDesc(ERROR_DUPLICATE_DICTIONARY_COLUMN, DictionaryDesc.create("ORDER_ID", null, "FakeBuilderClass"));
+        testDictionaryDesc(ERROR_DUPLICATE_DICTIONARY_COLUMN, DictionaryDesc.create("ORDER_ID", null, GlobalDictionaryBuilder.class.getName()));
     }
 
     @Test
     public void testBadDesc2() throws IOException {
-        testDictionaryDesc(ERROR_REUSE_BUILDER_BOTH_SET,
-                DictionaryDesc.create("lstg_site_id", "SITE_NAME", "FakeBuilderClass"));
+        testDictionaryDesc(ERROR_REUSE_BUILDER_BOTH_SET, DictionaryDesc.create("lstg_site_id", "SITE_NAME", "FakeBuilderClass"));
     }
 
     @Test
@@ -94,7 +90,8 @@ public class DictionaryRuleTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testBadDesc4() throws IOException {
-        testDictionaryDesc(ERROR_TRANSITIVE_REUSE, DictionaryDesc.create("lstg_site_id", "SELLER_ID", null),
+        testDictionaryDesc(ERROR_TRANSITIVE_REUSE,
+                DictionaryDesc.create("lstg_site_id", "SELLER_ID", null),
                 DictionaryDesc.create("price", "lstg_site_id", null));
     }
 
@@ -111,8 +108,7 @@ public class DictionaryRuleTest extends LocalFileMetadataTestCase {
 
     private void testDictionaryDesc(String expectMessage, DictionaryDesc... descs) throws IOException {
         DictionaryRule rule = new DictionaryRule();
-        File f = new File(LocalFileMetadataTestCase.LOCALMETA_TEST_DATA
-                + "/cube_desc/test_kylin_cube_without_slr_left_join_desc.json");
+        File f = new File(LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/cube_desc/test_kylin_cube_without_slr_left_join_desc.json");
         CubeDesc desc = JsonUtil.readValue(new FileInputStream(f), CubeDesc.class);
 
         List<DictionaryDesc> newDicts = Lists.newArrayList(desc.getDictionaries());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java b/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
index c9139fd..5368e16 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
@@ -25,6 +25,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.List;
 
+import com.google.common.collect.Lists;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -35,8 +36,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.Lists;
-
 public class FunctionRuleTest extends LocalFileMetadataTestCase {
     private static KylinConfig config;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/test/java/org/apache/kylin/cube/project/ProjectManagerTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/project/ProjectManagerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/project/ProjectManagerTest.java
index 891dc80..a0fb4a3 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/project/ProjectManagerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/project/ProjectManagerTest.java
@@ -88,8 +88,7 @@ public class ProjectManagerTest extends LocalFileMetadataTestCase {
         System.out.println(JsonUtil.writeValueAsIndentString(createdCube));
 
         assertTrue(prjMgr.listAllProjects().size() == originalProjectCount + 1);
-        assertTrue(prjMgr.listAllRealizations("ALIEN").iterator().next().getName()
-                .equalsIgnoreCase("CUBE_IN_ALIEN_PROJECT"));
+        assertTrue(prjMgr.listAllRealizations("ALIEN").iterator().next().getName().equalsIgnoreCase("CUBE_IN_ALIEN_PROJECT"));
         assertTrue(cubeMgr.listAllCubes().size() == originalCubeCount + 1);
 
         prjMgr.moveRealizationToProject(RealizationType.CUBE, "cube_in_alien_project", "default", null);
@@ -127,8 +126,7 @@ public class ProjectManagerTest extends LocalFileMetadataTestCase {
         store.deleteResource("/cube/new_cube_in_default.json");
 
         CubeDesc desc = cubeDescMgr.getCubeDesc("test_kylin_cube_with_slr_desc");
-        CubeInstance createdCube = cubeMgr.createCube("new_cube_in_default", ProjectInstance.DEFAULT_PROJECT_NAME, desc,
-                null);
+        CubeInstance createdCube = cubeMgr.createCube("new_cube_in_default", ProjectInstance.DEFAULT_PROJECT_NAME, desc, null);
         assertTrue(createdCube == cubeMgr.getCube("new_cube_in_default"));
 
         System.out.println(JsonUtil.writeValueAsIndentString(createdCube));


[32/67] [abbrv] kylin git commit: Broadcaster should allow dynamic rest server list

Posted by li...@apache.org.
Broadcaster should allow dynamic rest server list


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3fbf90ae
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3fbf90ae
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3fbf90ae

Branch: refs/heads/master
Commit: 3fbf90aed1bd78c1f44f9fd1f37fc34ffa704762
Parents: eafbe73
Author: shaofengshi <sh...@apache.org>
Authored: Fri May 26 17:55:36 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Sat May 27 16:20:07 2017 +0800

----------------------------------------------------------------------
 .../kylin/common/restclient/RestClient.java     | 11 +++++++++-
 .../kylin/metadata/cachesync/Broadcaster.java   | 23 ++++++++++++--------
 2 files changed, 24 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3fbf90ae/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
index 33a4e7a..fc34a6b 100644
--- a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
@@ -33,6 +33,7 @@ import org.apache.http.HttpResponse;
 import org.apache.http.auth.AuthScope;
 import org.apache.http.auth.UsernamePasswordCredentials;
 import org.apache.http.client.CredentialsProvider;
+import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
@@ -65,6 +66,10 @@ public class RestClient {
     private static final int HTTP_CONNECTION_TIMEOUT_MS = 30000;
     private static final int HTTP_SOCKET_TIMEOUT_MS = 120000;
 
+    public static final String SCHEME_HTTP = "http://";
+
+    public static final String KYLIN_API_PATH = "/kylin/api";
+
     public static boolean matchFullRestPattern(String uri) {
         Matcher m = fullRestPattern.matcher(uri);
         return m.matches();
@@ -97,7 +102,7 @@ public class RestClient {
         this.port = port;
         this.userName = userName;
         this.password = password;
-        this.baseUrl = "http://" + host + ":" + port + "/kylin/api";
+        this.baseUrl = SCHEME_HTTP + host + ":" + port + KYLIN_API_PATH;
 
         final HttpParams httpParams = new BasicHttpParams();
         HttpConnectionParams.setSoTimeout(httpParams, HTTP_SOCKET_TIMEOUT_MS);
@@ -114,6 +119,10 @@ public class RestClient {
     }
 
     public void wipeCache(String entity, String event, String cacheKey) throws IOException {
+        wipeCache(client, baseUrl, entity, event, cacheKey);
+    }
+
+    public static void wipeCache(HttpClient client, String baseUrl, String entity, String event, String cacheKey) throws IOException {
         String url = baseUrl + "/cache/" + entity + "/" + cacheKey + "/" + event;
         HttpPut request = new HttpPut(url);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/3fbf90ae/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
index 1394f7b..35d2f42 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
@@ -32,6 +32,11 @@ import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.params.BasicHttpParams;
+import org.apache.http.params.HttpConnectionParams;
+import org.apache.http.params.HttpParams;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.restclient.RestClient;
 import org.apache.kylin.common.util.DaemonThreadFactory;
@@ -104,29 +109,29 @@ public class Broadcaster {
         final String[] nodes = config.getRestServers();
         if (nodes == null || nodes.length < 1) {
             logger.warn("There is no available rest server; check the 'kylin.server.cluster-servers' config");
-            broadcastEvents = null; // disable the broadcaster
-            return;
         }
         logger.debug(nodes.length + " nodes in the cluster: " + Arrays.toString(nodes));
 
         Executors.newSingleThreadExecutor(new DaemonThreadFactory()).execute(new Runnable() {
             @Override
             public void run() {
-                final List<RestClient> restClients = Lists.newArrayList();
-                for (String node : config.getRestServers()) {
-                    restClients.add(new RestClient(node));
-                }
-                final ExecutorService wipingCachePool = Executors.newFixedThreadPool(restClients.size(), new DaemonThreadFactory());
+                final HttpParams httpParams = new BasicHttpParams();
+                HttpConnectionParams.setConnectionTimeout(httpParams, 3000);
+
+                final HttpClient client = new DefaultHttpClient(httpParams);
+
+                final ExecutorService wipingCachePool = Executors.newFixedThreadPool(3, new DaemonThreadFactory());
                 while (true) {
                     try {
                         final BroadcastEvent broadcastEvent = broadcastEvents.takeFirst();
+                        logger.debug("Servers in the cluster: " + Arrays.toString(config.getRestServers()));
                         logger.info("Announcing new broadcast event: " + broadcastEvent);
-                        for (final RestClient restClient : restClients) {
+                        for (final String address : config.getRestServers()) {
                             wipingCachePool.execute(new Runnable() {
                                 @Override
                                 public void run() {
                                     try {
-                                        restClient.wipeCache(broadcastEvent.getEntity(), broadcastEvent.getEvent(), broadcastEvent.getCacheKey());
+                                        RestClient.wipeCache(client, RestClient.SCHEME_HTTP + address + RestClient.KYLIN_API_PATH,  broadcastEvent.getEntity(), broadcastEvent.getEvent(), broadcastEvent.getCacheKey());
                                     } catch (IOException e) {
                                         logger.warn("Thread failed during wipe cache at " + broadcastEvent, e);
                                     }
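
The patch above stops freezing the cluster membership into a fixed list of RestClient objects: the server list is re-read from KylinConfig on every broadcast event and the new static RestClient.wipeCache(...) is called per address with a shared HttpClient. Below is a minimal, hedged sketch of that calling pattern, built only from the signatures visible in this diff; the entity/event/cacheKey values ("project", "update", "my_project") are illustrative placeholders, not part of the commit.

    import java.io.IOException;

    import org.apache.http.client.HttpClient;
    import org.apache.http.impl.client.DefaultHttpClient;
    import org.apache.http.params.BasicHttpParams;
    import org.apache.http.params.HttpConnectionParams;
    import org.apache.http.params.HttpParams;
    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.common.restclient.RestClient;

    public class DynamicWipeCacheSketch {
        public static void main(String[] args) throws IOException {
            // One shared HttpClient with a short connect timeout, as in the patch.
            HttpParams httpParams = new BasicHttpParams();
            HttpConnectionParams.setConnectionTimeout(httpParams, 3000);
            HttpClient client = new DefaultHttpClient(httpParams);

            KylinConfig config = KylinConfig.getInstanceFromEnv();

            // Re-reading the list on each event is what makes the server set dynamic:
            // nodes added to kylin.server.cluster-servers are picked up on the next broadcast.
            for (String address : config.getRestServers()) {
                String baseUrl = RestClient.SCHEME_HTTP + address + RestClient.KYLIN_API_PATH;
                RestClient.wipeCache(client, baseUrl, "project", "update", "my_project");
            }
        }
    }

In the actual Broadcaster the loop runs inside a daemon thread that drains a queue of BroadcastEvent objects; the sketch only shows the per-address fan-out.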


[42/67] [abbrv] kylin git commit: KYLIN-2515 Disable ad-hoc query for default.

Posted by li...@apache.org.
KYLIN-2515 Disable ad-hoc query for default.


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1008bd2c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1008bd2c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1008bd2c

Branch: refs/heads/master
Commit: 1008bd2cd6ca8d1412b6c68bcfa4cb438abf23ec
Parents: f3b4085
Author: nichunen <ch...@kyligence.io>
Authored: Sat May 27 21:08:19 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Sat May 27 22:25:23 2017 +0800

----------------------------------------------------------------------
 examples/test_case_data/sandbox/kylin.properties | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1008bd2c/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 6a4f785..8caebc2 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -189,11 +189,11 @@ kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
 ### AD-HOC QUERY ###
 #kylin.query.ad-hoc.runner.class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
 
-kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
-kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
-kylin.query.ad-hoc.jdbc.username=hive
-kylin.query.ad-hoc.jdbc.password=
-
-kylin.query.ad-hoc.pool.max-total=8
-kylin.query.ad-hoc.pool.max-idle=8
-kylin.query.ad-hoc.pool.min-idle=0
+#kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
+#kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
+#kylin.query.ad-hoc.jdbc.username=hive
+#kylin.query.ad-hoc.jdbc.password=
+
+#kylin.query.ad-hoc.pool.max-total=8
+#kylin.query.ad-hoc.pool.max-idle=8
+#kylin.query.ad-hoc.pool.min-idle=0
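
This change comments the ad-hoc query settings out rather than deleting them, so re-enabling the feature in this sandbox config is a matter of uncommenting the same keys (values exactly as they appear in this file; the JDBC URL and credentials would need to match the target Hive instance):

    kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
    kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
    kylin.query.ad-hoc.jdbc.username=hive
    kylin.query.ad-hoc.jdbc.password=

    kylin.query.ad-hoc.pool.max-total=8
    kylin.query.ad-hoc.pool.max-idle=8
    kylin.query.ad-hoc.pool.min-idle=0

Note that kylin.query.ad-hoc.runner.class-name is commented out both before and after this patch, so this diff alone does not say whether it must also be set to turn the feature on.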


[44/67] [abbrv] kylin git commit: KYLIN-2632 Refactor error msg

Posted by li...@apache.org.
KYLIN-2632 Refactor error msg


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/99f08a97
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/99f08a97
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/99f08a97

Branch: refs/heads/master
Commit: 99f08a97a70b207a16a05671f5f611e3f31623b4
Parents: 1008bd2
Author: Luwei-Chen <ch...@apache.org>
Authored: Sat May 27 20:45:10 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Sat May 27 22:39:07 2017 +0800

----------------------------------------------------------------------
 .../rest/controller2/AccessControllerV2.java    |  29 +-
 .../rest/controller2/AdminControllerV2.java     |  31 +-
 .../rest/controller2/CacheControllerV2.java     |  22 +-
 .../rest/controller2/CubeControllerV2.java      | 183 +++++----
 .../rest/controller2/CubeDescControllerV2.java  |  13 +-
 .../rest/controller2/DiagnosisControllerV2.java |  25 +-
 .../rest/controller2/EncodingControllerV2.java  |  10 +-
 .../controller2/ExternalFilterControllerV2.java |  24 +-
 .../rest/controller2/HybridControllerV2.java    |  27 +-
 .../kylin/rest/controller2/JobControllerV2.java |  54 ++-
 .../rest/controller2/ModelControllerV2.java     |  55 +--
 .../rest/controller2/ModelDescControllerV2.java |   7 +-
 .../rest/controller2/ProjectControllerV2.java   |  32 +-
 .../rest/controller2/QueryControllerV2.java     |  54 +--
 .../rest/controller2/StreamingControllerV2.java |  41 +-
 .../rest/controller2/TableControllerV2.java     |  51 +--
 .../rest/controller2/UserControllerV2.java      |  20 +-
 .../org/apache/kylin/rest/msg/CnMessage.java    | 378 +++++++++++++++++-
 .../java/org/apache/kylin/rest/msg/Message.java | 381 +++++--------------
 19 files changed, 840 insertions(+), 597 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
index 3258de9..255e312 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/AccessControllerV2.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 
 import org.apache.kylin.common.persistence.AclEntity;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.AccessRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ResponseCode;
@@ -36,7 +35,6 @@ import org.springframework.security.acls.model.Sid;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -61,10 +59,10 @@ public class AccessControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getAccessEntitiesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getAccessEntitiesV2(@PathVariable String type, @PathVariable String uuid) {
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Acl acl = accessService.getAcl(ae);
@@ -77,10 +75,11 @@ public class AccessControllerV2 extends BasicController {
      * @param accessRequest
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.POST }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse grantV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse grantV2(@PathVariable String type, @PathVariable String uuid,
+            @RequestBody AccessRequest accessRequest) {
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Sid sid = accessService.getSid(accessRequest.getSid(), accessRequest.isPrincipal());
@@ -96,10 +95,11 @@ public class AccessControllerV2 extends BasicController {
      * @param accessRequest
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse updateV2(@PathVariable String type, @PathVariable String uuid,
+            @RequestBody AccessRequest accessRequest) {
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Permission permission = AclPermissionFactory.getPermission(accessRequest.getPermission());
@@ -114,10 +114,11 @@ public class AccessControllerV2 extends BasicController {
      * @param accessRequest
      */
 
-    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse revokeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String type, @PathVariable String uuid, AccessRequest accessRequest) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse revokeV2(@PathVariable String type, @PathVariable String uuid,
+            AccessRequest accessRequest) {
 
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Acl acl = accessService.revoke(ae, accessRequest.getAccessEntryId());
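
The common thread in these controller diffs is the removal of the per-method @RequestHeader("Accept-Language") parameter and the MsgPicker.setMsg(lang) call from every endpoint. Where that locale selection moved is not visible in this excerpt; one conventional way to centralize it (purely illustrative, not necessarily what KYLIN-2632 actually does) is a servlet filter that reads the header once per request:

    import java.io.IOException;

    import javax.servlet.Filter;
    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.ServletException;
    import javax.servlet.ServletRequest;
    import javax.servlet.ServletResponse;
    import javax.servlet.http.HttpServletRequest;

    import org.apache.kylin.rest.msg.MsgPicker;

    // Hypothetical filter: picks the message bundle once per request, so the
    // controller methods no longer need an Accept-Language parameter.
    public class AcceptLanguageFilter implements Filter {

        @Override
        public void init(FilterConfig filterConfig) {
        }

        @Override
        public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
                throws IOException, ServletException {
            String lang = ((HttpServletRequest) request).getHeader("Accept-Language");
            if (lang != null) {
                MsgPicker.setMsg(lang);
            }
            chain.doFilter(request, response);
        }

        @Override
        public void destroy() {
        }
    }

How such a filter would be registered (web.xml or Spring configuration) is outside this sketch and is not shown in the commit.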

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
index 01176d0..a1028d7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/AdminControllerV2.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.MetricsRequest;
 import org.apache.kylin.rest.request.UpdateConfigRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
@@ -34,7 +33,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -57,40 +55,41 @@ public class AdminControllerV2 extends BasicController {
     @Qualifier("cubeMgmtService")
     private CubeService cubeMgmtService;
 
-    @RequestMapping(value = "/env", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/env", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getEnvV2(@RequestHeader("Accept-Language") String lang) throws ConfigurationException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getEnvV2() throws ConfigurationException {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, adminService.getEnv(), "");
     }
 
-    @RequestMapping(value = "/config", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/config", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getConfigV2(@RequestHeader("Accept-Language") String lang) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getConfigV2() throws IOException {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, adminService.getConfigAsString(), "");
     }
 
-    @RequestMapping(value = "/metrics/cubes", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/metrics/cubes", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cubeMetricsV2(@RequestHeader("Accept-Language") String lang, MetricsRequest request) {
+    public EnvelopeResponse cubeMetricsV2(MetricsRequest request) {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeMgmtService.calculateMetrics(request), "");
     }
 
-    @RequestMapping(value = "/storage", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/storage", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void cleanupStorageV2(@RequestHeader("Accept-Language") String lang) {
-        MsgPicker.setMsg(lang);
+    public void cleanupStorageV2() {
 
         adminService.cleanupStorage();
     }
 
-    @RequestMapping(value = "/config", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/config", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateKylinConfigV2(@RequestHeader("Accept-Language") String lang, @RequestBody UpdateConfigRequest updateConfigRequest) {
-        MsgPicker.setMsg(lang);
+    public void updateKylinConfigV2(@RequestBody UpdateConfigRequest updateConfigRequest) {
 
         KylinConfig.getInstanceFromEnv().setProperty(updateConfigRequest.getKey(), updateConfigRequest.getValue());
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
index 3cd5abd..fa786b6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CacheControllerV2.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.cachesync.Broadcaster;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.service.CacheService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -31,7 +30,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -56,10 +54,11 @@ public class CacheControllerV2 extends BasicController {
      * Announce wipe cache to all cluster nodes
      */
 
-    @RequestMapping(value = "/announce/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/announce/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void announceWipeCacheV2(@RequestHeader("Accept-Language") String lang, @PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void announceWipeCacheV2(@PathVariable String entity, @PathVariable String event,
+            @PathVariable String cacheKey) throws IOException {
 
         cacheService.annouceWipeCache(entity, event, cacheKey);
     }
@@ -68,18 +67,19 @@ public class CacheControllerV2 extends BasicController {
      * Wipe cache on this node
      */
 
-    @RequestMapping(value = "/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void wipeCacheV2(@RequestHeader("Accept-Language") String lang, @PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void wipeCacheV2(@PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey)
+            throws IOException {
 
         cacheService.notifyMetadataChange(entity, Broadcaster.Event.getEvent(event), cacheKey);
     }
 
-    @RequestMapping(value = "/announce/config", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/announce/config", method = { RequestMethod.POST }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void hotLoadKylinConfigV2(@RequestHeader("Accept-Language") String lang) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void hotLoadKylinConfigV2() throws IOException {
 
         KylinConfig.getInstanceFromEnv().hotLoadKylinProperties();
         cacheService.notifyMetadataChange(Broadcaster.SYNC_ALL, Broadcaster.Event.UPDATE, Broadcaster.SYNC_ALL);

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
index 386aad2..8179bc8 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
@@ -69,7 +69,6 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -87,7 +86,8 @@ import com.google.common.collect.Lists;
 public class CubeControllerV2 extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(CubeControllerV2.class);
 
-    public static final char[] VALID_CUBENAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
+    public static final char[] VALID_CUBENAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
+            .toCharArray();
 
     @Autowired
     @Qualifier("cubeMgmtService")
@@ -107,8 +107,11 @@ public class CubeControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getCubesPaging(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "cubeName", required = false) String cubeName, @RequestParam(value = "modelName", required = false) String modelName, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getCubesPaging(@RequestParam(value = "cubeName", required = false) String cubeName,
+            @RequestParam(value = "modelName", required = false) String modelName,
+            @RequestParam(value = "projectName", required = false) String projectName,
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
 
         HashMap<String, Object> data = new HashMap<String, Object>();
         List<CubeInstanceResponse> cubeInstanceResponses = new ArrayList<CubeInstanceResponse>();
@@ -136,7 +139,8 @@ public class CubeControllerV2 extends BasicController {
             DataModelDesc getModel = modelService.getMetadataManager().getDataModelDesc(getModelName);
             cubeInstanceResponse.setPartitionDateColumn(getModel.getPartitionDesc().getPartitionDateColumn());
 
-            cubeInstanceResponse.setIs_streaming(getModel.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
+            cubeInstanceResponse.setIs_streaming(
+                    getModel.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
 
             if (projectName != null)
                 cubeInstanceResponse.setProject(projectName);
@@ -158,19 +162,19 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "validEncodings", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "validEncodings", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getValidEncodingsV2(@RequestHeader("Accept-Language") String lang) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getValidEncodingsV2() {
 
         Map<String, Integer> encodings = DimensionEncodingFactory.getValidEncodings();
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, encodings, "");
     }
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getCubeV2(@PathVariable String cubeName) {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -187,7 +191,8 @@ public class CubeControllerV2 extends BasicController {
         DataModelDesc model = modelService.getMetadataManager().getDataModelDesc(modelName);
         cubeInstanceResponse.setPartitionDateColumn(model.getPartitionDesc().getPartitionDateColumn());
 
-        cubeInstanceResponse.setIs_streaming(model.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
+        cubeInstanceResponse
+                .setIs_streaming(model.getRootFactTable().getTableDesc().getSourceType() == ISourceAware.ID_STREAMING);
 
         List<ProjectInstance> projectInstances = projectService.listProjects(null, null);
         for (ProjectInstance projectInstance : projectInstances) {
@@ -207,10 +212,10 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/sql", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/sql", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getSqlV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getSqlV2(@PathVariable String cubeName) {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -233,10 +238,11 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/notify_list", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateNotifyListV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody List<String> notifyList) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void updateNotifyListV2(@PathVariable String cubeName, @RequestBody List<String> notifyList)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -249,10 +255,11 @@ public class CubeControllerV2 extends BasicController {
 
     }
 
-    @RequestMapping(value = "/{cubeName}/cost", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/cost", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateCubeCostV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody Integer cost) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse updateCubeCostV2(@PathVariable String cubeName, @RequestBody Integer cost)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -268,10 +275,11 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}/refresh_lookup", method = {
+            RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rebuildLookupSnapshotV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @PathVariable String segmentName, @RequestBody String lookupTable) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse rebuildLookupSnapshotV2(@PathVariable String cubeName, @PathVariable String segmentName,
+            @RequestBody String lookupTable) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         final CubeManager cubeMgr = cubeService.getCubeManager();
@@ -279,7 +287,8 @@ public class CubeControllerV2 extends BasicController {
         if (cube == null) {
             throw new BadRequestException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
         }
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.rebuildLookupSnapshot(cube, segmentName, lookupTable), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                cubeService.rebuildLookupSnapshot(cube, segmentName, lookupTable), "");
     }
 
     /**
@@ -288,10 +297,11 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse deleteSegmentV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @PathVariable String segmentName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse deleteSegmentV2(@PathVariable String cubeName, @PathVariable String segmentName)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -310,28 +320,35 @@ public class CubeControllerV2 extends BasicController {
     /** Build/Rebuild a cube segment */
 
     /** Build/Rebuild a cube segment */
-    @RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse buildV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest req) throws IOException {
-        return rebuildV2(lang, cubeName, req);
+    public EnvelopeResponse buildV2(@PathVariable String cubeName, @RequestBody JobBuildRequest req)
+            throws IOException {
+        return rebuildV2(cubeName, req);
     }
 
     /** Build/Rebuild a cube segment */
 
-    @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rebuildV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest req) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse rebuildV2(@PathVariable String cubeName, @RequestBody JobBuildRequest req)
+            throws IOException {
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, buildInternalV2(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, null, null, req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment()), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                buildInternalV2(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, null, null, req.getBuildType(),
+                        req.isForce() || req.isForceMergeEmptySegment()),
+                "");
     }
 
     /** Build/Rebuild a cube segment by source offset */
 
-    @RequestMapping(value = "/{cubeName}/build_streaming", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/build_streaming", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse build2V2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest2 req) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse build2V2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
         boolean existKafkaClient = false;
@@ -346,20 +363,26 @@ public class CubeControllerV2 extends BasicController {
         if (!existKafkaClient) {
             throw new BadRequestException(msg.getKAFKA_DEP_NOT_FOUND());
         }
-        return rebuild2V2(lang, cubeName, req);
+        return rebuild2V2(cubeName, req);
     }
 
     /** Build/Rebuild a cube segment by source offset */
-    @RequestMapping(value = "/{cubeName}/rebuild_streaming", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/rebuild_streaming", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rebuild2V2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody JobBuildRequest2 req) throws IOException {
-        MsgPicker.setMsg(lang);
-
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, buildInternalV2(cubeName, 0, 0, req.getSourceOffsetStart(), req.getSourceOffsetEnd(), req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(), req.isForce()), "");
+    public EnvelopeResponse rebuild2V2(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req)
+            throws IOException {
+
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
+                buildInternalV2(cubeName, 0, 0, req.getSourceOffsetStart(), req.getSourceOffsetEnd(),
+                        req.getSourcePartitionOffsetStart(), req.getSourcePartitionOffsetEnd(), req.getBuildType(),
+                        req.isForce()),
+                "");
     }
 
     private JobInstance buildInternalV2(String cubeName, long startTime, long endTime, //
-            long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd, String buildType, boolean force) throws IOException {
+            long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart,
+            Map<Integer, Long> sourcePartitionOffsetEnd, String buildType, boolean force) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -372,13 +395,14 @@ public class CubeControllerV2 extends BasicController {
             throw new BadRequestException(msg.getBUILD_DRAFT_CUBE());
         }
         return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //
-                sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force, submitter);
+                sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force,
+                submitter);
     }
 
-    @RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/disable", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse disableCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse disableCubeV2(@PathVariable String cubeName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -391,10 +415,10 @@ public class CubeControllerV2 extends BasicController {
 
     }
 
-    @RequestMapping(value = "/{cubeName}/purge", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/purge", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse purgeCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse purgeCubeV2(@PathVariable String cubeName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -405,10 +429,11 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.purgeCube(cube), "");
     }
 
-    @RequestMapping(value = "/{cubeName}/clone", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/clone", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cloneCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName, @RequestBody CubeRequest cubeRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse cloneCubeV2(@PathVariable String cubeName, @RequestBody CubeRequest cubeRequest)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
         String newCubeName = cubeRequest.getCubeName();
@@ -440,10 +465,10 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, newCube, "");
     }
 
-    @RequestMapping(value = "/{cubeName}/enable", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/enable", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse enableCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse enableCubeV2(@PathVariable String cubeName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -454,10 +479,10 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.enableCube(cube), "");
     }
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void deleteCubeV2(@PathVariable String cubeName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
@@ -477,10 +502,10 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/hbase", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/hbase", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getHBaseInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getHBaseInfoV2(@PathVariable String cubeName) {
         Message msg = MsgPicker.getMsg();
 
         List<HBaseResponse> hbase = new ArrayList<HBaseResponse>();
@@ -531,13 +556,14 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getHolesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getHolesV2(@PathVariable String cubeName) {
 
         checkCubeNameV2(cubeName);
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.getCubeManager().calculateHoles(cubeName), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.getCubeManager().calculateHoles(cubeName),
+                "");
     }
 
     /**
@@ -547,10 +573,10 @@ public class CubeControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/holes", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse fillHolesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse fillHolesV2(@PathVariable String cubeName) {
 
         checkCubeNameV2(cubeName);
 
@@ -572,7 +598,7 @@ public class CubeControllerV2 extends BasicController {
                 request.setSourceOffsetEnd(hole.getSourceOffsetEnd());
                 request.setSourcePartitionOffsetEnd(hole.getSourcePartitionOffsetEnd());
                 try {
-                    JobInstance job = (JobInstance) build2V2(lang, cubeName, request).data;
+                    JobInstance job = (JobInstance) build2V2(cubeName, request).data;
                     jobs.add(job);
                 } catch (Exception e) {
                     // it may exceed the max allowed job number
@@ -586,7 +612,7 @@ public class CubeControllerV2 extends BasicController {
                 request.setEndTime(hole.getDateRangeEnd());
 
                 try {
-                    JobInstance job = (JobInstance) buildV2(lang, cubeName, request).data;
+                    JobInstance job = (JobInstance) buildV2(cubeName, request).data;
                     jobs.add(job);
                 } catch (Exception e) {
                     // it may exceed the max allowed job number
@@ -606,10 +632,10 @@ public class CubeControllerV2 extends BasicController {
      * @return
      */
 
-    @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/init_start_offsets", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse initStartOffsetsV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse initStartOffsetsV2(@PathVariable String cubeName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         checkCubeNameV2(cubeName);
@@ -629,9 +655,10 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, response, "");
     }
 
-    @RequestMapping(value = "/checkNameAvailability/{cubeName}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/checkNameAvailability/{cubeName}", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse checkNameAvailabilityV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
+    public EnvelopeResponse checkNameAvailabilityV2(@PathVariable String cubeName) {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.checkNameAvailability(cubeName), "");
     }
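
(Every v2 handler in this controller negotiates on produces = "application/vnd.apache.kylin-v2+json", so clients select the v2 API through the Accept header rather than through a different URL. A minimal client sketch follows; the host, port, /kylin/api/cubes path, and ADMIN/KYLIN credentials are assumptions for illustration, not something this diff establishes.)

    // Illustrative sketch: list cubes via the paged v2 endpoint (getCubesPaging above).
    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class ListCubesV2 {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:7070/kylin/api/cubes?pageOffset=0&pageSize=10");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            // Content negotiation: this Accept value routes to the v2 controller methods.
            conn.setRequestProperty("Accept", "application/vnd.apache.kylin-v2+json");
            conn.setRequestProperty("Authorization", "Basic " + Base64.getEncoder()
                    .encodeToString("ADMIN:KYLIN".getBytes(StandardCharsets.UTF_8)));
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = in.readLine()) != null) {
                    System.out.println(line); // EnvelopeResponse body (code/data/msg)
                }
            }
        }
    }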

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
index da429f5..fb90790 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeDescControllerV2.java
@@ -34,7 +34,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -60,10 +59,10 @@ public class CubeDescControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getCubeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getCubeV2(@PathVariable String cubeName) {
         Message msg = MsgPicker.getMsg();
 
         CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName);
@@ -88,10 +87,10 @@ public class CubeDescControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{cubeName}/desc", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{cubeName}/desc", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getDescV2(@RequestHeader("Accept-Language") String lang, @PathVariable String cubeName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getDescV2(@PathVariable String cubeName) {
         Message msg = MsgPicker.getMsg();
 
         HashMap<String, CubeDesc> data = new HashMap<String, CubeDesc>();

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
index 636e81c..eb4b078 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/DiagnosisControllerV2.java
@@ -30,7 +30,6 @@ import org.apache.kylin.metadata.badquery.BadQueryEntry;
 import org.apache.kylin.metadata.badquery.BadQueryHistory;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ResponseCode;
 import org.apache.kylin.rest.service.DiagnosisService;
@@ -41,7 +40,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -67,10 +65,13 @@ public class DiagnosisControllerV2 extends BasicController {
      * Get bad query history
      */
 
-    @RequestMapping(value = "/sql", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/sql", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getBadQuerySqlV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "project", required = false) String project, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getBadQuerySqlV2(@RequestParam(value = "project", required = false) String project,
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
+            throws IOException {
 
         HashMap<String, Object> data = new HashMap<String, Object>();
         List<BadQueryEntry> badEntry = Lists.newArrayList();
@@ -106,10 +107,11 @@ public class DiagnosisControllerV2 extends BasicController {
      * Get diagnosis information for project
      */
 
-    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/project/{project}/download", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void dumpProjectDiagnosisInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String project, final HttpServletRequest request, final HttpServletResponse response) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void dumpProjectDiagnosisInfoV2(@PathVariable String project, final HttpServletRequest request,
+            final HttpServletResponse response) throws IOException {
 
         String filePath;
         filePath = dgService.dumpProjectDiagnosisInfo(project);
@@ -121,10 +123,11 @@ public class DiagnosisControllerV2 extends BasicController {
      * Get diagnosis information for job
      */
 
-    @RequestMapping(value = "/job/{jobId}/download", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/job/{jobId}/download", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void dumpJobDiagnosisInfoV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId, final HttpServletRequest request, final HttpServletResponse response) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void dumpJobDiagnosisInfoV2(@PathVariable String jobId, final HttpServletRequest request,
+            final HttpServletResponse response) throws IOException {
 
         String filePath;
         filePath = dgService.dumpJobDiagnosisInfo(jobId);

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
index edb58b4..f509913 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/EncodingControllerV2.java
@@ -24,8 +24,6 @@ import java.util.Set;
 
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.Message;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ResponseCode;
 import org.apache.kylin.rest.service.EncodingService;
@@ -34,7 +32,6 @@ import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -58,11 +55,10 @@ public class EncodingControllerV2 extends BasicController {
      * @return suggestion map
      */
 
-    @RequestMapping(value = "valid_encodings", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "valid_encodings", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getValidEncodingsV2(@RequestHeader("Accept-Language") String lang) {
-        MsgPicker.setMsg(lang);
-        Message msg = MsgPicker.getMsg();
+    public EnvelopeResponse getValidEncodingsV2() {
 
         Set<String> allDatatypes = Sets.newHashSet();
         allDatatypes.addAll(DataType.DATETIME_FAMILY);

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
index 4e82b41..37dc6e3 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ExternalFilterControllerV2.java
@@ -25,7 +25,6 @@ import java.util.UUID;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.metadata.model.ExternalFilterDesc;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.ExternalFilterRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ResponseCode;
@@ -37,7 +36,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -57,10 +55,10 @@ public class ExternalFilterControllerV2 extends BasicController {
     @Qualifier("extFilterService")
     private ExtFilterService extFilterService;
 
-    @RequestMapping(value = "/saveExtFilter", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saveExtFilter", method = { RequestMethod.POST }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void saveExternalFilterV2(@RequestHeader("Accept-Language") String lang, @RequestBody ExternalFilterRequest request) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void saveExternalFilterV2(@RequestBody ExternalFilterRequest request) throws IOException {
 
         String filterProject = request.getProject();
         ExternalFilterDesc desc = JsonUtil.readValue(request.getExtFilter(), ExternalFilterDesc.class);
@@ -69,20 +67,20 @@ public class ExternalFilterControllerV2 extends BasicController {
         extFilterService.syncExtFilterToProject(new String[] { desc.getName() }, filterProject);
     }
 
-    @RequestMapping(value = "/updateExtFilter", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/updateExtFilter", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateExternalFilterV2(@RequestHeader("Accept-Language") String lang, @RequestBody ExternalFilterRequest request) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void updateExternalFilterV2(@RequestBody ExternalFilterRequest request) throws IOException {
 
         ExternalFilterDesc desc = JsonUtil.readValue(request.getExtFilter(), ExternalFilterDesc.class);
         extFilterService.updateExternalFilter(desc);
         extFilterService.syncExtFilterToProject(new String[] { desc.getName() }, request.getProject());
     }
 
-    @RequestMapping(value = "/{filter}/{project}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{filter}/{project}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void removeFilterV2(@RequestHeader("Accept-Language") String lang, @PathVariable String filter, @PathVariable String project) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void removeFilterV2(@PathVariable String filter, @PathVariable String project) throws IOException {
 
         extFilterService.removeExtFilterFromProject(filter, project);
         extFilterService.removeExternalFilter(filter);
@@ -90,8 +88,8 @@ public class ExternalFilterControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getExternalFiltersV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "project", required = true) String project) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getExternalFiltersV2(@RequestParam(value = "project", required = true) String project)
+            throws IOException {
 
         List<ExternalFilterDesc> filterDescs = Lists.newArrayList();
         filterDescs.addAll(extFilterService.getProjectManager().listExternalFilterDescs(project).values());

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
index ddf745a..e6d41f8 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/HybridControllerV2.java
@@ -19,7 +19,6 @@
 package org.apache.kylin.rest.controller2;
 
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.HybridRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ResponseCode;
@@ -29,7 +28,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -44,34 +42,33 @@ public class HybridControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse createV2(@RequestHeader("Accept-Language") String lang, @RequestBody HybridRequest request) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse createV2(@RequestBody HybridRequest request) {
 
         checkRequiredArg("hybrid", request.getHybrid());
         checkRequiredArg("project", request.getProject());
         checkRequiredArg("model", request.getModel());
         checkRequiredArg("cubes", request.getCubes());
-        HybridInstance instance = hybridService.createHybridCube(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
+        HybridInstance instance = hybridService.createHybridCube(request.getHybrid(), request.getProject(),
+                request.getModel(), request.getCubes());
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, instance, "");
     }
 
     @RequestMapping(value = "", method = RequestMethod.PUT, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateV2(@RequestHeader("Accept-Language") String lang, @RequestBody HybridRequest request) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse updateV2(@RequestBody HybridRequest request) {
 
         checkRequiredArg("hybrid", request.getHybrid());
         checkRequiredArg("project", request.getProject());
         checkRequiredArg("model", request.getModel());
         checkRequiredArg("cubes", request.getCubes());
-        HybridInstance instance = hybridService.updateHybridCube(request.getHybrid(), request.getProject(), request.getModel(), request.getCubes());
+        HybridInstance instance = hybridService.updateHybridCube(request.getHybrid(), request.getProject(),
+                request.getModel(), request.getCubes());
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, instance, "");
     }
 
     @RequestMapping(value = "", method = RequestMethod.DELETE, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteV2(@RequestHeader("Accept-Language") String lang, @RequestBody HybridRequest request) {
-        MsgPicker.setMsg(lang);
+    public void deleteV2(@RequestBody HybridRequest request) {
 
         checkRequiredArg("hybrid", request.getHybrid());
         checkRequiredArg("project", request.getProject());
@@ -81,16 +78,16 @@ public class HybridControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse listV2(@RequestHeader("Accept-Language") String lang, @RequestParam(required = false) String project, @RequestParam(required = false) String model) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse listV2(@RequestParam(required = false) String project,
+            @RequestParam(required = false) String model) {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, hybridService.listHybrids(project, model), "");
     }
 
-    @RequestMapping(value = "{hybrid}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "{hybrid}", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getV2(@RequestHeader("Accept-Language") String lang, @PathVariable String hybrid) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getV2(@PathVariable String hybrid) {
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, hybridService.getHybridInstance(hybrid), "");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
index 2bcc11b..27126b6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
@@ -31,7 +31,6 @@ import org.apache.kylin.job.constant.JobStatusEnum;
 import org.apache.kylin.job.constant.JobTimeFilterEnum;
 import org.apache.kylin.job.exception.JobException;
 import org.apache.kylin.rest.controller.BasicController;
-import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ResponseCode;
 import org.apache.kylin.rest.service.JobService;
@@ -41,7 +40,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -107,15 +105,14 @@ public class JobControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse listV2(@RequestHeader("Accept-Language") String lang, //
-            @RequestParam(value = "status", required = false) Integer[] status, //
+    public EnvelopeResponse listV2(@RequestParam(value = "status", required = false) Integer[] status, //
             @RequestParam(value = "timeFilter", required = true) Integer timeFilter, //
             @RequestParam(value = "cubeName", required = false) String cubeName, //
             @RequestParam(value = "projectName", required = false) String projectName, //
             @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, //
             @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize, //
-            @RequestParam(value = "sortby", required = false, defaultValue = "last_modify") String sortby, @RequestParam(value = "reverse", required = false, defaultValue = "true") Boolean reverse) {
-        MsgPicker.setMsg(lang);
+            @RequestParam(value = "sortby", required = false, defaultValue = "last_modify") String sortby,
+            @RequestParam(value = "reverse", required = false, defaultValue = "true") Boolean reverse) {
 
         HashMap<String, Object> data = new HashMap<String, Object>();
         List<JobStatusEnum> statusList = new ArrayList<JobStatusEnum>();
@@ -126,7 +123,8 @@ public class JobControllerV2 extends BasicController {
             }
         }
 
-        List<JobInstance> jobInstanceList = jobService.searchJobs(cubeName, projectName, statusList, JobTimeFilterEnum.getByCode(timeFilter));
+        List<JobInstance> jobInstanceList = jobService.searchJobs(cubeName, projectName, statusList,
+                JobTimeFilterEnum.getByCode(timeFilter));
 
         if (sortby.equals("last_modify")) {
             if (reverse) {
@@ -174,10 +172,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getV2(@PathVariable String jobId) {
 
         JobInstance jobInstance = jobService.getJobInstance(jobId);
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, jobInstance, "");
@@ -190,10 +188,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/steps/{stepId}/output", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/steps/{stepId}/output", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getStepOutputV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId, @PathVariable String stepId) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getStepOutputV2(@PathVariable String jobId, @PathVariable String stepId) {
 
         Map<String, String> result = new HashMap<String, String>();
         result.put("jobId", jobId);
@@ -209,10 +207,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/resume", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/resume", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse resumeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse resumeV2(@PathVariable String jobId) {
 
         final JobInstance jobInstance = jobService.getJobInstance(jobId);
         jobService.resumeJob(jobInstance);
@@ -226,10 +224,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/cancel", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/cancel", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cancelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse cancelV2(@PathVariable String jobId) throws IOException {
 
         final JobInstance jobInstance = jobService.getJobInstance(jobId);
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, jobService.cancelJob(jobInstance), "");
@@ -242,10 +240,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/pause", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/pause", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse pauseV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse pauseV2(@PathVariable String jobId) {
 
         final JobInstance jobInstance = jobService.getJobInstance(jobId);
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, jobService.pauseJob(jobInstance), "");
@@ -258,10 +256,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/steps/{stepId}/rollback", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/steps/{stepId}/rollback", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rollbackV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId, @PathVariable String stepId) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse rollbackV2(@PathVariable String jobId, @PathVariable String stepId) {
 
         final JobInstance jobInstance = jobService.getJobInstance(jobId);
         jobService.rollbackJob(jobInstance, stepId);
@@ -275,10 +273,10 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/drop", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/drop", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse dropJobV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse dropJobV2(@PathVariable String jobId) throws IOException {
 
         JobInstance jobInstance = jobService.getJobInstance(jobId);
         jobService.dropJob(jobInstance);

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
index aa907a6..9e47790 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
@@ -56,7 +56,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -76,7 +75,8 @@ import com.google.common.collect.Sets;
 public class ModelControllerV2 extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(ModelControllerV2.class);
 
-    public static final char[] VALID_MODELNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
+    public static final char[] VALID_MODELNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
+            .toCharArray();
 
     @Autowired
     @Qualifier("modelMgmtService")
@@ -92,9 +92,11 @@ public class ModelControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getModelsPaging(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "modelName", required = false) String modelName, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
-        MsgPicker.setMsg(lang);
-
+    public EnvelopeResponse getModelsPaging(@RequestParam(value = "modelName", required = false) String modelName,
+            @RequestParam(value = "projectName", required = false) String projectName,
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
+            throws IOException {
         HashMap<String, Object> data = new HashMap<String, Object>();
         List<DataModelDesc> models = modelService.listAllModels(modelName, projectName);
 
@@ -131,13 +133,13 @@ public class ModelControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateModelDescV2(@RequestHeader("Accept-Language") String lang, @RequestBody ModelRequest modelRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse updateModelDescV2(@RequestBody ModelRequest modelRequest) throws IOException {
 
         DataModelDesc modelDesc = deserializeDataModelDescV2(modelRequest);
         modelService.validateModelDesc(modelDesc);
 
-        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : modelRequest.getProject();
+        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
+                : modelRequest.getProject();
 
         ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
         Checkpoint cp = store.checkpoint();
@@ -160,15 +162,16 @@ public class ModelControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "/draft", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/draft", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateModelDescDraftV2(@RequestHeader("Accept-Language") String lang, @RequestBody ModelRequest modelRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse updateModelDescDraftV2(@RequestBody ModelRequest modelRequest) throws IOException {
 
         DataModelDesc modelDesc = deserializeDataModelDescV2(modelRequest);
         modelService.validateModelDesc(modelDesc);
 
-        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : modelRequest.getProject();
+        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
+                : modelRequest.getProject();
 
         ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
         Checkpoint cp = store.checkpoint();
@@ -191,10 +194,10 @@ public class ModelControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "/{modelName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void deleteModelV2(@PathVariable String modelName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         DataModelDesc desc = modelService.getMetadataManager().getDataModelDesc(modelName);
@@ -204,10 +207,11 @@ public class ModelControllerV2 extends BasicController {
         modelService.dropModel(desc);
     }
 
-    @RequestMapping(value = "/{modelName}/clone", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}/clone", method = { RequestMethod.PUT }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cloneModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName, @RequestBody ModelRequest modelRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse cloneModelV2(@PathVariable String modelName, @RequestBody ModelRequest modelRequest)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
         String project = modelRequest.getProject();
@@ -266,19 +270,19 @@ public class ModelControllerV2 extends BasicController {
         return desc;
     }
 
-    @RequestMapping(value = "/checkNameAvailability/{modelName}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/checkNameAvailability/{modelName}", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse checkNameAvailabilityV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse checkNameAvailabilityV2(@PathVariable String modelName) throws IOException {
 
         boolean ret = modelService.checkNameAvailability(modelName);
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, ret, "");
     }
 
-    @RequestMapping(value = "/{modelName}/usedCols", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}/usedCols", method = RequestMethod.GET, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getUsedColsV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getUsedColsV2(@PathVariable String modelName) {
 
         Map<String, Set<String>> data = new HashMap<>();
 
@@ -293,7 +297,8 @@ public class ModelControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    private void populateUsedColResponse(TblColRef tblColRef, Set<CubeInstance> cubeInstances, Map<String, Set<String>> ret) {
+    private void populateUsedColResponse(TblColRef tblColRef, Set<CubeInstance> cubeInstances,
+            Map<String, Set<String>> ret) {
         String columnIdentity = tblColRef.getIdentity();
         if (!ret.containsKey(columnIdentity)) {
             ret.put(columnIdentity, Sets.<String> newHashSet());

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
index 47aa902..56291ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
@@ -36,7 +36,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
@@ -61,10 +60,10 @@ public class ModelDescControllerV2 extends BasicController {
      * @return
      * @throws IOException
      */
-    @RequestMapping(value = "/{modelName}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getModelV2(@PathVariable String modelName) {
         Message msg = MsgPicker.getMsg();
 
         HashMap<String, DataModelDescResponse> data = new HashMap<String, DataModelDescResponse>();

http://git-wip-us.apache.org/repos/asf/kylin/blob/99f08a97/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
index 4c43b61..8ce9ee5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
@@ -40,7 +40,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
@@ -54,7 +53,8 @@ import org.springframework.web.bind.annotation.ResponseBody;
 public class ProjectControllerV2 extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(ProjectControllerV2.class);
 
-    private static final char[] VALID_PROJECTNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
+    private static final char[] VALID_PROJECTNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
+            .toCharArray();
 
     @Autowired
     @Qualifier("projectService")
@@ -62,8 +62,9 @@ public class ProjectControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getProjectsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getProjectsV2(
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
 
         int offset = pageOffset * pageSize;
         int limit = pageSize;
@@ -71,10 +72,12 @@ public class ProjectControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, projectService.listProjects(limit, offset), "");
     }
 
-    @RequestMapping(value = "/readable", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/readable", method = { RequestMethod.GET }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getReadableProjectsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse getReadableProjectsV2(
+            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
+            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
 
         HashMap<String, Object> data = new HashMap<String, Object>();
 
@@ -98,8 +101,7 @@ public class ProjectControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse saveProjectV2(@RequestHeader("Accept-Language") String lang, @RequestBody ProjectRequest projectRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse saveProjectV2(@RequestBody ProjectRequest projectRequest) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         ProjectInstance projectDesc = deserializeProjectDescV2(projectRequest);
@@ -109,7 +111,8 @@ public class ProjectControllerV2 extends BasicController {
         }
 
         if (!StringUtils.containsOnly(projectDesc.getName(), VALID_PROJECTNAME)) {
-            logger.info("Invalid Project name {}, only letters, numbers and underline supported.", projectDesc.getName());
+            logger.info("Invalid Project name {}, only letters, numbers and underline supported.",
+                    projectDesc.getName());
             throw new BadRequestException(String.format(msg.getINVALID_PROJECT_NAME(), projectDesc.getName()));
         }
 
@@ -121,8 +124,7 @@ public class ProjectControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateProjectV2(@RequestHeader("Accept-Language") String lang, @RequestBody ProjectRequest projectRequest) throws IOException {
-        MsgPicker.setMsg(lang);
+    public EnvelopeResponse updateProjectV2(@RequestBody ProjectRequest projectRequest) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         String formerProjectName = projectRequest.getFormerProjectName();
@@ -150,10 +152,10 @@ public class ProjectControllerV2 extends BasicController {
         return projectDesc;
     }
 
-    @RequestMapping(value = "/{projectName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{projectName}", method = { RequestMethod.DELETE }, produces = {
+            "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteProjectV2(@RequestHeader("Accept-Language") String lang, @PathVariable String projectName) throws IOException {
-        MsgPicker.setMsg(lang);
+    public void deleteProjectV2(@PathVariable String projectName) throws IOException {
 
         ProjectInstance project = projectService.getProjectManager().getProject(projectName);
         projectService.deleteProject(projectName, project);
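
Note on the controller diffs above: each V2 endpoint loses its @RequestHeader("Accept-Language") String lang parameter together with the explicit MsgPicker.setMsg(lang) call, so the message locale is no longer threaded through every method signature. The replacement mechanism is not visible in this excerpt; the following is only a hypothetical sketch of doing the same work once per request in a servlet filter. The filter class, its name, and its registration are assumptions; only MsgPicker.setMsg() comes from the removed code.

import java.io.IOException;

import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.kylin.rest.msg.MsgPicker;
import org.springframework.web.filter.OncePerRequestFilter;

// Hypothetical filter (assumption): sets the message bundle from the Accept-Language
// header once per request, instead of passing the header into every controller method.
public class AcceptLanguageFilter extends OncePerRequestFilter {
    @Override
    protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
            throws ServletException, IOException {
        MsgPicker.setMsg(request.getHeader("Accept-Language"));
        chain.doFilter(request, response);
    }
}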


[38/67] [abbrv] kylin git commit: Update AdHocUtil.java

Posted by li...@apache.org.
Update AdHocUtil.java

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f3b4085f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f3b4085f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f3b4085f

Branch: refs/heads/master
Commit: f3b4085fd217af869717160fce7053d9dfd16412
Parents: dd37153
Author: Roger Shi <ro...@gmail.com>
Authored: Sat May 27 21:10:31 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Sat May 27 21:11:03 2017 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/rest/util/AdHocUtil.java        | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/f3b4085f/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
index 648ef91..678e58e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
@@ -64,7 +64,7 @@ public class AdHocUtil {
             try {
                 String adhocSql = converter.convert(sql);
                 if (!sql.equals(adhocSql)) {
-                    logger.info("the original query is converted to {} before delegating to ", adhocSql);
+                    logger.info("the original query is converted to {} before delegating to adhoc", adhocSql);
                 }
 
                 runner.executeQuery(adhocSql, results, columnMetas);


[34/67] [abbrv] kylin git commit: KYLIN-2648 make kylin.env.hdfs-working-dir qualified and absolute

Posted by li...@apache.org.
KYLIN-2648 make kylin.env.hdfs-working-dir qualified and absolute


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e3a79c82
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e3a79c82
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e3a79c82

Branch: refs/heads/master
Commit: e3a79c8276123bb9fa3a9de9bfc653a81607d392
Parents: 6bfc6d2
Author: Li Yang <li...@apache.org>
Authored: Sat May 27 17:03:09 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Sat May 27 17:08:06 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    | 40 +++++++++++++-------
 .../apache/kylin/common/KylinConfigTest.java    |  7 ++++
 .../java/org/apache/kylin/rest/DebugTomcat.java | 10 ++---
 3 files changed, 39 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/e3a79c82/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index ad08108..854ffbd 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -29,6 +29,9 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.lock.DistributedLockFactory;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.CliCommandExecutor;
@@ -184,21 +187,33 @@ abstract public class KylinConfigBase implements Serializable {
         return getOptional("kylin.env", "DEV");
     }
 
+    private String cachedHdfsWorkingDirectory;
+    
     public String getHdfsWorkingDirectory() {
+        if (cachedHdfsWorkingDirectory != null)
+            return cachedHdfsWorkingDirectory;
+        
         String root = getRequired("kylin.env.hdfs-working-dir");
-        if (!root.endsWith("/")) {
-            root += "/";
-        }
-
-        // make sure path qualified
-        if (!root.contains("://")) {
-            if (!root.startsWith("/"))
-                root = "hdfs:///" + root;
-            else
-                root = "hdfs://" + root;
+        Path path = new Path(root);
+        if (path.isAbsolute() == false)
+            throw new IllegalArgumentException("kylin.env.hdfs-working-dir must be absolute, but got " + root);
+        
+        // make sure path is qualified
+        try {
+            FileSystem fs = path.getFileSystem(new Configuration());
+            path = fs.makeQualified(path);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
         }
-
-        return new StringBuffer(root).append(StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).append("/").toString();
+        
+        // append metadata-url prefix
+        root = new Path(path, StringUtils.replaceChars(getMetadataUrlPrefix(), ':', '-')).toString();
+        
+        if (root.endsWith("/") == false)
+            root += "/";
+        
+        cachedHdfsWorkingDirectory = root;
+        return cachedHdfsWorkingDirectory;
     }
 
     // ============================================================================
@@ -1028,5 +1043,4 @@ abstract public class KylinConfigBase implements Serializable {
     public boolean isWebCrossDomainEnabled() {
         return Boolean.parseBoolean(getOptional("kylin.web.cross-domain-enabled", "true"));
     }
-
 }
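
Note on the KylinConfigBase change above: getHdfsWorkingDirectory() now rejects a relative kylin.env.hdfs-working-dir, qualifies the path against its FileSystem, appends the metadata-url prefix, guarantees a trailing slash, and caches the result. Below is a minimal, self-contained sketch of just the qualification step, assuming a sample path ("/kylin") purely for illustration; on a real cluster the scheme and authority come from the Hadoop configuration (fs.defaultFS).

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class QualifyWorkingDirSketch {
    public static void main(String[] args) throws IOException {
        Path path = new Path("/kylin");          // must be absolute, matching the new check
        if (!path.isAbsolute())
            throw new IllegalArgumentException("kylin.env.hdfs-working-dir must be absolute, but got " + path);

        // Resolve the path's FileSystem and make the path fully qualified (scheme + authority).
        FileSystem fs = path.getFileSystem(new Configuration());
        Path qualified = fs.makeQualified(path); // e.g. hdfs://namenode:8020/kylin, or file:/kylin locally
        System.out.println(qualified);
    }
}

Qualifying up front means downstream code never has to guess the scheme, which is also why the new KylinConfigTest below expects the local test configuration to yield a working directory starting with "file:/".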

http://git-wip-us.apache.org/repos/asf/kylin/blob/e3a79c82/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java b/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
index 6027af3..fcbdd36 100644
--- a/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/KylinConfigTest.java
@@ -124,4 +124,11 @@ public class KylinConfigTest extends HotLoadKylinPropertiesTestCase {
             }
         }).start();
     }
+    
+    @Test
+    public void testHdfsWorkingDir() {
+        KylinConfig conf = KylinConfig.getInstanceFromEnv();
+        String hdfsWorkingDirectory = conf.getHdfsWorkingDirectory();
+        assertTrue(hdfsWorkingDirectory.startsWith("file:/"));
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/e3a79c82/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
index cd6768e..d87f1f9 100644
--- a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
+++ b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
@@ -18,6 +18,10 @@
 
 package org.apache.kylin.rest;
 
+import java.io.File;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+
 import org.apache.catalina.Context;
 import org.apache.catalina.core.AprLifecycleListener;
 import org.apache.catalina.core.StandardServer;
@@ -27,10 +31,6 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.kylin.common.KylinConfig;
 
-import java.io.File;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-
 public class DebugTomcat {
 
     public static void setupDebugEnv() {
@@ -101,7 +101,7 @@ public class DebugTomcat {
 
     public static void main(String[] args) throws Exception {
         setupDebugEnv();
-
+        
         int port = 7070;
         if (args.length >= 1) {
             port = Integer.parseInt(args[0]);


[06/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
index 0d8daa4..e8a93bd 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
@@ -88,23 +88,19 @@ public class CLIHiveClient implements IHiveClient {
         List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
         List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
         for (FieldSchema fieldSchema : allFields) {
-            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(),
-                    fieldSchema.getComment()));
+            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(), fieldSchema.getComment()));
         }
         if (partitionFields != null && partitionFields.size() > 0) {
             for (FieldSchema fieldSchema : partitionFields) {
-                partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(),
-                        fieldSchema.getComment()));
+                partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(), fieldSchema.getComment()));
             }
         }
         builder.setAllColumns(allColumns);
         builder.setPartitionColumns(partitionColumns);
 
         builder.setSdLocation(table.getSd().getLocation());
-        builder.setFileSize(
-                getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.TOTAL_SIZE));
-        builder.setFileNum(
-                getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES));
+        builder.setFileSize(getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.TOTAL_SIZE));
+        builder.setFileNum(getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES));
         builder.setIsNative(!MetaStoreUtils.isNonNativeTable(table));
         builder.setTableName(tableName);
         builder.setSdInputFormat(table.getSd().getInputFormat());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index c907b44..15d4456 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -137,8 +137,7 @@ public class HiveMRInput implements IMRInput {
         @Override
         public void addStepPhase1_CreateFlatTable(DefaultChainedExecutable jobFlow) {
             final String cubeName = CubingExecutableUtil.getCubeName(jobFlow.getParams());
-            final KylinConfig cubeConfig = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName)
-                    .getConfig();
+            final KylinConfig cubeConfig = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName).getConfig();
             JobEngineConfig conf = new JobEngineConfig(cubeConfig);
 
             final String hiveInitStatements = JoinedFlatTable.generateHiveInitStatements(flatTableDatabase);
@@ -169,8 +168,7 @@ public class HiveMRInput implements IMRInput {
             return step;
         }
 
-        private ShellExecutable createLookupHiveViewMaterializationStep(String hiveInitStatements,
-                String jobWorkingDir) {
+        private ShellExecutable createLookupHiveViewMaterializationStep(String hiveInitStatements, String jobWorkingDir) {
             ShellExecutable step = new ShellExecutable();
             step.setName(ExecutableConstants.STEP_NAME_MATERIALIZE_HIVE_VIEW_IN_LOOKUP);
 
@@ -197,25 +195,21 @@ public class HiveMRInput implements IMRInput {
                 if (lookUpTableDesc.isView()) {
                     StringBuilder createIntermediateTableHql = new StringBuilder();
                     createIntermediateTableHql.append("DROP TABLE IF EXISTS " + intermediate + ";\n");
-                    createIntermediateTableHql
-                            .append("CREATE EXTERNAL TABLE IF NOT EXISTS " + intermediate + " LIKE " + identity + "\n");
+                    createIntermediateTableHql.append("CREATE EXTERNAL TABLE IF NOT EXISTS " + intermediate + " LIKE " + identity + "\n");
                     createIntermediateTableHql.append("LOCATION '" + jobWorkingDir + "/" + intermediate + "';\n");
-                    createIntermediateTableHql
-                            .append("INSERT OVERWRITE TABLE " + intermediate + " SELECT * FROM " + identity + ";\n");
+                    createIntermediateTableHql.append("INSERT OVERWRITE TABLE " + intermediate + " SELECT * FROM " + identity + ";\n");
                     hiveCmdBuilder.addStatement(createIntermediateTableHql.toString());
                     hiveViewIntermediateTables = hiveViewIntermediateTables + intermediate + ";";
                 }
             }
 
-            hiveViewIntermediateTables = hiveViewIntermediateTables.substring(0,
-                    hiveViewIntermediateTables.length() - 1);
+            hiveViewIntermediateTables = hiveViewIntermediateTables.substring(0, hiveViewIntermediateTables.length() - 1);
 
             step.setCmd(hiveCmdBuilder.build());
             return step;
         }
 
-        private AbstractExecutable createFlatHiveTableStep(String hiveInitStatements, String jobWorkingDir,
-                String cubeName) {
+        private AbstractExecutable createFlatHiveTableStep(String hiveInitStatements, String jobWorkingDir, String cubeName) {
             final String dropTableHql = JoinedFlatTable.generateDropTableStatement(flatDesc);
             final String createTableHql = JoinedFlatTable.generateCreateTableStatement(flatDesc, jobWorkingDir);
             String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(flatDesc);
@@ -302,12 +296,10 @@ public class HiveMRInput implements IMRInput {
                 logger.debug("Row count of table '" + intermediateTable + "' is " + rowCount);
                 if (rowCount == 0) {
                     if (!config.isEmptySegmentAllowed()) {
-                        stepLogger.log("Detect upstream hive table is empty, "
-                                + "fail the job because \"kylin.job.allow-empty-segment\" = \"false\"");
+                        stepLogger.log("Detect upstream hive table is empty, " + "fail the job because \"kylin.job.allow-empty-segment\" = \"false\"");
                         return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog());
                     } else {
-                        return new ExecuteResult(ExecuteResult.State.SUCCEED,
-                                "Row count is 0, no need to redistribute");
+                        return new ExecuteResult(ExecuteResult.State.SUCCEED, "Row count is 0, no need to redistribute");
                     }
                 }
 
@@ -384,8 +376,7 @@ public class HiveMRInput implements IMRInput {
                 config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
                 output.append("Hive table " + hiveTable + " is dropped. \n");
                 rmdirOnHDFS(getExternalDataPath());
-                output.append(
-                        "Hive table " + hiveTable + " external data path " + getExternalDataPath() + " is deleted. \n");
+                output.append("Hive table " + hiveTable + " external data path " + getExternalDataPath() + " is deleted. \n");
             }
             return output.toString();
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
index 5fff000..14ed1f9 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
@@ -20,6 +20,7 @@ package org.apache.kylin.source.hive;
 
 import java.io.IOException;
 
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.engine.mr.DFSFileTable;
@@ -77,7 +78,7 @@ public class HiveTable implements IReadableTable {
                 throw new IOException(e);
         }
     }
-
+    
     @Override
     public boolean exists() {
         return true;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
index 089850a..fa9eb29 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
@@ -34,8 +34,7 @@ class HiveTableMeta {
 
         @Override
         public String toString() {
-            return "HiveTableColumnMeta{" + "name='" + name + '\'' + ", dataType='" + dataType + '\'' + ", comment='"
-                    + comment + '\'' + '}';
+            return "HiveTableColumnMeta{" + "name='" + name + '\'' + ", dataType='" + dataType + '\'' + ", comment='" + comment + '\'' + '}';
         }
     }
 
@@ -53,9 +52,7 @@ class HiveTableMeta {
     List<HiveTableColumnMeta> allColumns;
     List<HiveTableColumnMeta> partitionColumns;
 
-    public HiveTableMeta(String tableName, String sdLocation, String sdInputFormat, String sdOutputFormat, String owner,
-            String tableType, int lastAccessTime, long fileSize, long fileNum, int skipHeaderLineCount,
-            boolean isNative, List<HiveTableColumnMeta> allColumns, List<HiveTableColumnMeta> partitionColumns) {
+    public HiveTableMeta(String tableName, String sdLocation, String sdInputFormat, String sdOutputFormat, String owner, String tableType, int lastAccessTime, long fileSize, long fileNum, int skipHeaderLineCount, boolean isNative, List<HiveTableColumnMeta> allColumns, List<HiveTableColumnMeta> partitionColumns) {
         this.tableName = tableName;
         this.sdLocation = sdLocation;
         this.sdInputFormat = sdInputFormat;
@@ -73,10 +70,6 @@ class HiveTableMeta {
 
     @Override
     public String toString() {
-        return "HiveTableMeta{" + "tableName='" + tableName + '\'' + ", sdLocation='" + sdLocation + '\''
-                + ", sdInputFormat='" + sdInputFormat + '\'' + ", sdOutputFormat='" + sdOutputFormat + '\''
-                + ", owner='" + owner + '\'' + ", tableType='" + tableType + '\'' + ", lastAccessTime=" + lastAccessTime
-                + ", fileSize=" + fileSize + ", fileNum=" + fileNum + ", isNative=" + isNative + ", allColumns="
-                + allColumns + ", partitionColumns=" + partitionColumns + '}';
+        return "HiveTableMeta{" + "tableName='" + tableName + '\'' + ", sdLocation='" + sdLocation + '\'' + ", sdInputFormat='" + sdInputFormat + '\'' + ", sdOutputFormat='" + sdOutputFormat + '\'' + ", owner='" + owner + '\'' + ", tableType='" + tableType + '\'' + ", lastAccessTime=" + lastAccessTime + ", fileSize=" + fileSize + ", fileNum=" + fileNum + ", isNative=" + isNative + ", allColumns=" + allColumns + ", partitionColumns=" + partitionColumns + '}';
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
index 6fedd8b..073ded5 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
@@ -106,7 +106,6 @@ public class HiveTableMetaBuilder {
     }
 
     public HiveTableMeta createHiveTableMeta() {
-        return new HiveTableMeta(tableName, sdLocation, sdInputFormat, sdOutputFormat, owner, tableType, lastAccessTime,
-                fileSize, fileNum, skipHeaderLineCount, isNative, allColumns, partitionColumns);
+        return new HiveTableMeta(tableName, sdLocation, sdInputFormat, sdOutputFormat, owner, tableType, lastAccessTime, fileSize, fileNum, skipHeaderLineCount, isNative, allColumns, partitionColumns);
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableReader.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableReader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableReader.java
index 48e0ee3..75f322f 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableReader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableReader.java
@@ -143,8 +143,7 @@ public class HiveTableReader implements TableReader {
         return "hive table reader for: " + dbName + "." + tableName;
     }
 
-    private static ReaderContext getHiveReaderContext(String database, String table, Map<String, String> partitionKV)
-            throws Exception {
+    private static ReaderContext getHiveReaderContext(String database, String table, Map<String, String> partitionKV) throws Exception {
         HiveConf hiveConf = new HiveConf(HiveTableReader.class);
         Iterator<Entry<String, String>> itr = hiveConf.iterator();
         Map<String, String> map = new HashMap<String, String>();
@@ -157,8 +156,7 @@ public class HiveTableReader implements TableReader {
         if (partitionKV == null || partitionKV.size() == 0) {
             entity = new ReadEntity.Builder().withDatabase(database).withTable(table).build();
         } else {
-            entity = new ReadEntity.Builder().withDatabase(database).withTable(table).withPartition(partitionKV)
-                    .build();
+            entity = new ReadEntity.Builder().withDatabase(database).withTable(table).withPartition(partitionKV).build();
         }
 
         HCatReader reader = DataTransferFactory.getHCatReader(entity, map);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
index 9db65042..22bea46 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
@@ -18,11 +18,11 @@
 
 package org.apache.kylin.source.hive;
 
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-
 public interface IHiveClient {
     void executeHQL(String hql) throws CommandNeedRetryException, IOException;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
index 52730bd..ffd54db 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
@@ -71,7 +71,7 @@ public class ColumnCardinalityMapper<T> extends KylinMapper<T, Object, IntWritab
         ColumnDesc[] columns = tableDesc.getColumns();
         Collection<String[]> valuesCollection = tableInputFormat.parseMapperInput(value);
 
-        for (String[] values : valuesCollection) {
+        for (String[] values: valuesCollection) {
             for (int m = 0; m < columns.length; m++) {
                 String field = columns[m].getName();
                 String fieldValue = values[m];

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
index 3724ef7..0648960 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
@@ -49,8 +49,7 @@ public class ColumnCardinalityReducer extends KylinReducer<IntWritable, BytesWri
     }
 
     @Override
-    public void doReduce(IntWritable key, Iterable<BytesWritable> values, Context context)
-            throws IOException, InterruptedException {
+    public void doReduce(IntWritable key, Iterable<BytesWritable> values, Context context) throws IOException, InterruptedException {
         int skey = key.get();
         for (BytesWritable v : values) {
             ByteBuffer buffer = ByteBuffer.wrap(v.getBytes());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
index 7179a66..f439ccb 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
@@ -50,8 +50,7 @@ public class HiveColumnCardinalityJob extends AbstractHadoopJob {
     public static final String JOB_TITLE = "Kylin Hive Column Cardinality Job";
 
     @SuppressWarnings("static-access")
-    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true)
-            .withDescription("The hive table name").create("table");
+    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true).withDescription("The hive table name").create("table");
 
     public HiveColumnCardinalityJob() {
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
index 5f48523..246822c 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
@@ -52,8 +52,7 @@ public class HiveColumnCardinalityUpdateJob extends AbstractHadoopJob {
     public static final String JOB_TITLE = "Kylin Hive Column Cardinality Update Job";
 
     @SuppressWarnings("static-access")
-    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true)
-            .withDescription("The hive table name").create("table");
+    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true).withDescription("The hive table name").create("table");
 
     private String table;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/DefaultTimeParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/DefaultTimeParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/DefaultTimeParser.java
index 21dac70..4bcd572 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/DefaultTimeParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/DefaultTimeParser.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.source.kafka;
 
-import java.util.Map;
-
 import org.apache.commons.lang3.StringUtils;
 
+import java.util.Map;
+
 /**
  */
 public class DefaultTimeParser extends AbstractTimeParser {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
index 650f57e..50295c3 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
@@ -73,8 +73,7 @@ public class KafkaConfigManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if (event == Event.DROP)
                 removeKafkaConfigLocal(cacheKey);
             else
@@ -218,13 +217,11 @@ public class KafkaConfigManager {
 
     private void reloadAllKafkaConfig() throws IOException {
         ResourceStore store = getStore();
-        logger.info("Reloading Kafka Metadata from folder "
-                + store.getReadableResourcePath(ResourceStore.KAFKA_RESOURCE_ROOT));
+        logger.info("Reloading Kafka Metadata from folder " + store.getReadableResourcePath(ResourceStore.KAFKA_RESOURCE_ROOT));
 
         kafkaMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.KAFKA_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.KAFKA_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             KafkaConfig kafkaConfig;
             try {
@@ -234,8 +231,7 @@ public class KafkaConfigManager {
                 continue;
             }
             if (path.equals(kafkaConfig.getResourcePath()) == false) {
-                logger.error("Skip suspicious desc at " + path + ", " + kafkaConfig + " should be at "
-                        + kafkaConfig.getResourcePath());
+                logger.error("Skip suspicious desc at " + path + ", " + kafkaConfig + " should be at " + kafkaConfig.getResourcePath());
                 continue;
             }
             if (kafkaMap.containsKey(kafkaConfig.getName())) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
index 5815d53..3323afb 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
@@ -78,14 +78,13 @@ public class KafkaMRInput implements IMRInput {
     public IMRTableInputFormat getTableInputFormat(TableDesc table) {
         KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv());
         KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(table.getIdentity());
-        List<TblColRef> columns = Lists.transform(Arrays.asList(table.getColumns()),
-                new Function<ColumnDesc, TblColRef>() {
-                    @Nullable
-                    @Override
-                    public TblColRef apply(ColumnDesc input) {
-                        return input.getRef();
-                    }
-                });
+        List<TblColRef> columns = Lists.transform(Arrays.asList(table.getColumns()), new Function<ColumnDesc, TblColRef>() {
+            @Nullable
+            @Override
+            public TblColRef apply(ColumnDesc input) {
+                return input.getRef();
+            }
+        });
 
         return new KafkaTableInputFormat(cubeSegment, columns, kafkaConfig, null);
     }
@@ -100,13 +99,11 @@ public class KafkaMRInput implements IMRInput {
         private StreamingParser streamingParser;
         private final JobEngineConfig conf;
 
-        public KafkaTableInputFormat(CubeSegment cubeSegment, List<TblColRef> columns, KafkaConfig kafkaConfig,
-                JobEngineConfig conf) {
+        public KafkaTableInputFormat(CubeSegment cubeSegment, List<TblColRef> columns, KafkaConfig kafkaConfig, JobEngineConfig conf) {
             this.cubeSegment = cubeSegment;
             this.conf = conf;
             try {
-                streamingParser = StreamingParser.getStreamingParser(kafkaConfig.getParserName(),
-                        kafkaConfig.getParserProperties(), columns);
+                streamingParser = StreamingParser.getStreamingParser(kafkaConfig.getParserName(), kafkaConfig.getParserProperties(), columns);
             } catch (ReflectiveOperationException e) {
                 throw new IllegalArgumentException(e);
             }
@@ -117,8 +114,7 @@ public class KafkaMRInput implements IMRInput {
             job.setInputFormatClass(SequenceFileInputFormat.class);
             String jobId = job.getConfiguration().get(BatchConstants.ARG_CUBING_JOB_ID);
             IJoinedFlatTableDesc flatHiveTableDesc = new CubeJoinedFlatTableDesc(cubeSegment);
-            String inputPath = JoinedFlatTable.getTableDir(flatHiveTableDesc,
-                    JobBuilderSupport.getJobWorkingDir(conf, jobId));
+            String inputPath = JoinedFlatTable.getTableDir(flatHiveTableDesc, JobBuilderSupport.getJobWorkingDir(conf, jobId));
             try {
                 FileInputFormat.addInputPath(job, new Path(inputPath));
             } catch (IOException e) {
@@ -130,10 +126,10 @@ public class KafkaMRInput implements IMRInput {
         public Collection<String[]> parseMapperInput(Object mapperInput) {
             Text text = (Text) mapperInput;
             ByteBuffer buffer = ByteBuffer.wrap(text.getBytes(), 0, text.getLength());
-            List<StreamingMessageRow> streamingMessageRowList = streamingParser.parse(buffer);
+            List<StreamingMessageRow>  streamingMessageRowList = streamingParser.parse(buffer);
             List<String[]> parsedDataCollection = new ArrayList<>();
 
-            for (StreamingMessageRow row : streamingMessageRowList) {
+            for (StreamingMessageRow row: streamingMessageRowList) {
                 parsedDataCollection.add(row.getData().toArray(new String[row.getData().size()]));
             }
 
@@ -162,19 +158,16 @@ public class KafkaMRInput implements IMRInput {
             MapReduceExecutable result = new MapReduceExecutable();
 
             IJoinedFlatTableDesc flatHiveTableDesc = new CubeJoinedFlatTableDesc(seg);
-            outputPath = JoinedFlatTable.getTableDir(flatHiveTableDesc,
-                    JobBuilderSupport.getJobWorkingDir(conf, jobId));
+            outputPath = JoinedFlatTable.getTableDir(flatHiveTableDesc, JobBuilderSupport.getJobWorkingDir(conf, jobId));
             result.setName("Save data from Kafka");
             result.setMapReduceJobClass(KafkaFlatTableJob.class);
             JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(seg, "system");
             StringBuilder cmd = new StringBuilder();
             jobBuilderSupport.appendMapReduceParameters(cmd);
-            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME,
-                    seg.getRealization().getName());
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
             JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
             JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
-            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                    "Kylin_Save_Kafka_Data_" + seg.getRealization().getName() + "_Step");
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Save_Kafka_Data_" + seg.getRealization().getName() + "_Step");
 
             result.setMapReduceParams(cmd.toString());
             return result;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
index fc0d50d..52d2e6f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
@@ -32,8 +32,8 @@ import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.metadata.streaming.StreamingConfig;
-import org.apache.kylin.source.IReadableTable;
 import org.apache.kylin.source.ISource;
+import org.apache.kylin.source.IReadableTable;
 import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.apache.kylin.source.SourcePartition;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
@@ -71,25 +71,20 @@ public class KafkaSource implements ISource {
         if (result.getStartOffset() == 0) {
             final CubeSegment last = cube.getLastSegment();
             if (last != null) {
-                logger.debug("Last segment exists, continue from last segment " + last.getName() + "'s end position: "
-                        + last.getSourcePartitionOffsetEnd());
+                logger.debug("Last segment exists, continue from last segment " + last.getName() + "'s end position: " + last.getSourcePartitionOffsetEnd());
                 // from last seg's end position
                 result.setSourcePartitionOffsetStart(last.getSourcePartitionOffsetEnd());
-            } else if (cube.getDescriptor().getPartitionOffsetStart() != null
-                    && cube.getDescriptor().getPartitionOffsetStart().size() > 0) {
-                logger.debug("Last segment doesn't exist, use the start offset that be initiated previously: "
-                        + cube.getDescriptor().getPartitionOffsetStart());
+            } else if (cube.getDescriptor().getPartitionOffsetStart() != null && cube.getDescriptor().getPartitionOffsetStart().size() > 0) {
+                logger.debug("Last segment doesn't exist, use the start offset that be initiated previously: " + cube.getDescriptor().getPartitionOffsetStart());
                 result.setSourcePartitionOffsetStart(cube.getDescriptor().getPartitionOffsetStart());
             } else {
                 // from the topic's earliest offset;
-                logger.debug(
-                        "Last segment doesn't exist, and didn't initiate the start offset, will seek from topic's earliest offset.");
+                logger.debug("Last segment doesn't exist, and didn't initiate the start offset, will seek from topic's earliest offset.");
                 result.setSourcePartitionOffsetStart(KafkaClient.getEarliestOffsets(cube));
             }
         }
 
-        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .getKafkaConfig(cube.getRootFactTable());
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv()).getKafkaConfig(cube.getRootFactTable());
         final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
         final String topic = kafkaConfig.getTopic();
         try (final KafkaConsumer consumer = KafkaClient.getKafkaConsumer(brokers, cube.getName(), null)) {
@@ -113,9 +108,7 @@ public class KafkaSource implements ISource {
             for (Integer partitionId : latestOffsets.keySet()) {
                 if (result.getSourcePartitionOffsetStart().containsKey(partitionId)) {
                     if (result.getSourcePartitionOffsetStart().get(partitionId) > latestOffsets.get(partitionId)) {
-                        throw new IllegalArgumentException("Partition " + partitionId + " end offset ("
-                                + latestOffsets.get(partitionId) + ") is smaller than start offset ( "
-                                + result.getSourcePartitionOffsetStart().get(partitionId) + ")");
+                        throw new IllegalArgumentException("Partition " + partitionId + " end offset (" + latestOffsets.get(partitionId) + ") is smaller than start offset ( " + result.getSourcePartitionOffsetStart().get(partitionId) + ")");
                     }
                 } else {
                     throw new IllegalStateException("New partition added in between, retry.");
@@ -133,8 +126,7 @@ public class KafkaSource implements ISource {
         }
 
         if (totalStartOffset > totalEndOffset) {
-            throw new IllegalArgumentException(
-                    "Illegal offset: start: " + totalStartOffset + ", end: " + totalEndOffset);
+            throw new IllegalArgumentException("Illegal offset: start: " + totalStartOffset + ", end: " + totalEndOffset);
         }
 
         if (totalStartOffset == totalEndOffset) {
@@ -159,8 +151,7 @@ public class KafkaSource implements ISource {
 
         if (startOffset > 0) {
             if (sourcePartitionOffsetStart == null || sourcePartitionOffsetStart.size() == 0) {
-                throw new IllegalArgumentException(
-                        "When 'startOffset' is > 0, need provide each partition's start offset");
+                throw new IllegalArgumentException("When 'startOffset' is > 0, need provide each partition's start offset");
             }
 
             long totalOffset = 0;
@@ -169,15 +160,13 @@ public class KafkaSource implements ISource {
             }
 
             if (totalOffset != startOffset) {
-                throw new IllegalArgumentException(
-                        "Invalid 'sourcePartitionOffsetStart', doesn't match with 'startOffset'");
+                throw new IllegalArgumentException("Invalid 'sourcePartitionOffsetStart', doesn't match with 'startOffset'");
             }
         }
 
         if (endOffset > 0 && endOffset != Long.MAX_VALUE) {
             if (sourcePartitionOffsetEnd == null || sourcePartitionOffsetEnd.size() == 0) {
-                throw new IllegalArgumentException(
-                        "When 'endOffset' is not Long.MAX_VALUE, need provide each partition's start offset");
+                throw new IllegalArgumentException("When 'endOffset' is not Long.MAX_VALUE, need provide each partition's start offset");
             }
 
             long totalOffset = 0;
@@ -186,8 +175,7 @@ public class KafkaSource implements ISource {
             }
 
             if (totalOffset != endOffset) {
-                throw new IllegalArgumentException(
-                        "Invalid 'sourcePartitionOffsetEnd', doesn't match with 'endOffset'");
+                throw new IllegalArgumentException("Invalid 'sourcePartitionOffsetEnd', doesn't match with 'endOffset'");
             }
         }
     }
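
Side note on the validation reverted above: it boils down to summing the per-partition offsets and comparing the sum against the declared aggregate start/end offset. A minimal standalone sketch of that consistency check, using only plain java.util types (the class and method names are illustrative, not part of Kylin's API):

    import java.util.Map;

    public class OffsetCheck {
        // Illustrative helper: verify that the sum of per-partition offsets
        // matches the aggregate offset declared for a segment boundary.
        static void validateOffsets(long declaredTotal, Map<Integer, Long> perPartition) {
            if (declaredTotal > 0) {
                if (perPartition == null || perPartition.isEmpty()) {
                    throw new IllegalArgumentException("Per-partition offsets are required");
                }
                long sum = 0;
                for (Long offset : perPartition.values()) {
                    sum += offset;
                }
                if (sum != declaredTotal) {
                    throw new IllegalArgumentException("Per-partition offsets do not sum to " + declaredTotal);
                }
            }
        }
    }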

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
index 1459c2d..2e3c11c 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
@@ -19,20 +19,20 @@
 package org.apache.kylin.source.kafka;
 
 import java.lang.reflect.Constructor;
-import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.collect.Maps;
 import org.apache.commons.lang3.StringUtils;
+import java.nio.ByteBuffer;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.StreamingMessageRow;
 import org.apache.kylin.common.util.TimeUtil;
 import org.apache.kylin.metadata.model.TblColRef;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Maps;
-
 /**
  * By convention stream parsers should have a constructor with (List<TblColRef> allColumns, Map properties) as params
  */
@@ -68,8 +68,7 @@ public abstract class StreamingParser {
 
     abstract public boolean filter(StreamingMessageRow streamingMessageRow);
 
-    public static StreamingParser getStreamingParser(String parserName, String parserProperties,
-            List<TblColRef> columns) throws ReflectiveOperationException {
+    public static StreamingParser getStreamingParser(String parserName, String parserProperties, List<TblColRef> columns) throws ReflectiveOperationException {
         if (!StringUtils.isEmpty(parserName)) {
             logger.info("Construct StreamingParse {} with properties {}", parserName, parserProperties);
             Class clazz = Class.forName(parserName);
@@ -77,8 +76,7 @@ public abstract class StreamingParser {
             Constructor constructor = clazz.getConstructor(List.class, Map.class);
             return (StreamingParser) constructor.newInstance(columns, properties);
         } else {
-            throw new IllegalStateException("invalid StreamingConfig, parserName " + parserName + ", parserProperties "
-                    + parserProperties + ".");
+            throw new IllegalStateException("invalid StreamingConfig, parserName " + parserName + ", parserProperties " + parserProperties + ".");
         }
     }
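
For reference, the reflective construction pattern in getStreamingParser above — look up a class by name and invoke its (List, Map) constructor — can be exercised in isolation like this (a hedged sketch; the helper name is invented for illustration):

    import java.lang.reflect.Constructor;
    import java.util.List;
    import java.util.Map;

    public class ReflectiveFactory {
        // Instantiate a parser-like class by name, assuming it declares a public
        // (List, Map) constructor as the StreamingParser convention requires.
        static Object newInstance(String className, List<?> columns, Map<String, String> props)
                throws ReflectiveOperationException {
            Class<?> clazz = Class.forName(className);
            Constructor<?> constructor = clazz.getConstructor(List.class, Map.class);
            return constructor.newInstance(columns, props);
        }
    }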
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index beed6f7..de167b4 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -21,14 +21,15 @@ package org.apache.kylin.source.kafka;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
+import java.util.ArrayList;
 import java.util.Map;
+import java.util.HashMap;
 import java.util.TreeMap;
+import java.util.Collections;
+import java.util.Arrays;
 
+import com.fasterxml.jackson.databind.DeserializationFeature;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.util.ByteBufferBackedInputStream;
 import org.apache.kylin.common.util.StreamingMessageRow;
@@ -36,7 +37,6 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.type.MapType;
@@ -68,8 +68,7 @@ public final class TimedJsonStreamParser extends StreamingParser {
     private final Map<String, Object> tempMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
     private final Map<String, String[]> nameMap = new HashMap<>();
 
-    private final JavaType mapType = MapType.construct(HashMap.class, SimpleType.construct(String.class),
-            SimpleType.construct(Object.class));
+    private final JavaType mapType = MapType.construct(HashMap.class, SimpleType.construct(String.class), SimpleType.construct(Object.class));
 
     private AbstractTimeParser streamTimeParser;
 
@@ -89,12 +88,10 @@ public final class TimedJsonStreamParser extends StreamingParser {
                 Constructor constructor = clazz.getConstructor(Map.class);
                 streamTimeParser = (AbstractTimeParser) constructor.newInstance(properties);
             } catch (Exception e) {
-                throw new IllegalStateException(
-                        "Invalid StreamingConfig, tsParser " + tsParser + ", parserProperties " + properties + ".", e);
+                throw new IllegalStateException("Invalid StreamingConfig, tsParser " + tsParser + ", parserProperties " + properties + ".", e);
             }
         } else {
-            throw new IllegalStateException(
-                    "Invalid StreamingConfig, tsParser " + tsParser + ", parserProperties " + properties + ".");
+            throw new IllegalStateException("Invalid StreamingConfig, tsParser " + tsParser + ", parserProperties " + properties + ".");
         }
         mapper = new ObjectMapper();
         mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
@@ -119,8 +116,7 @@ public final class TimedJsonStreamParser extends StreamingParser {
                 }
             }
 
-            StreamingMessageRow streamingMessageRow = new StreamingMessageRow(result, 0, t,
-                    Collections.<String, Object> emptyMap());
+            StreamingMessageRow streamingMessageRow = new StreamingMessageRow(result, 0, t, Collections.<String, Object>emptyMap());
             List<StreamingMessageRow> messageRowList = new ArrayList<StreamingMessageRow>();
             messageRowList.add(streamingMessageRow);
             return messageRowList;
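
The parser above relies on a Jackson ObjectMapper that ignores unknown fields and deserializes each message into a case-insensitive map. A self-contained sketch of that configuration (the sample JSON payload is invented for illustration):

    import java.util.Map;
    import java.util.TreeMap;
    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class LenientJsonDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // Unknown properties in the message must not fail the build job.
            mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
            Map<String, Object> raw = mapper.readValue(
                    "{\"ts\": 1496640000000, \"user\": {\"age\": 20}}", Map.class);
            // Copy into a case-insensitive map so column lookups ignore case.
            Map<String, Object> row = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
            row.putAll(raw);
            System.out.println(row.get("TS")); // 1496640000000
        }
    }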

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/BrokerConfig.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/BrokerConfig.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/BrokerConfig.java
index 1e75763..fc3bba0 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/BrokerConfig.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/BrokerConfig.java
@@ -18,15 +18,15 @@
 
 package org.apache.kylin.source.kafka.config;
 
-import java.io.Serializable;
-
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
+import java.io.Serializable;
+
 /**
  */
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
-public class BrokerConfig implements Serializable {
+public class BrokerConfig implements Serializable{
 
     @JsonProperty("id")
     private int id;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
index 44be966..afe888f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
@@ -39,8 +39,7 @@ import kafka.cluster.Broker;
  */
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
 public class KafkaClusterConfig extends RootPersistentEntity {
-    public static Serializer<KafkaClusterConfig> SERIALIZER = new JsonSerializer<KafkaClusterConfig>(
-            KafkaClusterConfig.class);
+    public static Serializer<KafkaClusterConfig> SERIALIZER = new JsonSerializer<KafkaClusterConfig>(KafkaClusterConfig.class);
 
     @JsonProperty("brokers")
     private List<BrokerConfig> brokerConfigs;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
index b073921..cc32ed9 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
@@ -57,8 +57,7 @@ public class KafkaConsumerProperties {
                     KafkaConsumerProperties config = new KafkaConsumerProperties();
                     config.properties = config.loadKafkaConsumerProperties();
 
-                    logger.info("Initialized a new KafkaConsumerProperties from getInstanceFromEnv : "
-                            + System.identityHashCode(config));
+                    logger.info("Initialized a new KafkaConsumerProperties from getInstanceFromEnv : " + System.identityHashCode(config));
                     ENV_INSTANCE = config;
                 } catch (IllegalArgumentException e) {
                     throw new IllegalStateException("Failed to find KafkaConsumerProperties ", e);
@@ -82,14 +81,8 @@ public class KafkaConsumerProperties {
             configNames = ConsumerConfig.configNames();
         } catch (Error e) {
             // the Kafka configNames api is supported on 0.10.1.0+, in case NoSuchMethodException which is an Error, not Exception
-            String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms,"
-                    + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type,"
-                    + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password,"
-                    + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms,"
-                    + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes,"
-                    + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms,"
-                    + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset")
-                            .split(",");
+            String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms," + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type," + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password," + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms," + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes," + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms,"
+                    + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset").split(",");
             configNames.addAll(Arrays.asList(configNamesArray));
         }
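
The fallback above hard-codes the consumer config names for Kafka clients older than 0.10.1.0, where ConsumerConfig.configNames() is unavailable; the names then act as a whitelist when extracting Kafka settings from a larger configuration. A small sketch of that filtering step (property values here are examples only):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Properties;
    import java.util.Set;

    public class ConsumerConfigFilter {
        public static void main(String[] args) {
            // Whitelist of recognized consumer settings (abbreviated for the example).
            Set<String> known = new HashSet<>(Arrays.asList("session.timeout.ms", "group.id"));
            Properties all = new Properties();
            all.setProperty("session.timeout.ms", "30000");
            all.setProperty("acks", "1"); // producer-only setting, should be dropped
            Properties kafkaOnly = new Properties();
            for (String name : all.stringPropertyNames()) {
                if (known.contains(name)) {
                    kafkaOnly.setProperty(name, all.getProperty(name));
                }
            }
            System.out.println(kafkaOnly); // {session.timeout.ms=30000}
        }
    }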
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
index b3a0f19..11466e5 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
@@ -97,8 +97,7 @@ public class KafkaFlatTableJob extends AbstractHadoopJob {
             String topic = kafkaConfig.getTopic();
 
             if (brokers == null || brokers.length() == 0 || topic == null) {
-                throw new IllegalArgumentException(
-                        "Invalid Kafka information, brokers " + brokers + ", topic " + topic);
+                throw new IllegalArgumentException("Invalid Kafka information, brokers " + brokers + ", topic " + topic);
             }
 
             JobEngineConfig jobEngineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
@@ -144,7 +143,7 @@ public class KafkaFlatTableJob extends AbstractHadoopJob {
         job.getConfiguration().set(CONFIG_KAFKA_PARITION_MIN, minPartition.toString());
         job.getConfiguration().set(CONFIG_KAFKA_PARITION_MAX, maxPartition.toString());
 
-        for (Integer partition : offsetStart.keySet()) {
+        for(Integer partition: offsetStart.keySet()) {
             job.getConfiguration().set(CONFIG_KAFKA_PARITION_START + partition, offsetStart.get(partition).toString());
             job.getConfiguration().set(CONFIG_KAFKA_PARITION_END + partition, offsetEnd.get(partition).toString());
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
index 71f823f..c996c5f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
@@ -69,11 +69,9 @@ public class KafkaInputFormat extends InputFormat<LongWritable, BytesWritable> {
 
         Properties kafkaProperties = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);
         final List<InputSplit> splits = new ArrayList<InputSplit>();
-        try (KafkaConsumer<String, String> consumer = KafkaClient.getKafkaConsumer(brokers, consumerGroup,
-                kafkaProperties)) {
+        try (KafkaConsumer<String, String> consumer = KafkaClient.getKafkaConsumer(brokers, consumerGroup, kafkaProperties)) {
             final List<PartitionInfo> partitionInfos = consumer.partitionsFor(inputTopic);
-            Preconditions.checkArgument(partitionInfos.size() == startOffsetMap.size(),
-                    "partition number mismatch with server side");
+            Preconditions.checkArgument(partitionInfos.size() == startOffsetMap.size(), "partition number mismatch with server side");
             for (int i = 0; i < partitionInfos.size(); i++) {
                 final PartitionInfo partition = partitionInfos.get(i);
                 int partitionId = partition.partition();
@@ -82,8 +80,7 @@ public class KafkaInputFormat extends InputFormat<LongWritable, BytesWritable> {
                 }
 
                 if (endOffsetMap.get(partitionId) > startOffsetMap.get(partitionId)) {
-                    InputSplit split = new KafkaInputSplit(brokers, inputTopic, partitionId,
-                            startOffsetMap.get(partitionId), endOffsetMap.get(partitionId));
+                    InputSplit split = new KafkaInputSplit(brokers, inputTopic, partitionId, startOffsetMap.get(partitionId), endOffsetMap.get(partitionId));
                     splits.add(split);
                 }
             }
@@ -92,8 +89,7 @@ public class KafkaInputFormat extends InputFormat<LongWritable, BytesWritable> {
     }
 
     @Override
-    public RecordReader<LongWritable, BytesWritable> createRecordReader(InputSplit arg0, TaskAttemptContext arg1)
-            throws IOException, InterruptedException {
+    public RecordReader<LongWritable, BytesWritable> createRecordReader(InputSplit arg0, TaskAttemptContext arg1) throws IOException, InterruptedException {
         return new KafkaInputRecordReader();
     }
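
getSplits above creates one split per partition whose end offset is ahead of its start offset. The same partition-to-split mapping can be sketched without Hadoop or Kafka on the classpath (offsets and partition ids are made-up example values):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class SplitPlanner {
        public static void main(String[] args) {
            Map<Integer, Long> start = new HashMap<>();
            Map<Integer, Long> end = new HashMap<>();
            start.put(0, 100L); end.put(0, 200L);
            start.put(1, 50L);  end.put(1, 50L); // no new data on partition 1
            List<String> splits = new ArrayList<>();
            for (Integer p : start.keySet()) {
                // Only partitions whose end offset moved past the start get a split.
                if (end.get(p) > start.get(p)) {
                    splits.add("partition " + p + " [" + start.get(p) + ", " + end.get(p) + ")");
                }
            }
            System.out.println(splits); // [partition 0 [100, 200)]
        }
    }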
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
index c1bb625..c22c72f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
@@ -89,15 +89,13 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
 
         Properties kafkaProperties = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);
 
-        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup,
-                kafkaProperties);
+        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup, kafkaProperties);
 
         earliestOffset = this.split.getOffsetStart();
         latestOffset = this.split.getOffsetEnd();
         TopicPartition topicPartition = new TopicPartition(topic, partition);
         consumer.assign(Arrays.asList(topicPartition));
-        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}",
-                new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
+        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}", new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
     }
 
     @Override
@@ -122,9 +120,7 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
             iterator = messages.iterator();
             if (!iterator.hasNext()) {
                 log.info("No more messages, stop");
-                throw new IOException(
-                        String.format("Unexpected ending of stream, expected ending offset %d, but end at %d",
-                                latestOffset, watermark));
+                throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
             }
         }
 
@@ -143,8 +139,7 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
         }
 
         log.error("Unexpected iterator end.");
-        throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d",
-                latestOffset, watermark));
+        throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
     }
 
     @Override
@@ -167,8 +162,7 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
 
     @Override
     public void close() throws IOException {
-        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition,
-                numProcessedMessages);
+        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition, numProcessedMessages);
         consumer.close();
     }
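
The record reader assigns a single TopicPartition to a dedicated consumer and polls until the split's end offset is reached. A hedged sketch of that bounded-read loop, assuming kafka-clients 2.x (poll(Duration)); broker address, topic and offsets are placeholders, not values from this commit:

    import java.time.Duration;
    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.TopicPartition;

    public class BoundedRead {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // placeholder broker
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            long startOffset = 100L, endOffset = 200L; // placeholder split bounds
            TopicPartition tp = new TopicPartition("demo_topic", 0);
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.assign(Collections.singletonList(tp));
                consumer.seek(tp, startOffset);
                long watermark = startOffset;
                while (watermark < endOffset) {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                    for (ConsumerRecord<String, String> record : records) {
                        if (record.offset() >= endOffset) {
                            return; // reached the split boundary
                        }
                        watermark = record.offset() + 1;
                        System.out.println(record.offset() + ": " + record.value());
                    }
                }
            }
        }
    }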
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
index c8a0110..3261399 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
@@ -72,7 +72,7 @@ public class KafkaInputSplit extends InputSplit implements Writable {
 
     @Override
     public String[] getLocations() throws IOException, InterruptedException {
-        return new String[] { brokers };
+        return new String[]{brokers};
     }
 
     public int getPartition() {
@@ -99,4 +99,4 @@ public class KafkaInputSplit extends InputSplit implements Writable {
     public String toString() {
         return brokers + "-" + topic + "-" + partition + "-" + offsetStart + "-" + offsetEnd;
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/MergeOffsetStep.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/MergeOffsetStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/MergeOffsetStep.java
index d357d91..914fca2 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/MergeOffsetStep.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/MergeOffsetStep.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
+import com.google.common.base.Preconditions;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
@@ -33,8 +34,6 @@ import org.apache.kylin.job.execution.ExecuteResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
 /**
  */
 public class MergeOffsetStep extends AbstractExecutable {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
index 56d3687..bd8f90e 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
@@ -17,11 +17,7 @@
 */
 package org.apache.kylin.source.kafka.util;
 
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
+import com.google.common.collect.Maps;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.PartitionInfo;
@@ -34,7 +30,10 @@ import org.apache.kylin.source.kafka.config.BrokerConfig;
 import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 
-import com.google.common.collect.Maps;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 
 /**
  */
@@ -46,8 +45,7 @@ public class KafkaClient {
         return consumer;
     }
 
-    private static Properties constructDefaultKafkaConsumerProperties(String brokers, String consumerGroup,
-            Properties properties) {
+    private static Properties constructDefaultKafkaConsumerProperties(String brokers, String consumerGroup, Properties properties) {
         Properties props = new Properties();
         if (properties != null) {
             for (Map.Entry entry : properties.entrySet()) {
@@ -99,8 +97,7 @@ public class KafkaClient {
     }
 
     public static Map<Integer, Long> getLatestOffsets(final CubeInstance cubeInstance) {
-        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .getKafkaConfig(cubeInstance.getRootFactTable());
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv()).getKafkaConfig(cubeInstance.getRootFactTable());
 
         final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
         final String topic = kafkaConfig.getTopic();
@@ -116,9 +113,9 @@ public class KafkaClient {
         return startOffsets;
     }
 
+
     public static Map<Integer, Long> getEarliestOffsets(final CubeInstance cubeInstance) {
-        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .getKafkaConfig(cubeInstance.getRootFactTable());
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv()).getKafkaConfig(cubeInstance.getRootFactTable());
 
         final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
         final String topic = kafkaConfig.getTopic();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
index fc04f62..4b91e03 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
@@ -48,12 +48,9 @@ public class KafkaSampleProducer {
 
     private static final Logger logger = LoggerFactory.getLogger(KafkaSampleProducer.class);
     @SuppressWarnings("static-access")
-    private static final Option OPTION_TOPIC = OptionBuilder.withArgName("topic").hasArg().isRequired(true)
-            .withDescription("Kafka topic").create("topic");
-    private static final Option OPTION_BROKER = OptionBuilder.withArgName("broker").hasArg().isRequired(true)
-            .withDescription("Kafka broker").create("broker");
-    private static final Option OPTION_INTERVAL = OptionBuilder.withArgName("interval").hasArg().isRequired(false)
-            .withDescription("Simulated message interval in mili-seconds, default 1000").create("interval");
+    private static final Option OPTION_TOPIC = OptionBuilder.withArgName("topic").hasArg().isRequired(true).withDescription("Kafka topic").create("topic");
+    private static final Option OPTION_BROKER = OptionBuilder.withArgName("broker").hasArg().isRequired(true).withDescription("Kafka broker").create("broker");
+    private static final Option OPTION_INTERVAL = OptionBuilder.withArgName("interval").hasArg().isRequired(false).withDescription("Simulated message interval in mili-seconds, default 1000").create("interval");
 
     private static final ObjectMapper mapper = new ObjectMapper();
 
@@ -134,8 +131,7 @@ public class KafkaSampleProducer {
             user.put("age", rnd.nextInt(20) + 10);
             record.put("user", user);
             //send message
-            ProducerRecord<String, String> data = new ProducerRecord<>(topic, System.currentTimeMillis() + "",
-                    mapper.writeValueAsString(record));
+            ProducerRecord<String, String> data = new ProducerRecord<>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
             System.out.println("Sending 1 message: " + JsonUtil.writeValueAsString(record));
             producer.send(data);
             Thread.sleep(interval);
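
The sample producer builds a JSON payload with Jackson and sends it as a string-keyed ProducerRecord. A minimal standalone equivalent (broker, topic and field names are placeholders; assumes kafka-clients and jackson-databind on the classpath):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class JsonProducerDemo {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // placeholder broker
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            ObjectMapper mapper = new ObjectMapper();
            Map<String, Object> record = new HashMap<>();
            record.put("order_time", System.currentTimeMillis());
            record.put("amount", 42.0);
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                // Key is the send timestamp, value is the JSON-encoded record.
                producer.send(new ProducerRecord<>("demo_topic",
                        String.valueOf(System.currentTimeMillis()),
                        mapper.writeValueAsString(record)));
            }
        }
    }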

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/test/java/org/apache/kylin/source/kafka/TimedJsonStreamParserTest.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/test/java/org/apache/kylin/source/kafka/TimedJsonStreamParserTest.java b/source-kafka/src/test/java/org/apache/kylin/source/kafka/TimedJsonStreamParserTest.java
index 2339862..8dc840b 100644
--- a/source-kafka/src/test/java/org/apache/kylin/source/kafka/TimedJsonStreamParserTest.java
+++ b/source-kafka/src/test/java/org/apache/kylin/source/kafka/TimedJsonStreamParserTest.java
@@ -45,8 +45,7 @@ public class TimedJsonStreamParserTest extends LocalFileMetadataTestCase {
     private static String[] userNeedColNames;
     private static final String jsonFilePath = "src/test/resources/message.json";
     private static ObjectMapper mapper;
-    private final JavaType mapType = MapType.construct(HashMap.class, SimpleType.construct(String.class),
-            SimpleType.construct(Object.class));
+    private final JavaType mapType = MapType.construct(HashMap.class, SimpleType.construct(String.class), SimpleType.construct(Object.class));
 
     @BeforeClass
     public static void setUp() throws Exception {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/source-kafka/src/test/java/org/apache/kylin/source/kafka/config/KafkaConsumerPropertiesTest.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/test/java/org/apache/kylin/source/kafka/config/KafkaConsumerPropertiesTest.java b/source-kafka/src/test/java/org/apache/kylin/source/kafka/config/KafkaConsumerPropertiesTest.java
index cc94a35..208fdb6 100644
--- a/source-kafka/src/test/java/org/apache/kylin/source/kafka/config/KafkaConsumerPropertiesTest.java
+++ b/source-kafka/src/test/java/org/apache/kylin/source/kafka/config/KafkaConsumerPropertiesTest.java
@@ -52,17 +52,14 @@ public class KafkaConsumerPropertiesTest extends LocalFileMetadataTestCase {
         KafkaConsumerProperties kafkaConsumerProperties = KafkaConsumerProperties.getInstanceFromEnv();
         assertFalse(kafkaConsumerProperties.extractKafkaConfigToProperties().containsKey("acks"));
         assertTrue(kafkaConsumerProperties.extractKafkaConfigToProperties().containsKey("session.timeout.ms"));
-        assertEquals("30000",
-                kafkaConsumerProperties.extractKafkaConfigToProperties().getProperty("session.timeout.ms"));
+        assertEquals("30000", kafkaConsumerProperties.extractKafkaConfigToProperties().getProperty("session.timeout.ms"));
     }
 
     @Test
-    public void testLoadKafkaPropertiesAsHadoopJobConf()
-            throws IOException, ParserConfigurationException, SAXException {
+    public void testLoadKafkaPropertiesAsHadoopJobConf() throws IOException, ParserConfigurationException, SAXException {
         KafkaConsumerProperties kafkaConsumerProperties = KafkaConsumerProperties.getInstanceFromEnv();
         Configuration conf = new Configuration(false);
-        conf.addResource(new FileInputStream(new File(kafkaConsumerProperties.getKafkaConsumerHadoopJobConf())),
-                KafkaConsumerProperties.KAFKA_CONSUMER_FILE);
+        conf.addResource(new FileInputStream(new File(kafkaConsumerProperties.getKafkaConsumerHadoopJobConf())), KafkaConsumerProperties.KAFKA_CONSUMER_FILE);
         assertEquals("30000", conf.get("session.timeout.ms"));
 
         Properties prop = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index 5b4126f..6580107 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -104,8 +104,7 @@ public class HBaseConnection {
             int coreThreads = config.getHBaseCoreConnectionThreads();
             long keepAliveTime = config.getHBaseConnectionThreadPoolAliveSeconds();
             LinkedBlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<Runnable>(maxThreads * 100);
-            ThreadPoolExecutor tpe = new ThreadPoolExecutor(coreThreads, maxThreads, keepAliveTime, TimeUnit.SECONDS,
-                    workQueue, //
+            ThreadPoolExecutor tpe = new ThreadPoolExecutor(coreThreads, maxThreads, keepAliveTime, TimeUnit.SECONDS, workQueue, //
                     Threads.newDaemonThreadFactory("kylin-coproc-"));
             tpe.allowCoreThreadTimeOut(true);
 
@@ -145,8 +144,7 @@ public class HBaseConnection {
     private static Configuration newHBaseConfiguration(StorageURL url) {
         // using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
         if (!"hbase".equals(url.getScheme()))
-            throw new IllegalArgumentException(
-                    "to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
+            throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
 
         Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
         addHBaseClusterNNHAConfiguration(conf);
@@ -165,7 +163,7 @@ public class HBaseConnection {
         if (StringUtils.isBlank(conf.get("hbase.fs.tmp.dir"))) {
             conf.set("hbase.fs.tmp.dir", "/tmp");
         }
-
+        
         for (Entry<String, String> entry : url.getAllParameters().entrySet()) {
             conf.set(entry.getKey(), entry.getValue());
         }
@@ -266,8 +264,7 @@ public class HBaseConnection {
         return tableExists(HBaseConnection.get(hbaseUrl), tableName);
     }
 
-    public static void createHTableIfNeeded(StorageURL hbaseUrl, String tableName, String... families)
-            throws IOException {
+    public static void createHTableIfNeeded(StorageURL hbaseUrl, String tableName, String... families) throws IOException {
         createHTableIfNeeded(HBaseConnection.get(hbaseUrl), tableName, families);
     }
 
@@ -280,7 +277,7 @@ public class HBaseConnection {
         TableName tableName = TableName.valueOf(table);
         DistributedLock lock = null;
         String lockPath = getLockPath(table);
-
+        
         try {
             if (tableExists(conn, table)) {
                 logger.debug("HTable '" + table + "' already exists");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index f5f40a1..a2e0229 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -149,11 +149,9 @@ public class HBaseResourceStore extends ResourceStore {
     @Override
     public String getMetaStoreUUID() throws IOException {
         if (!exists(ResourceStore.METASTORE_UUID_TAG)) {
-            putResource(ResourceStore.METASTORE_UUID_TAG, new StringEntity(createMetaStoreUUID()), 0,
-                    StringEntity.serializer);
+            putResource(ResourceStore.METASTORE_UUID_TAG, new StringEntity(createMetaStoreUUID()), 0, StringEntity.serializer);
         }
-        StringEntity entity = getResource(ResourceStore.METASTORE_UUID_TAG, StringEntity.class,
-                StringEntity.serializer);
+        StringEntity entity = getResource(ResourceStore.METASTORE_UUID_TAG, StringEntity.class, StringEntity.serializer);
         return entity.toString();
     }
 
@@ -204,8 +202,7 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     @Override
-    protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive)
-            throws IOException {
+    protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive) throws IOException {
         FilterList filter = generateTimeFilterList(timeStart, timeEndExclusive);
         final List<RawResource> result = Lists.newArrayList();
         try {
@@ -229,13 +226,11 @@ public class HBaseResourceStore extends ResourceStore {
     private FilterList generateTimeFilterList(long timeStart, long timeEndExclusive) {
         FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
         if (timeStart != Long.MIN_VALUE) {
-            SingleColumnValueFilter timeStartFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS,
-                    CompareFilter.CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(timeStart));
+            SingleColumnValueFilter timeStartFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS, CompareFilter.CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(timeStart));
             filterList.addFilter(timeStartFilter);
         }
         if (timeEndExclusive != Long.MAX_VALUE) {
-            SingleColumnValueFilter timeEndFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS,
-                    CompareFilter.CompareOp.LESS, Bytes.toBytes(timeEndExclusive));
+            SingleColumnValueFilter timeEndFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS, CompareFilter.CompareOp.LESS, Bytes.toBytes(timeEndExclusive));
             filterList.addFilter(timeEndFilter);
         }
         return filterList.getFilters().size() == 0 ? null : filterList;
@@ -296,8 +291,7 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     @Override
-    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
-            throws IOException, IllegalStateException {
+    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
         Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         try {
             byte[] row = Bytes.toBytes(resPath);
@@ -305,12 +299,10 @@ public class HBaseResourceStore extends ResourceStore {
             Put put = buildPut(resPath, newTS, row, content, table);
 
             boolean ok = table.checkAndPut(row, B_FAMILY, B_COLUMN_TS, bOldTS, put);
-            logger.trace("Update row " + resPath + " from oldTs: " + oldTS + ", to newTs: " + newTS
-                    + ", operation result: " + ok);
+            logger.trace("Update row " + resPath + " from oldTs: " + oldTS + ", to newTs: " + newTS + ", operation result: " + ok);
             if (!ok) {
                 long real = getResourceTimestampImpl(resPath);
-                throw new IllegalStateException(
-                        "Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
+                throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
             }
 
             return newTS;
@@ -363,8 +355,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     }
 
-    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp)
-            throws IOException {
+    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
         byte[] rowkey = Bytes.toBytes(path);
 
         Get get = new Get(rowkey);
@@ -409,8 +400,7 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
-        int kvSizeLimit = Integer
-                .parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
+        int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
         if (content.length > kvSizeLimit) {
             writeLargeCellToHdfs(resPath, content, table);
             content = BytesUtil.EMPTY_BYTE_ARRAY;
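
checkAndPutResourceImpl above implements optimistic concurrency: the put only succeeds if the timestamp column still holds the expected old value. A hedged sketch of that pattern against the HBase 1.x client API (family and qualifier names are placeholders, not Kylin's actual schema):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class OptimisticPut {
        // Update a resource row only if its timestamp cell still holds oldTS,
        // mirroring the compare-and-set style of the store above.
        static long update(Table table, String resPath, byte[] content, long oldTS, long newTS)
                throws IOException {
            byte[] row = Bytes.toBytes(resPath);
            byte[] family = Bytes.toBytes("f");      // placeholder column family
            byte[] tsColumn = Bytes.toBytes("t");    // placeholder timestamp qualifier
            Put put = new Put(row);
            put.addColumn(family, Bytes.toBytes("c"), content);
            put.addColumn(family, tsColumn, Bytes.toBytes(newTS));
            boolean ok = table.checkAndPut(row, family, tsColumn, Bytes.toBytes(oldTS), put);
            if (!ok) {
                throw new IllegalStateException("Concurrent update detected for " + resPath);
            }
            return newTS;
        }
    }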

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
index 0d44adc..fc6f878 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
@@ -50,16 +50,14 @@ public class HBaseStorage implements IStorage {
             CubeInstance cubeInstance = (CubeInstance) realization;
             String cubeStorageQuery;
             if (cubeInstance.getStorageType() == IStorageAware.ID_HBASE) {//v2 query engine cannot go with v1 storage now
-                throw new IllegalStateException(
-                        "Storage Engine (id=" + IStorageAware.ID_HBASE + ") is not supported any more");
+                throw new IllegalStateException("Storage Engine (id=" + IStorageAware.ID_HBASE + ") is not supported any more");
             } else {
                 cubeStorageQuery = v2CubeStorageQuery;//by default use v2
             }
 
             IStorageQuery ret;
             try {
-                ret = (IStorageQuery) Class.forName(cubeStorageQuery).getConstructor(CubeInstance.class)
-                        .newInstance((CubeInstance) realization);
+                ret = (IStorageQuery) Class.forName(cubeStorageQuery).getConstructor(CubeInstance.class).newInstance((CubeInstance) realization);
             } catch (Exception e) {
                 throw new RuntimeException("Failed to initialize storage query for " + cubeStorageQuery, e);
             }
@@ -72,13 +70,11 @@ public class HBaseStorage implements IStorage {
 
     private static TblColRef getPartitionCol(IRealization realization) {
         String modelName = realization.getModel().getName();
-        DataModelDesc dataModelDesc = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .getDataModelDesc(modelName);
+        DataModelDesc dataModelDesc = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv()).getDataModelDesc(modelName);
         PartitionDesc partitionDesc = dataModelDesc.getPartitionDesc();
         Preconditions.checkArgument(partitionDesc != null, "PartitionDesc for " + realization + " is null!");
         TblColRef partitionColRef = partitionDesc.getPartitionDateColumnRef();
-        Preconditions.checkArgument(partitionColRef != null,
-                "getPartitionDateColumnRef for " + realization + " is null");
+        Preconditions.checkArgument(partitionColRef != null, "getPartitionDateColumnRef for " + realization + " is null");
         return partitionColRef;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggrKey.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggrKey.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggrKey.java
index 4d69925..25abdfb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggrKey.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggrKey.java
@@ -124,8 +124,7 @@ public class AggrKey implements Comparable<AggrKey> {
             return comp;
 
         for (int i = 0; i < groupByMaskSet.length; i++) {
-            comp = BytesUtil.compareByteUnsigned(this.data[this.offset + groupByMaskSet[i]],
-                    o.data[o.offset + groupByMaskSet[i]]);
+            comp = BytesUtil.compareByteUnsigned(this.data[this.offset + groupByMaskSet[i]], o.data[o.offset + groupByMaskSet[i]]);
             if (comp != 0)
                 return comp;
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggregationCache.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggregationCache.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggregationCache.java
index 386564a..2a85894 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggregationCache.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/AggregationCache.java
@@ -77,16 +77,13 @@ public abstract class AggregationCache {
         int size = aggBufMap.size();
         long memUsage = (40L + rowMemBytes) * size;
         if (memUsage > MEMORY_USAGE_CAP) {
-            throw new RuntimeException("Kylin coprocessor memory usage goes beyond cap, (40 + " + rowMemBytes + ") * "
-                    + size + " > " + MEMORY_USAGE_CAP + ". Abort coprocessor.");
+            throw new RuntimeException("Kylin coprocessor memory usage goes beyond cap, (40 + " + rowMemBytes + ") * " + size + " > " + MEMORY_USAGE_CAP + ". Abort coprocessor.");
         }
 
         //If less than 5% of max memory
         long avail = MemoryBudgetController.getSystemAvailBytes();
         if (avail < (MEMOERY_MAX_BYTES / 20)) {
-            throw new RuntimeException(
-                    "Running Kylin coprocessor when too little memory is left. Abort coprocessor. Current available memory is "
-                            + avail + ". Max memory is " + MEMOERY_MAX_BYTES);
+            throw new RuntimeException("Running Kylin coprocessor when too little memory is left. Abort coprocessor. Current available memory is " + avail + ". Max memory is " + MEMOERY_MAX_BYTES);
         }
     }
 }
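
The guard above estimates aggregation-cache usage as (40 + rowMemBytes) * rows and aborts the coprocessor when the estimate or the remaining system memory crosses a threshold. The arithmetic in isolation (cap and row sizes are example values):

    public class MemoryGuard {
        public static void main(String[] args) {
            long rowMemBytes = 56;                    // example per-row payload estimate
            int rows = 1_000_000;
            long memoryUsageCap = 500L * 1024 * 1024; // example 500 MB cap
            // 40 bytes approximates per-entry map overhead on top of the row payload.
            long estimate = (40L + rowMemBytes) * rows;
            if (estimate > memoryUsageCap) {
                throw new RuntimeException("Estimated " + estimate + " bytes exceeds cap " + memoryUsageCap);
            }
            System.out.println("Estimated usage: " + estimate + " bytes");
        }
    }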

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorFilter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorFilter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorFilter.java
index 2b3b91b..63e3bdb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorFilter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorFilter.java
@@ -33,8 +33,7 @@ import org.apache.kylin.metadata.tuple.IEvaluatableTuple;
  */
 public class CoprocessorFilter {
 
-    public static CoprocessorFilter fromFilter(final IDimensionEncodingMap dimEncMap, TupleFilter rootFilter,
-            FilterDecorator.FilterConstantsTreatment filterConstantsTreatment) {
+    public static CoprocessorFilter fromFilter(final IDimensionEncodingMap dimEncMap, TupleFilter rootFilter, FilterDecorator.FilterConstantsTreatment filterConstantsTreatment) {
         // translate constants into dictionary IDs via a serialize copy
         FilterDecorator filterDecorator = new FilterDecorator(dimEncMap, filterConstantsTreatment);
         byte[] bytes = TupleFilterSerializer.serialize(rootFilter, filterDecorator, DictCodeSystem.INSTANCE);
@@ -44,8 +43,7 @@ public class CoprocessorFilter {
     }
 
     public static byte[] serialize(CoprocessorFilter o) {
-        return (o.filter == null) ? BytesUtil.EMPTY_BYTE_ARRAY
-                : TupleFilterSerializer.serialize(o.filter, DictCodeSystem.INSTANCE);
+        return (o.filter == null) ? BytesUtil.EMPTY_BYTE_ARRAY : TupleFilterSerializer.serialize(o.filter, DictCodeSystem.INSTANCE);
     }
 
     public static CoprocessorFilter deserialize(byte[] filterBytes) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorProjector.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorProjector.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorProjector.java
index 65215f6..f6332f4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorProjector.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorProjector.java
@@ -36,8 +36,7 @@ import org.apache.kylin.metadata.model.TblColRef;
  */
 public class CoprocessorProjector {
 
-    public static CoprocessorProjector makeForObserver(final CubeSegment cubeSegment, final Cuboid cuboid,
-            final Collection<TblColRef> dimensionColumns) {
+    public static CoprocessorProjector makeForObserver(final CubeSegment cubeSegment, final Cuboid cuboid, final Collection<TblColRef> dimensionColumns) {
 
         RowKeyEncoder rowKeyMaskEncoder = new RowKeyEncoder(cubeSegment, cuboid) {
             @Override
@@ -46,8 +45,7 @@ public class CoprocessorProjector {
             }
 
             @Override
-            protected void fillColumnValue(TblColRef column, int columnLen, String valueStr, byte[] outputValue,
-                    int outputValueOffset) {
+            protected void fillColumnValue(TblColRef column, int columnLen, String valueStr, byte[] outputValue, int outputValueOffset) {
                 byte bits = dimensionColumns.contains(column) ? (byte) 0xff : 0x00;
                 Arrays.fill(outputValue, outputValueOffset, outputValueOffset + columnLen, bits);
             }
@@ -56,6 +54,7 @@ public class CoprocessorProjector {
         byte[] mask = rowKeyMaskEncoder.encode(new String[cuboid.getColumns().size()]);
         return new CoprocessorProjector(mask, dimensionColumns.size() != 0);
     }
+  
 
     public static byte[] serialize(CoprocessorProjector o) {
         ByteBuffer buf = ByteBuffer.allocate(BytesSerializer.SERIALIZE_BUFFER_SIZE);


[65/67] [abbrv] kylin git commit: minor, add guava dependency to module server-base

Posted by li...@apache.org.
minor, add guava dependency to module server-base


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b6b71e86
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b6b71e86
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b6b71e86

Branch: refs/heads/master
Commit: b6b71e86b87982d0c986999122e4d8eb96187f3d
Parents: f6cdd62
Author: Cheng Wang <ch...@kyligence.io>
Authored: Sun Jun 4 20:24:25 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Sun Jun 4 21:56:06 2017 +0800

----------------------------------------------------------------------
 server-base/pom.xml | 10 ++++++++++
 1 file changed, 10 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b6b71e86/server-base/pom.xml
----------------------------------------------------------------------
diff --git a/server-base/pom.xml b/server-base/pom.xml
index c7247a5..8110027 100644
--- a/server-base/pom.xml
+++ b/server-base/pom.xml
@@ -63,6 +63,16 @@
             <artifactId>kylin-source-kafka</artifactId>
         </dependency>
 
+        <!--
+            The Hadoop-provided Guava may not satisfy Calcite's requirement.
+            E.g. the provided jar could be: /usr/hdp/2.4.0.0-169/hadoop/lib/guava-11.0.2.jar - com/google/common/collect/ImmutableSortedMap.class
+            while Calcite currently requires Guava 14.0.
+         -->
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <scope>compile</scope>
+        </dependency>
 
         <dependency>
             <groupId>net.sf.ehcache</groupId>

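As a side note on the classpath conflict the new pom.xml comment describes: a quick way to confirm which Guava jar actually wins at runtime is to ask the JVM where ImmutableSortedMap was loaded from. The sketch below is illustrative only and not part of this commit; the class and jar names are the ones cited in the comment, and the class name GuavaOnClasspathCheck is made up for the example.

    import com.google.common.collect.ImmutableSortedMap;

    // Illustrative sketch only: prints the jar that actually provides Guava's
    // ImmutableSortedMap, so a conflict like Hadoop's guava-11.0.2 vs. the
    // Guava dependency declared above becomes visible at runtime.
    public class GuavaOnClasspathCheck {
        public static void main(String[] args) {
            java.security.CodeSource src =
                    ImmutableSortedMap.class.getProtectionDomain().getCodeSource();
            // CodeSource can be null for bootstrap classes; Guava is a normal jar,
            // so this typically prints a path ending in guava-<version>.jar
            System.out.println(src == null ? "bootstrap classpath" : src.getLocation());
        }
    }

Running this on a server node with the Hadoop classpath in place shows whether the HDP-bundled guava-11.0.2 or the newer Guava required by Calcite is being picked up.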

[47/67] [abbrv] kylin git commit: minor, refine scripts call

Posted by li...@apache.org.
minor, refine scripts call


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c1e2143d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c1e2143d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c1e2143d

Branch: refs/heads/master
Commit: c1e2143d06dfe7af714e60c0e5daa85b937ca59b
Parents: cfff185
Author: lidongsjtu <li...@apache.org>
Authored: Mon May 29 11:27:07 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Mon May 29 13:30:38 2017 +0800

----------------------------------------------------------------------
 build/bin/check-env.sh            | 4 +++-
 build/bin/find-hive-dependency.sh | 4 +++-
 build/bin/sample.sh               | 6 ++++--
 3 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c1e2143d/build/bin/check-env.sh
----------------------------------------------------------------------
diff --git a/build/bin/check-env.sh b/build/bin/check-env.sh
index a3170c7..7534d12 100644
--- a/build/bin/check-env.sh
+++ b/build/bin/check-env.sh
@@ -18,7 +18,9 @@
 #
 
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
-source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/find-hadoop-conf-dir.sh
+
+## ${dir} is assigned to $KYLIN_HOME/bin in header.sh
+source ${dir}/find-hadoop-conf-dir.sh
 
 if [ -z "${kylin_hadoop_conf_dir}" ]; then
     hadoop_conf_param=

http://git-wip-us.apache.org/repos/asf/kylin/blob/c1e2143d/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 6d8fa65..8841687 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -18,7 +18,9 @@
 #
 
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
-source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/load-hive-conf.sh
+
+## ${dir} is assigned to $KYLIN_HOME/bin in header.sh
+source ${dir}/load-hive-conf.sh
 
 echo Retrieving hive dependency...
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/c1e2143d/build/bin/sample.sh
----------------------------------------------------------------------
diff --git a/build/bin/sample.sh b/build/bin/sample.sh
index b9c4ea3..bc9ba5a 100644
--- a/build/bin/sample.sh
+++ b/build/bin/sample.sh
@@ -18,8 +18,10 @@
 #
 
 source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/header.sh
-source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/find-hadoop-conf-dir.sh
-source $(cd -P -- "$(dirname -- "$0")" && pwd -P)/load-hive-conf.sh
+
+## ${dir} is assigned to $KYLIN_HOME/bin in header.sh
+source ${dir}/find-hadoop-conf-dir.sh
+source ${dir}/load-hive-conf.sh
 
 source ${dir}/check-env.sh "if-not-yet"
 job_jar=`find -L ${KYLIN_HOME}/lib/ -name kylin-job*.jar`


[31/67] [abbrv] kylin git commit: KYLIN-2619 refine Broadcaster

Posted by li...@apache.org.
KYLIN-2619 refine Broadcaster


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/87d5d8db
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/87d5d8db
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/87d5d8db

Branch: refs/heads/master
Commit: 87d5d8db276b590c86c473db5997414285b2d689
Parents: 3fbf90a
Author: shaofengshi <sh...@apache.org>
Authored: Sat May 27 14:27:39 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Sat May 27 16:20:07 2017 +0800

----------------------------------------------------------------------
 .../kylin/common/restclient/RestClient.java     | 10 +++----
 .../kylin/metadata/cachesync/Broadcaster.java   | 28 +++++++++++---------
 2 files changed, 18 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/87d5d8db/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
index fc34a6b..13490cb 100644
--- a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
@@ -33,7 +33,6 @@ import org.apache.http.HttpResponse;
 import org.apache.http.auth.AuthScope;
 import org.apache.http.auth.UsernamePasswordCredentials;
 import org.apache.http.client.CredentialsProvider;
-import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
@@ -119,19 +118,16 @@ public class RestClient {
     }
 
     public void wipeCache(String entity, String event, String cacheKey) throws IOException {
-        wipeCache(client, baseUrl, entity, event, cacheKey);
-    }
-
-    public static void wipeCache(HttpClient client, String baseUrl, String entity, String event, String cacheKey) throws IOException {
         String url = baseUrl + "/cache/" + entity + "/" + cacheKey + "/" + event;
         HttpPut request = new HttpPut(url);
 
         try {
             HttpResponse response = client.execute(request);
-            String msg = EntityUtils.toString(response.getEntity());
 
-            if (response.getStatusLine().getStatusCode() != 200)
+            if (response.getStatusLine().getStatusCode() != 200) {
+                String msg = EntityUtils.toString(response.getEntity());
                 throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with cache wipe url " + url + "\n" + msg);
+            }
         } catch (Exception ex) {
             throw new IOException(ex);
         } finally {

http://git-wip-us.apache.org/repos/asf/kylin/blob/87d5d8db/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
index 35d2f42..4a8c6d3 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
@@ -29,14 +29,12 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.http.client.HttpClient;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.params.BasicHttpParams;
-import org.apache.http.params.HttpConnectionParams;
-import org.apache.http.params.HttpParams;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.restclient.RestClient;
 import org.apache.kylin.common.util.DaemonThreadFactory;
@@ -115,23 +113,27 @@ public class Broadcaster {
         Executors.newSingleThreadExecutor(new DaemonThreadFactory()).execute(new Runnable() {
             @Override
             public void run() {
-                final HttpParams httpParams = new BasicHttpParams();
-                HttpConnectionParams.setConnectionTimeout(httpParams, 3000);
+                final Map<String, RestClient> restClientMap = Maps.newHashMap();
+                final ExecutorService wipingCachePool = new ThreadPoolExecutor(1, 10, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
 
-                final HttpClient client = new DefaultHttpClient(httpParams);
-
-                final ExecutorService wipingCachePool = Executors.newFixedThreadPool(3, new DaemonThreadFactory());
                 while (true) {
                     try {
                         final BroadcastEvent broadcastEvent = broadcastEvents.takeFirst();
-                        logger.debug("Servers in the cluster: " + Arrays.toString(config.getRestServers()));
+                        String[] restServers = config.getRestServers();
+                        logger.info("Servers in the cluster: " + Arrays.toString(restServers));
+                        for (final String node : restServers) {
+                            if (restClientMap.containsKey(node) == false) {
+                                restClientMap.put(node, new RestClient(node));
+                            }
+                        }
+
                         logger.info("Announcing new broadcast event: " + broadcastEvent);
-                        for (final String address : config.getRestServers()) {
+                        for (final String node : restServers) {
                             wipingCachePool.execute(new Runnable() {
                                 @Override
                                 public void run() {
                                     try {
-                                        RestClient.wipeCache(client, RestClient.SCHEME_HTTP + address + RestClient.KYLIN_API_PATH,  broadcastEvent.getEntity(), broadcastEvent.getEvent(), broadcastEvent.getCacheKey());
+                                        restClientMap.get(node).wipeCache(broadcastEvent.getEntity(), broadcastEvent.getEvent(), broadcastEvent.getCacheKey());
                                     } catch (IOException e) {
                                         logger.warn("Thread failed during wipe cache at " + broadcastEvent, e);
                                     }

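To make the refactor in the two diffs above easier to follow, here is a condensed sketch (not Kylin source) of the new shape of the broadcast loop: the static RestClient.wipeCache(HttpClient, ...) overload is removed, each REST server gets one client that is cached across broadcast events, and the wipe calls are fanned out on a bounded ThreadPoolExecutor (core 1, max 10 workers, 60s keep-alive). The NodeClient interface and newClientFor factory are stand-ins introduced only for this illustration; in Kylin the real types are RestClient and Broadcaster.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class BroadcastSketch {

        /** Stand-in for Kylin's RestClient; only the call used by the broadcaster is modeled. */
        interface NodeClient {
            void wipeCache(String entity, String event, String cacheKey) throws Exception;
        }

        private final Map<String, NodeClient> clientPerNode = new ConcurrentHashMap<String, NodeClient>();

        // Mirrors the diff: core 1, max 10 workers, 60s keep-alive, unbounded work queue.
        private final ExecutorService wipingCachePool =
                new ThreadPoolExecutor(1, 10, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

        void announce(String[] restServers, final String entity, final String event, final String cacheKey) {
            for (final String node : restServers) {
                // Reuse the client created for this node by an earlier broadcast event.
                if (!clientPerNode.containsKey(node)) {
                    clientPerNode.put(node, newClientFor(node));
                }
                final NodeClient client = clientPerNode.get(node);
                wipingCachePool.execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            client.wipeCache(entity, event, cacheKey);
                        } catch (Exception e) {
                            System.err.println("Failed to wipe cache at " + node + ": " + e);
                        }
                    }
                });
            }
        }

        private NodeClient newClientFor(final String node) {
            // Hypothetical factory for the sketch; in Kylin this is `new RestClient(node)`,
            // whose wipeCache issues HTTP PUT {baseUrl}/cache/{entity}/{cacheKey}/{event}.
            return new NodeClient() {
                @Override
                public void wipeCache(String entity, String event, String cacheKey) {
                    // no-op placeholder
                }
            };
        }
    }

Caching one client per node keeps the instance-based wipeCache API (the static HttpClient-taking variant is dropped in RestClient.java) without constructing a new client on every event, and the bounded pool caps the fan-out when many events queue up.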

[18/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileAggFunc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileAggFunc.java b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileAggFunc.java
index 2470891..d3cec8f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileAggFunc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileAggFunc.java
@@ -20,7 +20,7 @@ package org.apache.kylin.measure.percentile;
 
 import org.apache.kylin.measure.ParamAsMeasureCount;
 
-public class PercentileAggFunc implements ParamAsMeasureCount {
+public class PercentileAggFunc implements ParamAsMeasureCount{
     public static PercentileCounter init() {
         return null;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
index dded4ce..f86a796 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileCounter.java
@@ -107,7 +107,6 @@ public class PercentileCounter implements Serializable {
         out.writeInt(bound);
         out.write(buf.array(), 0, bound);
     }
-
     private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
         in.defaultReadObject();
         int bound = in.readInt();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileMeasureType.java
index b4ad5df..45ebe89 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/percentile/PercentileMeasureType.java
@@ -71,8 +71,7 @@ public class PercentileMeasureType extends MeasureType<PercentileCounter> {
             PercentileCounter current = new PercentileCounter(dataType.getPrecision());
 
             @Override
-            public PercentileCounter valueOf(String[] values, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public PercentileCounter valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 PercentileCounter counter = current;
                 counter.clear();
                 for (String v : values) {
@@ -94,8 +93,7 @@ public class PercentileMeasureType extends MeasureType<PercentileCounter> {
         return true;
     }
 
-    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap
-            .<String, Class<?>> of(PercentileMeasureType.FUNC_PERCENTILE, PercentileAggFunc.class);
+    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.<String, Class<?>> of(PercentileMeasureType.FUNC_PERCENTILE, PercentileAggFunc.class);
 
     @Override
     public Map<String, Class<?>> getRewriteCalciteAggrFunctions() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawMeasureType.java
index c771e75..31f35e9 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawMeasureType.java
@@ -108,8 +108,7 @@ public class RawMeasureType extends MeasureType<List<ByteArray>> {
 
             //encode measure value to dictionary
             @Override
-            public List<ByteArray> valueOf(String[] values, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public List<ByteArray> valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 if (values.length != 1)
                     throw new IllegalArgumentException();
 
@@ -130,8 +129,7 @@ public class RawMeasureType extends MeasureType<List<ByteArray>> {
 
             //merge measure dictionary
             @Override
-            public List<ByteArray> reEncodeDictionary(List<ByteArray> value, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
+            public List<ByteArray> reEncodeDictionary(List<ByteArray> value, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
                 TblColRef colRef = getRawColumn(measureDesc.getFunction());
                 Dictionary<String> sourceDict = oldDicts.get(colRef);
                 Dictionary<String> mergedDict = newDicts.get(colRef);
@@ -169,8 +167,7 @@ public class RawMeasureType extends MeasureType<List<ByteArray>> {
         return Collections.singletonList(literalCol);
     }
 
-    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
-            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc measureDesc) {
+    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc measureDesc) {
         //is raw query
         if (!digest.isRawQuery)
             return null;
@@ -231,8 +228,7 @@ public class RawMeasureType extends MeasureType<List<ByteArray>> {
     }
 
     @Override
-    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo tupleInfo,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo tupleInfo, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         final TblColRef literalCol = getRawColumn(function);
         final Dictionary<String> rawColDict = dictionaryMap.get(literalCol);
         final int literalTupleIdx = tupleInfo.hasColumn(literalCol) ? tupleInfo.getColumnIndex(literalCol) : -1;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawSerializer.java b/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawSerializer.java
index b3cabfa..68a0273 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/raw/RawSerializer.java
@@ -81,11 +81,9 @@ public class RawSerializer extends DataTypeSerializer<List<ByteArray>> {
             BytesUtil.writeVInt(values.size(), out);
             for (ByteArray array : values) {
                 if (!out.hasRemaining() || out.remaining() < array.length()) {
-                    throw new RuntimeException(
-                            "BufferOverflow! Please use one higher cardinality column for dimension column when build RAW cube!");
+                    throw new RuntimeException("BufferOverflow! Please use one higher cardinality column for dimension column when build RAW cube!");
                 }
-                BytesUtil.writeByteArray(
-                        BytesUtil.subarray(array.array(), array.offset(), array.offset() + array.length()), out);
+                BytesUtil.writeByteArray(BytesUtil.subarray(array.array(), array.offset(), array.offset() + array.length()), out);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/topn/Counter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/Counter.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/Counter.java
index 23f32e0..d8fdc6e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/Counter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/Counter.java
@@ -29,7 +29,7 @@ import java.io.Serializable;
  * 
  * @param <T>
  */
-public class Counter<T> implements Externalizable, Serializable {
+public class Counter<T> implements Externalizable, Serializable{
 
     protected T item;
     protected double count;
@@ -50,6 +50,7 @@ public class Counter<T> implements Externalizable, Serializable {
         this.count = count;
     }
 
+
     public T getItem() {
         return item;
     }
@@ -61,7 +62,6 @@ public class Counter<T> implements Externalizable, Serializable {
     public void setCount(double count) {
         this.count = count;
     }
-
     @Override
     public String toString() {
         return item + ":" + count;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index 7f74bed..f936cb8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -127,8 +127,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
             private boolean needReEncode = true;
 
             @Override
-            public TopNCounter<ByteArray> valueOf(String[] values, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public TopNCounter<ByteArray> valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 double counter = values[0] == null ? 0 : Double.parseDouble(values[0]);
 
                 if (dimensionEncodings == null) {
@@ -147,23 +146,20 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
                 int offset = 0;
                 for (int i = 0; i < dimensionEncodings.length; i++) {
                     if (values[i + 1] == null) {
-                        Arrays.fill(key.array(), offset, offset + dimensionEncodings[i].getLengthOfEncoding(),
-                                DimensionEncoding.NULL);
+                        Arrays.fill(key.array(), offset, offset + dimensionEncodings[i].getLengthOfEncoding(), DimensionEncoding.NULL);
                     } else {
                         dimensionEncodings[i].encode(values[i + 1], key.array(), offset);
                     }
                     offset += dimensionEncodings[i].getLengthOfEncoding();
                 }
 
-                TopNCounter<ByteArray> topNCounter = new TopNCounter<ByteArray>(
-                        dataType.getPrecision() * TopNCounter.EXTRA_SPACE_RATE);
+                TopNCounter<ByteArray> topNCounter = new TopNCounter<ByteArray>(dataType.getPrecision() * TopNCounter.EXTRA_SPACE_RATE);
                 topNCounter.offer(key, counter);
                 return topNCounter;
             }
 
             @Override
-            public TopNCounter<ByteArray> reEncodeDictionary(TopNCounter<ByteArray> value, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
+            public TopNCounter<ByteArray> reEncodeDictionary(TopNCounter<ByteArray> value, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
                 TopNCounter<ByteArray> topNCounter = value;
 
                 if (newDimensionEncodings == null) {
@@ -200,8 +196,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
                     int offset = c.getItem().offset();
                     int innerBuffOffset = 0;
                     for (int i = 0; i < dimensionEncodings.length; i++) {
-                        String dimValue = dimensionEncodings[i].decode(c.getItem().array(), offset,
-                                dimensionEncodings[i].getLengthOfEncoding());
+                        String dimValue = dimensionEncodings[i].decode(c.getItem().array(), offset, dimensionEncodings[i].getLengthOfEncoding());
                         newDimensionEncodings[i].encode(dimValue, newIdBuf, bufOffset + innerBuffOffset);
                         innerBuffOffset += newDimensionEncodings[i].getLengthOfEncoding();
                         offset += dimensionEncodings[i].getLengthOfEncoding();
@@ -237,8 +232,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
     }
 
     @Override
-    public CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
-            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc topN) {
+    public CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc topN) {
         // TopN measure can (and only can) provide one numeric measure and one literal dimension
         // e.g. select seller, sum(gmv) from ... group by seller order by 2 desc limit 100
 
@@ -304,8 +298,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
         if (sum.isSum() == false)
             return false;
 
-        if (sum.getParameter() == null || sum.getParameter().getColRefs() == null
-                || sum.getParameter().getColRefs().size() == 0)
+        if (sum.getParameter() == null || sum.getParameter().getColRefs() == null || sum.getParameter().getColRefs().size() == 0)
             return false;
 
         TblColRef sumCol = sum.getParameter().getColRefs().get(0);
@@ -363,8 +356,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
     }
 
     @Override
-    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo tupleInfo,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo tupleInfo, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         final List<TblColRef> literalCols = getTopNLiteralColumn(function);
         final TblColRef numericCol = getTopNNumericColumn(function);
         final int[] literalTupleIdx = new int[literalCols.size()];
@@ -407,8 +399,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
                 Counter<ByteArray> counter = topNCounterIterator.next();
                 int offset = counter.getItem().offset();
                 for (int i = 0; i < dimensionEncodings.length; i++) {
-                    String colValue = dimensionEncodings[i].decode(counter.getItem().array(), offset,
-                            dimensionEncodings[i].getLengthOfEncoding());
+                    String colValue = dimensionEncodings[i].decode(counter.getItem().array(), offset, dimensionEncodings[i].getLengthOfEncoding());
                     tuple.setDimensionValue(literalTupleIdx[i], colValue);
                     offset += dimensionEncodings[i].getLengthOfEncoding();
                 }
@@ -417,8 +408,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
         };
     }
 
-    private static DimensionEncoding[] getDimensionEncodings(FunctionDesc function, List<TblColRef> literalCols,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    private static DimensionEncoding[] getDimensionEncodings(FunctionDesc function, List<TblColRef> literalCols, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         final DimensionEncoding[] dimensionEncodings = new DimensionEncoding[literalCols.size()];
         for (int i = 0; i < literalCols.size(); i++) {
             TblColRef colRef = literalCols.get(i);
@@ -434,17 +424,15 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
                     try {
                         encodingVersion = Integer.parseInt(encodingVersionStr);
                     } catch (NumberFormatException e) {
-                        throw new RuntimeException(TopNMeasureType.CONFIG_ENCODING_VERSION_PREFIX + colRef.getName()
-                                + " has to be an integer");
+                        throw new RuntimeException(TopNMeasureType.CONFIG_ENCODING_VERSION_PREFIX + colRef.getName() + " has to be an integer");
                     }
                 }
                 Object[] encodingConf = DimensionEncoding.parseEncodingConf(encoding);
                 String encodingName = (String) encodingConf[0];
                 String[] encodingArgs = (String[]) encodingConf[1];
 
-                encodingArgs = DateDimEnc.replaceEncodingArgs(encoding, encodingArgs, encodingName,
-                        literalCols.get(i).getType());
-
+                encodingArgs = DateDimEnc.replaceEncodingArgs(encoding, encodingArgs, encodingName, literalCols.get(i).getType());
+                
                 dimensionEncodings[i] = DimensionEncodingFactory.create(encodingName, encodingArgs, encodingVersion);
             }
         }
@@ -471,6 +459,7 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
         return FUNC_TOP_N.equalsIgnoreCase(functionDesc.getExpression());
     }
 
+
     /**
      * Get the encoding name and version for the given col from Measure FunctionDesc
      * @param functionDesc
@@ -479,12 +468,11 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
      */
     public static final Pair<String, String> getEncoding(FunctionDesc functionDesc, TblColRef tblColRef) {
         String encoding = functionDesc.getConfiguration().get(CONFIG_ENCODING_PREFIX + tblColRef.getIdentity());
-        String encodingVersion = functionDesc.getConfiguration()
-                .get(CONFIG_ENCODING_VERSION_PREFIX + tblColRef.getIdentity());
+        String encodingVersion =functionDesc.getConfiguration().get(CONFIG_ENCODING_VERSION_PREFIX + tblColRef.getIdentity());
         if (StringUtils.isEmpty(encoding)) {
             // for backward compatibility
             encoding = functionDesc.getConfiguration().get(CONFIG_ENCODING_PREFIX + tblColRef.getName());
-            encodingVersion = functionDesc.getConfiguration().get(CONFIG_ENCODING_VERSION_PREFIX + tblColRef.getName());
+            encodingVersion =functionDesc.getConfiguration().get(CONFIG_ENCODING_VERSION_PREFIX + tblColRef.getName());
         }
 
         return new Pair<>(encoding, encodingVersion);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
index 4965eab..f8e6832 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/MetadataManager.java
@@ -67,12 +67,9 @@ public class MetadataManager {
     private static final Logger logger = LoggerFactory.getLogger(MetadataManager.class);
 
     public static final Serializer<TableDesc> TABLE_SERIALIZER = new JsonSerializer<TableDesc>(TableDesc.class);
-    public static final Serializer<TableExtDesc> TABLE_EXT_SERIALIZER = new JsonSerializer<TableExtDesc>(
-            TableExtDesc.class);
-    public static final Serializer<DataModelDesc> MODELDESC_SERIALIZER = new JsonSerializer<DataModelDesc>(
-            DataModelDesc.class);
-    public static final Serializer<ExternalFilterDesc> EXTERNAL_FILTER_DESC_SERIALIZER = new JsonSerializer<ExternalFilterDesc>(
-            ExternalFilterDesc.class);
+    public static final Serializer<TableExtDesc> TABLE_EXT_SERIALIZER = new JsonSerializer<TableExtDesc>(TableExtDesc.class);
+    public static final Serializer<DataModelDesc> MODELDESC_SERIALIZER = new JsonSerializer<DataModelDesc>(DataModelDesc.class);
+    public static final Serializer<ExternalFilterDesc> EXTERNAL_FILTER_DESC_SERIALIZER = new JsonSerializer<ExternalFilterDesc>(ExternalFilterDesc.class);
 
     // static cached instances
     private static final ConcurrentMap<KylinConfig, MetadataManager> CACHE = new ConcurrentHashMap<KylinConfig, MetadataManager>();
@@ -324,8 +321,7 @@ public class MetadataManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if (event == Event.DROP)
                 srcTableMap.removeLocal(cacheKey);
             else
@@ -344,8 +340,7 @@ public class MetadataManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if (event == Event.DROP)
                 srcTableExdMap.removeLocal(cacheKey);
             else
@@ -372,8 +367,7 @@ public class MetadataManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if (event == Event.DROP)
                 dataModelDescMap.removeLocal(cacheKey);
             else
@@ -392,8 +386,7 @@ public class MetadataManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if (event == Event.DROP)
                 extFilterMap.removeLocal(cacheKey);
             else
@@ -403,13 +396,11 @@ public class MetadataManager {
 
     private void reloadAllTableExt() throws IOException {
         ResourceStore store = getStore();
-        logger.debug("Reloading Table_exd info from folder "
-                + store.getReadableResourcePath(ResourceStore.TABLE_EXD_RESOURCE_ROOT));
+        logger.debug("Reloading Table_exd info from folder " + store.getReadableResourcePath(ResourceStore.TABLE_EXD_RESOURCE_ROOT));
 
         srcTableExdMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.TABLE_EXD_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.TABLE_EXD_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             reloadTableExtAt(path);
         }
@@ -470,13 +461,11 @@ public class MetadataManager {
 
     private void reloadAllExternalFilter() throws IOException {
         ResourceStore store = getStore();
-        logger.debug("Reloading ExternalFilter from folder "
-                + store.getReadableResourcePath(ResourceStore.EXTERNAL_FILTER_RESOURCE_ROOT));
+        logger.debug("Reloading ExternalFilter from folder " + store.getReadableResourcePath(ResourceStore.EXTERNAL_FILTER_RESOURCE_ROOT));
 
         extFilterMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.EXTERNAL_FILTER_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.EXTERNAL_FILTER_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             reloadExternalFilterAt(path);
         }
@@ -486,13 +475,11 @@ public class MetadataManager {
 
     private void reloadAllSourceTable() throws IOException {
         ResourceStore store = getStore();
-        logger.debug("Reloading SourceTable from folder "
-                + store.getReadableResourcePath(ResourceStore.TABLE_RESOURCE_ROOT));
+        logger.debug("Reloading SourceTable from folder " + store.getReadableResourcePath(ResourceStore.TABLE_RESOURCE_ROOT));
 
         srcTableMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.TABLE_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.TABLE_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             reloadSourceTableAt(path);
         }
@@ -593,13 +580,11 @@ public class MetadataManager {
 
     private void reloadAllDataModel() throws IOException {
         ResourceStore store = getStore();
-        logger.debug("Reloading DataModel from folder "
-                + store.getReadableResourcePath(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT));
+        logger.debug("Reloading DataModel from folder " + store.getReadableResourcePath(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT));
 
         dataModelDescMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
 
             try {
@@ -618,7 +603,7 @@ public class MetadataManager {
         ResourceStore store = getStore();
         try {
             DataModelDesc dataModelDesc = store.getResource(path, DataModelDesc.class, MODELDESC_SERIALIZER);
-
+            
             if (!dataModelDesc.isDraft())
                 dataModelDesc.init(config, this.getAllTablesMap(), this.ccInfoMap);
 
@@ -669,7 +654,7 @@ public class MetadataManager {
     }
 
     private DataModelDesc saveDataModelDesc(DataModelDesc dataModelDesc) throws IOException {
-
+        
         if (!dataModelDesc.isDraft())
             dataModelDesc.init(config, this.getAllTablesMap(), this.ccInfoMap);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryEntry.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryEntry.java b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryEntry.java
index be913f1..71ce24b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryEntry.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryEntry.java
@@ -44,8 +44,7 @@ public class BadQueryEntry extends RootPersistentEntity implements Comparable<Ba
     @JsonProperty("user")
     private String user;
 
-    public BadQueryEntry(String sql, String adj, long startTime, float runningSec, String server, String thread,
-            String user) {
+    public BadQueryEntry(String sql, String adj, long startTime, float runningSec, String server, String thread, String user) {
         this.updateRandomUuid();
         this.adj = adj;
         this.sql = sql;
@@ -152,7 +151,6 @@ public class BadQueryEntry extends RootPersistentEntity implements Comparable<Ba
 
     @Override
     public String toString() {
-        return "BadQueryEntry [ adj=" + adj + ", server=" + server + ", startTime="
-                + DateFormat.formatToTimeStr(startTime) + " ]";
+        return "BadQueryEntry [ adj=" + adj + ", server=" + server + ", startTime=" + DateFormat.formatToTimeStr(startTime) + " ]";
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
index d02f84b..c7eb133 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManager.java
@@ -33,8 +33,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class BadQueryHistoryManager {
-    public static final Serializer<BadQueryHistory> BAD_QUERY_INSTANCE_SERIALIZER = new JsonSerializer<>(
-            BadQueryHistory.class);
+    public static final Serializer<BadQueryHistory> BAD_QUERY_INSTANCE_SERIALIZER = new JsonSerializer<>(BadQueryHistory.class);
     private static final Logger logger = LoggerFactory.getLogger(BadQueryHistoryManager.class);
 
     private static final ConcurrentMap<KylinConfig, BadQueryHistoryManager> CACHE = new ConcurrentHashMap<>();
@@ -78,8 +77,7 @@ public class BadQueryHistoryManager {
     }
 
     public BadQueryHistory getBadQueriesForProject(String project) throws IOException {
-        BadQueryHistory badQueryHistory = getStore().getResource(getResourcePathForProject(project),
-                BadQueryHistory.class, BAD_QUERY_INSTANCE_SERIALIZER);
+        BadQueryHistory badQueryHistory = getStore().getResource(getResourcePathForProject(project), BadQueryHistory.class, BAD_QUERY_INSTANCE_SERIALIZER);
         if (badQueryHistory == null) {
             badQueryHistory = new BadQueryHistory(project);
         }
@@ -119,16 +117,12 @@ public class BadQueryHistoryManager {
         return badQueryHistory;
     }
 
-    public BadQueryHistory addEntryToProject(String sql, long startTime, String adj, float runningSecs, String server,
-            String threadName, String user, String project) throws IOException {
-        return addEntryToProject(new BadQueryEntry(sql, adj, startTime, runningSecs, server, threadName, user),
-                project);
+    public BadQueryHistory addEntryToProject(String sql, long startTime, String adj, float runningSecs, String server, String threadName, String user, String project) throws IOException {
+        return addEntryToProject(new BadQueryEntry(sql, adj, startTime, runningSecs, server, threadName, user), project);
     }
 
-    public BadQueryHistory updateEntryToProject(String sql, long startTime, String adj, float runningSecs,
-            String server, String threadName, String user, String project) throws IOException {
-        return updateEntryToProject(new BadQueryEntry(sql, adj, startTime, runningSecs, server, threadName, user),
-                project);
+    public BadQueryHistory updateEntryToProject(String sql, long startTime, String adj, float runningSecs, String server, String threadName, String user, String project) throws IOException {
+        return updateEntryToProject(new BadQueryEntry(sql, adj, startTime, runningSecs, server, threadName, user), project);
     }
 
     public void removeBadQueryHistory(String project) throws IOException {
@@ -138,4 +132,4 @@ public class BadQueryHistoryManager {
     public String getResourcePathForProject(String project) {
         return ResourceStore.BAD_QUERY_RESOURCE_ROOT + "/" + project + MetadataConstants.FILE_SURFIX;
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
index 39ad830..1394f7b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
@@ -116,8 +116,7 @@ public class Broadcaster {
                 for (String node : config.getRestServers()) {
                     restClients.add(new RestClient(node));
                 }
-                final ExecutorService wipingCachePool = Executors.newFixedThreadPool(restClients.size(),
-                        new DaemonThreadFactory());
+                final ExecutorService wipingCachePool = Executors.newFixedThreadPool(restClients.size(), new DaemonThreadFactory());
                 while (true) {
                     try {
                         final BroadcastEvent broadcastEvent = broadcastEvents.takeFirst();
@@ -127,8 +126,7 @@ public class Broadcaster {
                                 @Override
                                 public void run() {
                                     try {
-                                        restClient.wipeCache(broadcastEvent.getEntity(), broadcastEvent.getEvent(),
-                                                broadcastEvent.getCacheKey());
+                                        restClient.wipeCache(broadcastEvent.getEntity(), broadcastEvent.getEvent(), broadcastEvent.getCacheKey());
                                     } catch (IOException e) {
                                         logger.warn("Thread failed during wipe cache at " + broadcastEvent, e);
                                     }
@@ -187,8 +185,7 @@ public class Broadcaster {
         if (list == null)
             return;
 
-        logger.debug("Broadcasting metadata change: entity=" + entity + ", event=" + event + ", cacheKey=" + cacheKey
-                + ", listeners=" + list);
+        logger.debug("Broadcasting metadata change: entity=" + entity + ", event=" + event + ", cacheKey=" + cacheKey + ", listeners=" + list);
 
         // prevents concurrent modification exception
         list = Lists.newArrayList(list);
@@ -218,8 +215,7 @@ public class Broadcaster {
             break;
         }
 
-        logger.debug(
-                "Done broadcasting metadata change: entity=" + entity + ", event=" + event + ", cacheKey=" + cacheKey);
+        logger.debug("Done broadcasting metadata change: entity=" + entity + ", event=" + event + ", cacheKey=" + cacheKey);
     }
 
     /**
@@ -276,8 +272,7 @@ public class Broadcaster {
         public void onProjectDataChange(Broadcaster broadcaster, String project) throws IOException {
         }
 
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
         }
     }
 
@@ -341,8 +336,7 @@ public class Broadcaster {
 
         @Override
         public String toString() {
-            return Objects.toStringHelper(this).add("entity", entity).add("event", event).add("cacheKey", cacheKey)
-                    .toString();
+            return Objects.toStringHelper(this).add("entity", entity).add("event", event).add("cacheKey", cacheKey).toString();
         }
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BigDecimalSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BigDecimalSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BigDecimalSerializer.java
index 1533f88..b5043f5 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BigDecimalSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BigDecimalSerializer.java
@@ -49,9 +49,7 @@ public class BigDecimalSerializer extends DataTypeSerializer<BigDecimal> {
     public void serialize(BigDecimal value, ByteBuffer out) {
         if (value.scale() > type.getScale()) {
             if (avoidVerbose++ % 10000 == 0) {
-                logger.warn("value's scale has exceeded the " + type.getScale()
-                        + ", cut it off, to ensure encoded value do not exceed maxLength " + maxLength + " times:"
-                        + (avoidVerbose));
+                logger.warn("value's scale has exceeded the " + type.getScale() + ", cut it off, to ensure encoded value do not exceed maxLength " + maxLength + " times:" + (avoidVerbose));
             }
             value = value.setScale(type.getScale(), BigDecimal.ROUND_HALF_EVEN);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
index a0bd892..3d485d2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
@@ -18,11 +18,11 @@
 
 package org.apache.kylin.metadata.datatype;
 
-import java.nio.ByteBuffer;
-
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.BooleanUtils;
 
+import java.nio.ByteBuffer;
+
 public class BooleanSerializer extends DataTypeSerializer<Long> {
 
     public final static String[] TRUE_VALUE_SET = { "true", "t", "on", "yes" };
@@ -58,8 +58,8 @@ public class BooleanSerializer extends DataTypeSerializer<Long> {
     @Override
     public Long valueOf(String str) {
         if (str == null)
-            return Long.valueOf(0L);
+           return Long.valueOf(0L);
         else
             return Long.valueOf(BooleanUtils.toInteger(ArrayUtils.contains(TRUE_VALUE_SET, str.toLowerCase())));
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
index bd7162c..83b2391 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
@@ -164,16 +164,14 @@ public class DataType implements Serializable {
                 try {
                     n = Integer.parseInt(parts[i]);
                 } catch (NumberFormatException e) {
-                    throw new IllegalArgumentException(
-                            "bad data type -- " + datatype + ", precision/scale not numeric");
+                    throw new IllegalArgumentException("bad data type -- " + datatype + ", precision/scale not numeric");
                 }
                 if (i == 0)
                     precision = n;
                 else if (i == 1)
                     scale = n;
                 else
-                    throw new IllegalArgumentException(
-                            "bad data type -- " + datatype + ", too many precision/scale parts");
+                    throw new IllegalArgumentException("bad data type -- " + datatype + ", too many precision/scale parts");
             }
         }
 
@@ -219,7 +217,7 @@ public class DataType implements Serializable {
     public boolean isTimeFamily() {
         return DATETIME_FAMILY.contains(name) && !isDate();
     }
-
+    
     public boolean isDate() {
         return name.equals("date");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
index af69e56..a4a35a4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataTypeSerializer.java
@@ -18,15 +18,14 @@
 
 package org.apache.kylin.metadata.datatype;
 
+import com.google.common.collect.Maps;
+import org.apache.kylin.common.util.BytesSerializer;
+
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
-import org.apache.kylin.common.util.BytesSerializer;
-
-import com.google.common.collect.Maps;
-
 /**
  * Note: the implementations MUST be thread-safe.
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DateTimeSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DateTimeSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DateTimeSerializer.java
index f4c97b6..5101766 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DateTimeSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DateTimeSerializer.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.metadata.datatype;
 
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.common.util.DateFormat;
 
+import java.nio.ByteBuffer;
+
 public class DateTimeSerializer extends DataTypeSerializer<Long> {
 
     public DateTimeSerializer(DataType type) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Int4Serializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Int4Serializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Int4Serializer.java
index a2ddd41..0e82e11 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Int4Serializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Int4Serializer.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.metadata.datatype;
 
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.common.util.BytesUtil;
 
+import java.nio.ByteBuffer;
+
 /**
  */
 public class Int4Serializer extends DataTypeSerializer<Integer> {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Long8Serializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Long8Serializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Long8Serializer.java
index fcc0675..7dd5aa7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Long8Serializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/Long8Serializer.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.metadata.datatype;
 
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.common.util.BytesUtil;
 
+import java.nio.ByteBuffer;
+
 /**
  */
 public class Long8Serializer extends DataTypeSerializer<Long> {
@@ -34,6 +34,7 @@ public class Long8Serializer extends DataTypeSerializer<Long> {
         BytesUtil.writeLong(value, out);
     }
 
+
     @Override
     public Long deserialize(ByteBuffer in) {
         return BytesUtil.readLong(in);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/LongSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/LongSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/LongSerializer.java
index f2c295b..605dcd7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/LongSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/LongSerializer.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.metadata.datatype;
 
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.common.util.BytesUtil;
 
+import java.nio.ByteBuffer;
+
 /**
  */
 public class LongSerializer extends DataTypeSerializer<Long> {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
index 06915da..767d868 100755
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
@@ -54,8 +54,7 @@ public class BuiltInFunctionTupleFilter extends FunctionTupleFilter {
     }
 
     public BuiltInFunctionTupleFilter(String name, FilterOperatorEnum filterOperatorEnum) {
-        super(Lists.<TupleFilter> newArrayList(),
-                filterOperatorEnum == null ? FilterOperatorEnum.FUNCTION : filterOperatorEnum);
+        super(Lists.<TupleFilter> newArrayList(), filterOperatorEnum == null ? FilterOperatorEnum.FUNCTION : filterOperatorEnum);
         this.methodParams = Lists.newArrayList();
 
         if (name != null) {
@@ -92,8 +91,7 @@ public class BuiltInFunctionTupleFilter extends FunctionTupleFilter {
         if (columnContainerFilter instanceof ColumnTupleFilter)
             methodParams.set(colPosition, (Serializable) input);
         else if (columnContainerFilter instanceof BuiltInFunctionTupleFilter)
-            methodParams.set(colPosition,
-                    (Serializable) ((BuiltInFunctionTupleFilter) columnContainerFilter).invokeFunction(input));
+            methodParams.set(colPosition, (Serializable) ((BuiltInFunctionTupleFilter) columnContainerFilter).invokeFunction(input));
         return method.invoke(null, (Object[]) (methodParams.toArray()));
     }
 
@@ -130,8 +128,7 @@ public class BuiltInFunctionTupleFilter extends FunctionTupleFilter {
                 if (!Primitives.isWrapperType(clazz))
                     methodParams.add(constVal);
                 else
-                    methodParams.add((Serializable) clazz
-                            .cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, constVal)));
+                    methodParams.add((Serializable) clazz.cast(clazz.getDeclaredMethod("valueOf", String.class).invoke(null, constVal)));
             } catch (Exception e) {
                 logger.warn("Reflection failed for methodParams. ", e);
                 isValidFunc = false;
@@ -194,4 +191,4 @@ public class BuiltInFunctionTupleFilter extends FunctionTupleFilter {
             isValidFunc = true;
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
index 9bf7349..9083212 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CaseTupleFilter.java
@@ -85,8 +85,7 @@ public class CaseTupleFilter extends TupleFilter implements IOptimizeableTupleFi
 
     @Override
     public String toString() {
-        return "CaseTupleFilter [when=" + whenFilters + ", then=" + thenFilters + ", else=" + elseFilter + ", children="
-                + children + "]";
+        return "CaseTupleFilter [when=" + whenFilters + ", then=" + thenFilters + ", else=" + elseFilter + ", children=" + children + "]";
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
index 77d3c33..ecb8e61 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/ColumnTupleFilter.java
@@ -42,7 +42,7 @@ import org.apache.kylin.metadata.tuple.IEvaluatableTuple;
 public class ColumnTupleFilter extends TupleFilter {
 
     private static final String _QUALIFIED_ = "_QUALIFIED_";
-
+    
     private TblColRef columnRef;
     private Object tupleValue;
     private List<Object> values;
@@ -92,7 +92,7 @@ public class ColumnTupleFilter extends TupleFilter {
     @Override
     public void serialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
         TableRef tableRef = columnRef.getTableRef();
-
+        
         if (tableRef == null) {
             // un-qualified column
             String table = columnRef.getTable();
@@ -109,13 +109,13 @@ public class ColumnTupleFilter extends TupleFilter {
         } else {
             // qualified column (from model)
             BytesUtil.writeUTFString(_QUALIFIED_, buffer);
-
+            
             String model = tableRef.getModel().getName();
             BytesUtil.writeUTFString(model, buffer);
-
+            
             String alias = tableRef.getAlias();
             BytesUtil.writeUTFString(alias, buffer);
-
+            
             String col = columnRef.getName();
             BytesUtil.writeUTFString(col, buffer);
         }
@@ -126,17 +126,17 @@ public class ColumnTupleFilter extends TupleFilter {
     public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
 
         String tableName = BytesUtil.readUTFString(buffer);
-
+        
         if (_QUALIFIED_.equals(tableName)) {
             // qualified column (from model)
             String model = BytesUtil.readUTFString(buffer);
             String alias = BytesUtil.readUTFString(buffer);
             String col = BytesUtil.readUTFString(buffer);
-
+            
             KylinConfig config = KylinConfig.getInstanceFromEnv();
             DataModelDesc modelDesc = MetadataManager.getInstance(config).getDataModelDesc(model);
             this.columnRef = modelDesc.findColumn(alias, col);
-
+            
         } else {
             // un-qualified column
             TableDesc tableDesc = null;
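
The serialize/deserialize hunks above reveal the column wire format: an un-qualified column is written as table name then column name, while a model-qualified column is prefixed with the _QUALIFIED_ marker followed by model, alias and column. Below is a standalone sketch of that tagged layout; DataOutputStream.writeUTF stands in for BytesUtil.writeUTFString, and the literal column and model names are made up for the example.

import java.io.*;

// Sketch of the two column layouts visible in ColumnTupleFilter.serialize():
// un-qualified -> [table, column]; qualified -> ["_QUALIFIED_", model, alias, column].
public class ColumnWireFormatSketch {
    private static final String QUALIFIED_MARKER = "_QUALIFIED_";

    static byte[] writeQualified(String model, String alias, String column) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeUTF(QUALIFIED_MARKER); // marker tells the reader which layout follows
        out.writeUTF(model);
        out.writeUTF(alias);
        out.writeUTF(column);
        return bytes.toByteArray();
    }

    static byte[] writeUnqualified(String table, String column) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeUTF(table);
        out.writeUTF(column);
        return bytes.toByteArray();
    }

    static String describe(byte[] data) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        String first = in.readUTF();
        if (QUALIFIED_MARKER.equals(first)) {
            // qualified column: Kylin resolves it through the model's alias map
            return "model=" + in.readUTF() + " alias=" + in.readUTF() + " col=" + in.readUTF();
        }
        // un-qualified column: the first token is already the table name
        return "table=" + first + " col=" + in.readUTF();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(describe(writeQualified("my_model", "FACT", "PRICE")));
        System.out.println(describe(writeUnqualified("DEFAULT.KYLIN_SALES", "PRICE")));
    }
}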

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
index 4739fe3..4875217 100755
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/CompareTupleFilter.java
@@ -105,8 +105,7 @@ public class CompareTupleFilter extends TupleFilter implements IOptimizeableTupl
     }
 
     private boolean needSwapOperator() {
-        return operator == FilterOperatorEnum.LT || operator == FilterOperatorEnum.GT
-                || operator == FilterOperatorEnum.LTE || operator == FilterOperatorEnum.GTE;
+        return operator == FilterOperatorEnum.LT || operator == FilterOperatorEnum.GT || operator == FilterOperatorEnum.LTE || operator == FilterOperatorEnum.GTE;
     }
 
     @Override
@@ -178,7 +177,7 @@ public class CompareTupleFilter extends TupleFilter implements IOptimizeableTupl
             else if (operator == FilterOperatorEnum.ISNULL)
                 return false;
         }
-
+        
         if (cs.isNull(firstCondValue)) {
             return false;
         }
@@ -226,8 +225,7 @@ public class CompareTupleFilter extends TupleFilter implements IOptimizeableTupl
     @Override
     public boolean isEvaluable() {
         return (column != null || (function != null && function.isEvaluable())) //
-                && (!conditionValues.isEmpty() || operator == FilterOperatorEnum.ISNOTNULL
-                        || operator == FilterOperatorEnum.ISNULL) //
+                && (!conditionValues.isEmpty() || operator == FilterOperatorEnum.ISNOTNULL || operator == FilterOperatorEnum.ISNULL) //
                 && secondColumn == null;
     }
 
@@ -237,13 +235,10 @@ public class CompareTupleFilter extends TupleFilter implements IOptimizeableTupl
             if (this.children != null && this.children.size() == 2 && //
                     this.children.get(0) instanceof ConstantTupleFilter && //
                     this.children.get(1) instanceof ConstantTupleFilter) {
-                if (((ConstantTupleFilter) this.children.get(0)).getValues()
-                        .equals(((ConstantTupleFilter) this.children.get(1)).getValues())) {
-                    return this.operator == FilterOperatorEnum.EQ ? CompareResultType.AlwaysTrue
-                            : CompareResultType.AlwaysFalse;
+                if (((ConstantTupleFilter) this.children.get(0)).getValues().equals(((ConstantTupleFilter) this.children.get(1)).getValues())) {
+                    return this.operator == FilterOperatorEnum.EQ ? CompareResultType.AlwaysTrue : CompareResultType.AlwaysFalse;
                 } else {
-                    return this.operator == FilterOperatorEnum.EQ ? CompareResultType.AlwaysFalse
-                            : CompareResultType.AlwaysTrue;
+                    return this.operator == FilterOperatorEnum.EQ ? CompareResultType.AlwaysFalse : CompareResultType.AlwaysTrue;
                 }
             }
         }
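
The last hunk above contains a constant-folding rule: when both children of a comparison are constants, EQ over identical value sets is always true and over different sets always false, with the opposite result for the non-EQ case. A small sketch of that rule in isolation, with sets of strings standing in for ConstantTupleFilter values:

import java.util.*;

// Sketch of the constant-folding branch in CompareTupleFilter: when both operands are
// constants, an EQ/NEQ comparison collapses to an always-true or always-false result.
// The enum constant names mirror the hunk; everything else is illustrative.
public class CompareFoldingSketch {
    enum CompareResultType { AlwaysTrue, AlwaysFalse, Unknown }

    static CompareResultType fold(boolean isEq, Set<String> left, Set<String> right) {
        if (left.equals(right)) {
            return isEq ? CompareResultType.AlwaysTrue : CompareResultType.AlwaysFalse;
        }
        return isEq ? CompareResultType.AlwaysFalse : CompareResultType.AlwaysTrue;
    }

    public static void main(String[] args) {
        Set<String> a = new HashSet<>(Arrays.asList("2017-06-05"));
        Set<String> b = new HashSet<>(Arrays.asList("2017-06-05"));
        Set<String> c = new HashSet<>(Arrays.asList("2017-06-06"));
        System.out.println(fold(true, a, b));  // AlwaysTrue:  EQ over identical constants
        System.out.println(fold(true, a, c));  // AlwaysFalse: EQ over different constants
        System.out.println(fold(false, a, c)); // AlwaysTrue:  NEQ over different constants
    }
}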

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FilterOptimizeTransformer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FilterOptimizeTransformer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FilterOptimizeTransformer.java
index de3ee1b..9ef9c2c 100755
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FilterOptimizeTransformer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/FilterOptimizeTransformer.java
@@ -66,8 +66,7 @@ public class FilterOptimizeTransformer implements ITupleFilterTransformer {
 
         if (logicalTupleFilter.getOperator() == TupleFilter.FilterOperatorEnum.OR) {
             @SuppressWarnings("unchecked")
-            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) logicalTupleFilter.getChildren()
-                    .listIterator();
+            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) logicalTupleFilter.getChildren().listIterator();
             while (childIterator.hasNext()) {
                 TupleFilter next = childIterator.next();
                 if (ConstantTupleFilter.TRUE == next) {
@@ -77,8 +76,7 @@ public class FilterOptimizeTransformer implements ITupleFilterTransformer {
             }
         } else if (logicalTupleFilter.getOperator() == TupleFilter.FilterOperatorEnum.AND) {
             @SuppressWarnings("unchecked")
-            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) logicalTupleFilter.getChildren()
-                    .listIterator();
+            ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) logicalTupleFilter.getChildren().listIterator();
             while (childIterator.hasNext()) {
                 TupleFilter next = childIterator.next();
                 if (ConstantTupleFilter.FALSE == next) {
@@ -118,7 +116,7 @@ public class FilterOptimizeTransformer implements ITupleFilterTransformer {
             if (newFilters.size() == 1) {
                 return newFilters.get(0);
             }
-
+            
             CaseTupleFilter newCaseTupleFilter = new CaseTupleFilter();
             newCaseTupleFilter.addChildren(newFilters);
             return newCaseTupleFilter;
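
The transformer hunks scan OR children for ConstantTupleFilter.TRUE and AND children for ConstantTupleFilter.FALSE. What happens once such a child is found lies outside the hunk; the usual simplification, assumed in the sketch below, is that the dominating constant decides the whole node.

import java.util.*;

// Illustrative boolean simplification in the spirit of FilterOptimizeTransformer.
// A null element stands for a child that is not a constant; the rule shown here
// (assumed, since the replacement code is outside the hunk) is that TRUE dominates
// an OR and FALSE dominates an AND.
public class BooleanSimplifySketch {
    static Boolean simplifyOr(List<Boolean> children) {
        for (Boolean child : children) {
            if (Boolean.TRUE.equals(child)) {
                return Boolean.TRUE; // TRUE anywhere in an OR decides the result
            }
        }
        return null; // no constant decision possible from this rule alone
    }

    static Boolean simplifyAnd(List<Boolean> children) {
        for (Boolean child : children) {
            if (Boolean.FALSE.equals(child)) {
                return Boolean.FALSE; // FALSE anywhere in an AND decides the result
            }
        }
        return null;
    }

    public static void main(String[] args) {
        System.out.println(simplifyOr(Arrays.asList(false, true, null)));  // true
        System.out.println(simplifyAnd(Arrays.asList(true, false, null))); // false
    }
}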

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
index 4d4931e..7893ed8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/LogicalTupleFilter.java
@@ -60,8 +60,7 @@ public class LogicalTupleFilter extends TupleFilter implements IOptimizeableTupl
     public TupleFilter reverse() {
         switch (operator) {
         case NOT:
-            throw new IllegalStateException(
-                    "NOT will be replaced in org.apache.kylin.query.relnode.OLAPFilterRel.TupleFilterVisitor");
+            throw new IllegalStateException("NOT will be replaced in org.apache.kylin.query.relnode.OLAPFilterRel.TupleFilterVisitor");
         case AND:
         case OR:
             LogicalTupleFilter reverse = new LogicalTupleFilter(REVERSE_OP_MAP.get(operator));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
index f7168df..5ba8726 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilter.java
@@ -42,9 +42,7 @@ public abstract class TupleFilter {
     static final Logger logger = LoggerFactory.getLogger(TupleFilter.class);
 
     public enum FilterOperatorEnum {
-        EQ(1), NEQ(2), GT(3), LT(4), GTE(5), LTE(6), ISNULL(7), ISNOTNULL(8), IN(9), NOTIN(10), AND(20), OR(21), NOT(
-                22), COLUMN(30), CONSTANT(31), DYNAMIC(
-                        32), EXTRACT(33), CASE(34), FUNCTION(35), MASSIN(36), EVAL_FUNC(37), UNSUPPORTED(38);
+        EQ(1), NEQ(2), GT(3), LT(4), GTE(5), LTE(6), ISNULL(7), ISNOTNULL(8), IN(9), NOTIN(10), AND(20), OR(21), NOT(22), COLUMN(30), CONSTANT(31), DYNAMIC(32), EXTRACT(33), CASE(34), FUNCTION(35), MASSIN(36), EVAL_FUNC(37), UNSUPPORTED(38);
 
         private final int value;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
index 7b27ce8..63153ef 100755
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/TupleFilterSerializer.java
@@ -78,8 +78,7 @@ public class TupleFilterSerializer {
         return result;
     }
 
-    private static void internalSerialize(TupleFilter filter, Decorator decorator, ByteBuffer buffer,
-            IFilterCodeSystem<?> cs) {
+    private static void internalSerialize(TupleFilter filter, Decorator decorator, ByteBuffer buffer, IFilterCodeSystem<?> cs) {
         if (decorator != null) { // give decorator a chance to manipulate the output filter
             filter = decorator.onSerialize(filter);
         }
@@ -204,8 +203,7 @@ public class TupleFilterSerializer {
             if (extendedTupleFilters.containsKey(op)) {
                 try {
                     filter = (TupleFilter) extendedTupleFilters.get(op).getConstructor().newInstance();
-                } catch (InstantiationException | IllegalAccessException | InvocationTargetException
-                        | NoSuchMethodException e) {
+                } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                     throw new RuntimeException(e);
                 }
             } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInTupleFilter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInTupleFilter.java
index 55ddec0..e4e311e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInTupleFilter.java
@@ -114,8 +114,7 @@ public class MassInTupleFilter extends FunctionTupleFilter {
             super.addChild(child);
             ColumnTupleFilter columnFilter = (ColumnTupleFilter) child;
             if (this.column != null) {
-                throw new IllegalStateException("Duplicate columns! old is " + column.getName() + " and new is "
-                        + columnFilter.getColumn().getName());
+                throw new IllegalStateException("Duplicate columns! old is " + column.getName() + " and new is " + columnFilter.getColumn().getName());
             }
             this.column = columnFilter.getColumn();
 
@@ -125,8 +124,7 @@ public class MassInTupleFilter extends FunctionTupleFilter {
 
             if (filterTableName == null) {
                 filterTableName = (String) child.getValues().iterator().next();
-                ExternalFilterDesc externalFilterDesc = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv())
-                        .getExtFilterDesc(filterTableName);
+                ExternalFilterDesc externalFilterDesc = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv()).getExtFilterDesc(filterTableName);
                 if (externalFilterDesc == null) {
                     throw new IllegalArgumentException("External filter named " + filterTableName + " is not found");
                 }
@@ -134,8 +132,7 @@ public class MassInTupleFilter extends FunctionTupleFilter {
                 filterTableResourceIdentifier = externalFilterDesc.getFilterResourceIdentifier();
             }
         } else {
-            throw new IllegalStateException(
-                    "MassInTupleFilter only has two children: one ColumnTupleFilter and one ConstantTupleFilter");
+            throw new IllegalStateException("MassInTupleFilter only has two children: one ColumnTupleFilter and one ConstantTupleFilter");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInValueProviderFactory.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInValueProviderFactory.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInValueProviderFactory.java
index 524aa15..18da37a 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInValueProviderFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/UDF/MassInValueProviderFactory.java
@@ -22,6 +22,5 @@ import org.apache.kylin.metadata.filter.function.Functions;
 import org.apache.kylin.metadata.model.TblColRef;
 
 public interface MassInValueProviderFactory {
-    MassInValueProvider getProvider(Functions.FilterTableType filterTableType, String filterResourceIdentifier,
-            TblColRef col);
+    MassInValueProvider getProvider(Functions.FilterTableType filterTableType, String filterResourceIdentifier, TblColRef col);
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
index fc40af9..31ee297 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
@@ -29,10 +29,7 @@ import org.apache.commons.lang3.reflect.MethodUtils;
 import com.google.common.collect.ImmutableMap;
 
 public enum BuiltInMethod {
-    UPPER(BuiltInMethod.class, "upper", String.class), LOWER(BuiltInMethod.class, "lower", String.class), SUBSTRING(
-            BuiltInMethod.class, "substring", String.class, int.class,
-            int.class), CHAR_LENGTH(BuiltInMethod.class, "charLength", String.class), LIKE(BuiltInMethod.class, "like",
-                    String.class, String.class), INITCAP(BuiltInMethod.class, "initcap", String.class);
+    UPPER(BuiltInMethod.class, "upper", String.class), LOWER(BuiltInMethod.class, "lower", String.class), SUBSTRING(BuiltInMethod.class, "substring", String.class, int.class, int.class), CHAR_LENGTH(BuiltInMethod.class, "charLength", String.class), LIKE(BuiltInMethod.class, "like", String.class, String.class), INITCAP(BuiltInMethod.class, "initcap", String.class);
     public final Method method;
     public static final ImmutableMap<String, BuiltInMethod> MAP;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
index 8bff0a4..861e530 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
@@ -49,8 +49,7 @@ public class Functions {
         if (SUPPORTED_UDF.containsKey(name)) {
             try {
                 return (TupleFilter) SUPPORTED_UDF.get(name).getConstructor().newInstance();
-            } catch (InstantiationException | IllegalAccessException | InvocationTargetException
-                    | NoSuchMethodException e) {
+            } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                 throw new RuntimeException("Failed to on constructing FunctionTupleFilter for " + name);
             }
         }
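
The hunk above shows the UDF lookup pattern: a map from function name to implementing class, a reflective no-arg construction, and one multi-catch wrapping all reflection failures into a RuntimeException. A self-contained sketch of the same pattern follows, with Runnable standing in for the TupleFilter hierarchy and a hypothetical "NOOP" entry in the registry.

import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;

// Sketch of the reflective lookup seen in Functions: name -> class registry,
// no-arg constructor invocation, multi-catch collapsed into a RuntimeException.
public class UdfRegistrySketch {
    private static final Map<String, Class<? extends Runnable>> SUPPORTED_UDF = new HashMap<>();
    static {
        SUPPORTED_UDF.put("NOOP", NoopUdf.class); // hypothetical registration
    }

    public static class NoopUdf implements Runnable {
        @Override
        public void run() {
            System.out.println("noop udf invoked");
        }
    }

    static Runnable newUdf(String name) {
        if (!SUPPORTED_UDF.containsKey(name)) {
            throw new IllegalArgumentException("Unsupported function: " + name);
        }
        try {
            return SUPPORTED_UDF.get(name).getConstructor().newInstance();
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException
                | NoSuchMethodException e) {
            throw new RuntimeException("Failed on constructing function " + name, e);
        }
    }

    public static void main(String[] args) {
        newUdf("NOOP").run();
    }
}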

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Like.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Like.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Like.java
index 2f94f13..a97244e 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Like.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Like.java
@@ -27,10 +27,7 @@ package org.apache.kylin.metadata.filter.function;
 public class Like {
     private static final String JAVA_REGEX_SPECIALS = "[]()|^-+*?{}$\\";
     private static final String SQL_SIMILAR_SPECIALS = "[]()|^-+*_%?{}";
-    private static final String[] REG_CHAR_CLASSES = { "[:ALPHA:]", "\\p{Alpha}", "[:alpha:]", "\\p{Alpha}",
-            "[:UPPER:]", "\\p{Upper}", "[:upper:]", "\\p{Upper}", "[:LOWER:]", "\\p{Lower}", "[:lower:]", "\\p{Lower}",
-            "[:DIGIT:]", "\\d", "[:digit:]", "\\d", "[:SPACE:]", " ", "[:space:]", " ", "[:WHITESPACE:]", "\\s",
-            "[:whitespace:]", "\\s", "[:ALNUM:]", "\\p{Alnum}", "[:alnum:]", "\\p{Alnum}" };
+    private static final String[] REG_CHAR_CLASSES = { "[:ALPHA:]", "\\p{Alpha}", "[:alpha:]", "\\p{Alpha}", "[:UPPER:]", "\\p{Upper}", "[:upper:]", "\\p{Upper}", "[:LOWER:]", "\\p{Lower}", "[:lower:]", "\\p{Lower}", "[:DIGIT:]", "\\d", "[:digit:]", "\\d", "[:SPACE:]", " ", "[:space:]", " ", "[:WHITESPACE:]", "\\s", "[:whitespace:]", "\\s", "[:ALNUM:]", "\\p{Alnum}", "[:alnum:]", "\\p{Alnum}" };
 
     private Like() {
     }
@@ -132,8 +129,7 @@ public class Like {
         return new RuntimeException("Invalid regular expression '" + pattern + "'");
     }
 
-    private static int sqlSimilarRewriteCharEnumeration(String sqlPattern, StringBuilder javaPattern, int pos,
-            char escapeChar) {
+    private static int sqlSimilarRewriteCharEnumeration(String sqlPattern, StringBuilder javaPattern, int pos, char escapeChar) {
         int i;
         for (i = pos + 1; i < sqlPattern.length(); i++) {
             char c = sqlPattern.charAt(i);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
index 649e336..ef1e412 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
@@ -132,8 +132,7 @@ public class LikeMatchers {
             return new OnePercentSignLikeMatcher(patternStr);
         } else if (count == 2 && patternStr.startsWith(PERCENT_SIGN) && patternStr.endsWith(PERCENT_SIGN)) {
             return new TwoPercentSignLikeMatcher(patternStr);
-        } else if (count == 3 && patternStr.startsWith(PERCENT_SIGN) && patternStr.endsWith(PERCENT_SIGN)
-                && !patternStr.contains(PERCENT_SIGN + PERCENT_SIGN)) {
+        } else if (count == 3 && patternStr.startsWith(PERCENT_SIGN) && patternStr.endsWith(PERCENT_SIGN) && !patternStr.contains(PERCENT_SIGN + PERCENT_SIGN)) {
             return new ThreePercentSignLikeMatcher(patternStr);
         } else {
             return new DefaultLikeMatcher(patternStr);
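
The matcher-selection hunk picks specialized matchers based on how many '%' signs the LIKE pattern contains and where they sit. The behavior of each matcher class is not in the hunk; the sketch below assumes the natural fast path, answering a "%needle%" pattern with String.contains and everything else with a translated regex.

// Sketch of the fast-path idea behind LikeMatchers: a pattern like "%foo%" with exactly
// two '%' wildcards (and no '_' wildcards) can be answered with String.contains instead
// of a full regex. The fallback translation below is illustrative, not Kylin's code.
public class LikeFastPathSketch {

    static boolean likeTwoPercent(String value, String pattern) {
        // expects a pattern of shape %needle% with no other wildcards
        String needle = pattern.substring(1, pattern.length() - 1);
        return value.contains(needle);
    }

    static boolean likeGeneric(String value, String pattern) {
        // fallback: translate % -> .* and _ -> . , quoting everything else
        StringBuilder regex = new StringBuilder();
        for (char c : pattern.toCharArray()) {
            if (c == '%') {
                regex.append(".*");
            } else if (c == '_') {
                regex.append('.');
            } else {
                regex.append(java.util.regex.Pattern.quote(String.valueOf(c)));
            }
        }
        return value.matches(regex.toString());
    }

    public static void main(String[] args) {
        System.out.println(likeTwoPercent("apache kylin", "%kyl%"));  // true
        System.out.println(likeGeneric("apache kylin", "apache_k%")); // true
    }
}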

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
index c09d455..5d15d56 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
@@ -82,8 +82,7 @@ public class ColumnDesc implements Serializable {
         this.index = other.index;
     }
 
-    public ColumnDesc(String id, String name, String datatype, String comment, String dataGen, String index,
-            String computedColumnExpr) {
+    public ColumnDesc(String id, String name, String datatype, String comment, String dataGen, String index, String computedColumnExpr) {
         this.id = id;
         this.name = name;
         this.datatype = datatype;
@@ -258,7 +257,6 @@ public class ColumnDesc implements Serializable {
 
     @Override
     public String toString() {
-        return "ColumnDesc{" + "id='" + id + '\'' + ", name='" + name + '\'' + ", datatype='" + datatype + '\''
-                + ", comment='" + comment + '\'' + '}';
+        return "ColumnDesc{" + "id='" + id + '\'' + ", name='" + name + '\'' + ", datatype='" + datatype + '\'' + ", comment='" + comment + '\'' + '}';
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index 6b983bf..e759bdf 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -370,8 +370,7 @@ public class DataModelDesc extends RootPersistentEntity {
             @Override
             public ColumnDesc apply(@Nullable ComputedColumnDesc input) {
                 id.increment();
-                ColumnDesc columnDesc = new ColumnDesc(id.toString(), input.getColumnName(), input.getDatatype(),
-                        input.getComment(), null, null, input.getExpression());
+                ColumnDesc columnDesc = new ColumnDesc(id.toString(), input.getColumnName(), input.getDatatype(), input.getComment(), null, null, input.getExpression());
                 return columnDesc;
             }
         }).toArray(ColumnDesc.class);
@@ -434,8 +433,7 @@ public class DataModelDesc extends RootPersistentEntity {
     private void addAlias(TableRef ref) {
         String alias = ref.getAlias();
         if (aliasMap.containsKey(alias))
-            throw new IllegalStateException("Alias '" + alias + "' ref to multiple tables: " + ref.getTableIdentity()
-                    + ", " + aliasMap.get(alias).getTableIdentity());
+            throw new IllegalStateException("Alias '" + alias + "' ref to multiple tables: " + ref.getTableIdentity() + ", " + aliasMap.get(alias).getTableIdentity());
         aliasMap.put(alias, ref);
 
         TableDesc table = ref.getTableDesc();
@@ -472,22 +470,18 @@ public class DataModelDesc extends RootPersistentEntity {
             computedColumnDesc.init();
 
             if (ccSet.contains(computedColumnDesc.getFullName())) {
-                throw new IllegalArgumentException(
-                        String.format("More than one computed column named %s exist in model %s",
-                                computedColumnDesc.getFullName(), this.getName()));
+                throw new IllegalArgumentException(String.format("More than one computed column named %s exist in model %s", computedColumnDesc.getFullName(), this.getName()));
             } else {
                 ccSet.add(computedColumnDesc.getFullName());
             }
 
             CCInfo other = ccInfoMap.get(computedColumnDesc.getFullName());
             if (other == null || (other.dataModelDescs.size() == 1 && other.dataModelDescs.contains(this))) {
-                ccInfoMap.put(computedColumnDesc.getFullName(),
-                        new CCInfo(computedColumnDesc, Sets.<DataModelDesc> newHashSet(this)));
+                ccInfoMap.put(computedColumnDesc.getFullName(), new CCInfo(computedColumnDesc, Sets.<DataModelDesc> newHashSet(this)));
             } else if (other.computedColumnDesc.equals(computedColumnDesc)) {
                 other.dataModelDescs.add(this);
             } else {
-                throw new IllegalStateException(String.format(
-                        "Computed column named %s is already defined in other models: %s. Please change another name, or try to keep consistent definition", //
+                throw new IllegalStateException(String.format("Computed column named %s is already defined in other models: %s. Please change another name, or try to keep consistent definition", //
                         computedColumnDesc.getFullName(), other.dataModelDescs));
             }
         }
@@ -540,14 +534,11 @@ public class DataModelDesc extends RootPersistentEntity {
             if (pkCols.length == 0 || fkCols.length == 0)
                 throw new IllegalStateException("Missing join columns on table " + dimTable);
             if (pkCols.length != fkCols.length) {
-                throw new IllegalStateException("Primary keys(" + dimTable + ")" + Arrays.toString(pks)
-                        + " are not consistent with Foreign keys(" + fkTable + ") " + Arrays.toString(fks));
+                throw new IllegalStateException("Primary keys(" + dimTable + ")" + Arrays.toString(pks) + " are not consistent with Foreign keys(" + fkTable + ") " + Arrays.toString(fks));
             }
             for (int i = 0; i < fkCols.length; i++) {
                 if (!fkCols[i].getDatatype().equals(pkCols[i].getDatatype())) {
-                    logger.warn("PK " + dimTable + "." + pkCols[i].getName() + "." + pkCols[i].getDatatype()
-                            + " are not consistent with FK " + fkTable + "." + fkCols[i].getName() + "."
-                            + fkCols[i].getDatatype());
+                    logger.warn("PK " + dimTable + "." + pkCols[i].getName() + "." + pkCols[i].getDatatype() + " are not consistent with FK " + fkTable + "." + fkCols[i].getName() + "." + fkCols[i].getDatatype());
                 }
             }
         }
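
The last hunk above encodes the join-key validation rules: primary and foreign key lists must have the same length (otherwise an IllegalStateException), while a per-column datatype mismatch only produces a warning. A simplified sketch of that check, with datatypes reduced to plain strings and System.out standing in for the logger:

// Sketch of the join-key validation visible in the DataModelDesc hunk.
public class JoinKeyCheckSketch {

    static void checkJoinKeys(String[] pkTypes, String[] fkTypes) {
        if (pkTypes.length == 0 || fkTypes.length == 0) {
            throw new IllegalStateException("Missing join columns");
        }
        if (pkTypes.length != fkTypes.length) {
            throw new IllegalStateException("Primary keys are not consistent with foreign keys");
        }
        for (int i = 0; i < fkTypes.length; i++) {
            if (!fkTypes[i].equals(pkTypes[i])) {
                // Kylin logs a warning here instead of failing the model
                System.out.println("WARN: PK type " + pkTypes[i] + " differs from FK type " + fkTypes[i]);
            }
        }
    }

    public static void main(String[] args) {
        checkJoinKeys(new String[] { "bigint", "varchar(256)" },
                      new String[] { "bigint", "varchar(4096)" }); // warns on column 2
    }
}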

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
index 1fb25d0..115b154 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
@@ -89,8 +89,7 @@ public class ExternalFilterDesc extends RootPersistentEntity implements ISourceA
 
     @Override
     public String toString() {
-        return "ExternalFilterDesc [ name=" + name + " filter table resource identifier "
-                + this.filterResourceIdentifier + "]";
+        return "ExternalFilterDesc [ name=" + name + " filter table resource identifier " + this.filterResourceIdentifier + "]";
     }
 
     /** create a mockup table for unit test */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
index cba8f09..e969be2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
@@ -308,8 +308,7 @@ public class FunctionDesc implements Serializable {
 
     @Override
     public String toString() {
-        return "FunctionDesc [expression=" + expression + ", parameter=" + parameter + ", returnType=" + returnType
-                + "]";
+        return "FunctionDesc [expression=" + expression + ", parameter=" + parameter + ", returnType=" + returnType + "]";
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
index 2364bad..d46ea96 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
@@ -37,7 +37,7 @@ public interface ISegment {
     public SegmentStatusEnum getStatus();
 
     public long getLastBuildTime();
-
+    
     public void validate() throws IllegalStateException;
-
+    
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinDesc.java
index 27f4025..eb82ace 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinDesc.java
@@ -18,14 +18,14 @@
 
 package org.apache.kylin.metadata.model;
 
-import java.io.Serializable;
-import java.util.Arrays;
-
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 
+import java.io.Serializable;
+import java.util.Arrays;
+
 /**
  */
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
@@ -55,15 +55,15 @@ public class JoinDesc implements Serializable {
     public boolean isInnerJoin() {
         return "INNER".equalsIgnoreCase(type);
     }
-
+    
     public boolean isLeftJoin() {
         return "LEFT".equalsIgnoreCase(type);
     }
-
+    
     public String getType() {
         return type;
     }
-
+    
     public void setType(String type) {
         this.type = type;
     }
@@ -105,7 +105,7 @@ public class JoinDesc implements Serializable {
     private void checkSameTable(TblColRef[] cols) {
         if (cols == null || cols.length == 0)
             return;
-
+        
         TableRef tableRef = cols[0].getTableRef();
         for (int i = 1; i < cols.length; i++)
             Preconditions.checkState(tableRef == cols[i].getTableRef());
@@ -114,14 +114,13 @@ public class JoinDesc implements Serializable {
     public TableRef getPKSide() {
         return primaryKeyColumns[0].getTableRef();
     }
-
+    
     public TableRef getFKSide() {
         return foreignKeyColumns[0].getTableRef();
     }
 
     public void sortByFK() {
-        Preconditions.checkState(primaryKey.length == foreignKey.length && primaryKey.length == primaryKeyColumns.length
-                && foreignKey.length == foreignKeyColumns.length);
+        Preconditions.checkState(primaryKey.length == foreignKey.length && primaryKey.length == primaryKeyColumns.length && foreignKey.length == foreignKeyColumns.length);
         boolean cont = true;
         int n = foreignKey.length;
         for (int i = 0; i < n - 1 && cont; i++) {
@@ -144,7 +143,7 @@ public class JoinDesc implements Serializable {
         arr[j] = arr[jj];
         arr[jj] = tmp;
     }
-
+    
     private void swap(TblColRef[] arr, int j, int jj) {
         TblColRef tmp = arr[j];
         arr[j] = arr[jj];
@@ -186,23 +185,23 @@ public class JoinDesc implements Serializable {
     public boolean matches(JoinDesc other) {
         if (other == null)
             return false;
-
+        
         if (!this.type.equalsIgnoreCase(other.getType()))
             return false;
-
+        
         // note pk/fk are sorted, sortByFK()
         if (!this.columnDescEquals(foreignKeyColumns, other.foreignKeyColumns))
             return false;
         if (!this.columnDescEquals(primaryKeyColumns, other.primaryKeyColumns))
             return false;
-
+        
         return true;
     }
 
     private boolean columnDescEquals(TblColRef[] a, TblColRef[] b) {
         if (a.length != b.length)
             return false;
-
+        
         for (int i = 0; i < a.length; i++) {
             if (a[i].getColumnDesc().equals(b[i].getColumnDesc()) == false)
                 return false;
@@ -212,8 +211,7 @@ public class JoinDesc implements Serializable {
 
     @Override
     public String toString() {
-        return "JoinDesc [type=" + type + ", primary_key=" + Arrays.toString(primaryKey) + ", foreign_key="
-                + Arrays.toString(foreignKey) + "]";
+        return "JoinDesc [type=" + type + ", primary_key=" + Arrays.toString(primaryKey) + ", foreign_key=" + Arrays.toString(foreignKey) + "]";
     }
 
 }
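
sortByFK() keeps parallel pk/fk arrays aligned: the precondition in the hunk checks that their lengths match, and the swap helpers exchange entries pairwise. The sort loop body itself is outside the hunk, so the bubble sort in the sketch below is an assumption; the point it illustrates is only that every swap must be mirrored across the parallel arrays.

import java.util.Arrays;

// Illustration of the idea behind JoinDesc.sortByFK(): sort by foreign-key name and
// mirror every swap on the parallel primary-key array so the pairs stay aligned.
public class ParallelKeySortSketch {

    static void sortByFk(String[] foreignKey, String[] primaryKey) {
        if (foreignKey.length != primaryKey.length) {
            throw new IllegalStateException("pk/fk arrays must have the same length");
        }
        for (int i = 0; i < foreignKey.length - 1; i++) {
            for (int j = 0; j < foreignKey.length - 1 - i; j++) {
                if (foreignKey[j].compareTo(foreignKey[j + 1]) > 0) {
                    swap(foreignKey, j, j + 1);
                    swap(primaryKey, j, j + 1); // keep the pairing intact
                }
            }
        }
    }

    static void swap(String[] arr, int a, int b) {
        String tmp = arr[a];
        arr[a] = arr[b];
        arr[b] = tmp;
    }

    public static void main(String[] args) {
        String[] fk = { "SELLER_ID", "BUYER_ID" };
        String[] pk = { "ACCOUNT_ID", "ACCOUNT_ID" };
        sortByFk(fk, pk);
        System.out.println(Arrays.toString(fk) + " / " + Arrays.toString(pk));
    }
}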

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
index 1c0d30a..56c90bd 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinTableDesc.java
@@ -37,14 +37,14 @@ public class JoinTableDesc implements Serializable {
     @JsonProperty("kind")
     @JsonInclude(JsonInclude.Include.NON_NULL)
     private TableKind kind = TableKind.LOOKUP;
-
+    
     @JsonProperty("alias")
     @JsonInclude(JsonInclude.Include.NON_NULL)
     private String alias;
-
+    
     @JsonProperty("join")
     private JoinDesc join;
-
+    
     private TableRef tableRef;
 
     public String getTable() {
@@ -58,15 +58,15 @@ public class JoinTableDesc implements Serializable {
     public TableKind getKind() {
         return kind;
     }
-
+    
     void setAlias(String alias) {
         this.alias = alias;
     }
-
+    
     public String getAlias() {
         return alias;
     }
-
+    
     public JoinDesc getJoin() {
         return join;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
index ab70cc7..4e7e8b8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/JoinsTree.java
@@ -73,8 +73,7 @@ public class JoinsTree implements Serializable {
         return matchUp.size();
     }
 
-    private boolean matchInTree(Chain chain, JoinsTree another, Map<String, String> constraints,
-            Map<String, String> matchUp) {
+    private boolean matchInTree(Chain chain, JoinsTree another, Map<String, String> constraints, Map<String, String> matchUp) {
         String thisAlias = chain.table.getAlias();
         if (matchUp.containsKey(thisAlias))
             return true;
@@ -104,8 +103,7 @@ public class JoinsTree implements Serializable {
 
         boolean matches = false;
         if (chain.join == null) {
-            matches = anotherChain.join == null
-                    && chain.table.getTableDesc().getIdentity().equals(anotherChain.table.getTableDesc().getIdentity());
+            matches = anotherChain.join == null && chain.table.getTableDesc().getIdentity().equals(anotherChain.table.getTableDesc().getIdentity());
         } else {
             matches = chain.join.matches(anotherChain.join) && matchChain(chain.fkSide, anotherChain.fkSide, matchUp);
         }


[04/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
index 36dfd3f..b9f2771 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
@@ -4,5782 +4,5588 @@
 package org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated;
 
 public final class CubeVisitProtos {
-    private CubeVisitProtos() {
-    }
-
-    public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
-    }
-
-    public interface CubeVisitRequestOrBuilder extends com.google.protobuf.MessageOrBuilder {
-
-        // required bytes gtScanRequest = 1;
-        /**
-         * <code>required bytes gtScanRequest = 1;</code>
-         */
-        boolean hasGtScanRequest();
-
-        /**
-         * <code>required bytes gtScanRequest = 1;</code>
-         */
-        com.google.protobuf.ByteString getGtScanRequest();
-
-        // required bytes hbaseRawScan = 2;
-        /**
-         * <code>required bytes hbaseRawScan = 2;</code>
-         */
-        boolean hasHbaseRawScan();
-
-        /**
-         * <code>required bytes hbaseRawScan = 2;</code>
-         */
-        com.google.protobuf.ByteString getHbaseRawScan();
-
-        // required int32 rowkeyPreambleSize = 3;
-        /**
-         * <code>required int32 rowkeyPreambleSize = 3;</code>
-         */
-        boolean hasRowkeyPreambleSize();
-
-        /**
-         * <code>required int32 rowkeyPreambleSize = 3;</code>
-         */
-        int getRowkeyPreambleSize();
-
-        // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;
-        /**
-         * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
-         */
-        java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> getHbaseColumnsToGTList();
-
-        /**
-         * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
-         */
-        org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getHbaseColumnsToGT(
-                int index);
-
-        /**
-         * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
-         */
-        int getHbaseColumnsToGTCount();
+  private CubeVisitProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface CubeVisitRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required bytes gtScanRequest = 1;
+    /**
+     * <code>required bytes gtScanRequest = 1;</code>
+     */
+    boolean hasGtScanRequest();
+    /**
+     * <code>required bytes gtScanRequest = 1;</code>
+     */
+    com.google.protobuf.ByteString getGtScanRequest();
 
-        /**
-         * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
-         */
-        java.util.List<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> getHbaseColumnsToGTOrBuilderList();
+    // required bytes hbaseRawScan = 2;
+    /**
+     * <code>required bytes hbaseRawScan = 2;</code>
+     */
+    boolean hasHbaseRawScan();
+    /**
+     * <code>required bytes hbaseRawScan = 2;</code>
+     */
+    com.google.protobuf.ByteString getHbaseRawScan();
 
-        /**
-         * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
-         */
-        org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder getHbaseColumnsToGTOrBuilder(
-                int index);
+    // required int32 rowkeyPreambleSize = 3;
+    /**
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
+     */
+    boolean hasRowkeyPreambleSize();
+    /**
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
+     */
+    int getRowkeyPreambleSize();
 
-        // required string kylinProperties = 5;
-        /**
-         * <code>required string kylinProperties = 5;</code>
-         *
-         * <pre>
-         * kylin properties
-         * </pre>
-         */
-        boolean hasKylinProperties();
+    // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> 
+        getHbaseColumnsToGTList();
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getHbaseColumnsToGT(int index);
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    int getHbaseColumnsToGTCount();
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    java.util.List<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> 
+        getHbaseColumnsToGTOrBuilderList();
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder getHbaseColumnsToGTOrBuilder(
+        int index);
 
-        /**
-         * <code>required string kylinProperties = 5;</code>
-         *
-         * <pre>
-         * kylin properties
-         * </pre>
-         */
-        java.lang.String getKylinProperties();
+    // required string kylinProperties = 5;
+    /**
+     * <code>required string kylinProperties = 5;</code>
+     *
+     * <pre>
+     * kylin properties
+     * </pre>
+     */
+    boolean hasKylinProperties();
+    /**
+     * <code>required string kylinProperties = 5;</code>
+     *
+     * <pre>
+     * kylin properties
+     * </pre>
+     */
+    java.lang.String getKylinProperties();
+    /**
+     * <code>required string kylinProperties = 5;</code>
+     *
+     * <pre>
+     * kylin properties
+     * </pre>
+     */
+    com.google.protobuf.ByteString
+        getKylinPropertiesBytes();
 
-        /**
-         * <code>required string kylinProperties = 5;</code>
-         *
-         * <pre>
-         * kylin properties
-         * </pre>
-         */
-        com.google.protobuf.ByteString getKylinPropertiesBytes();
+    // optional string queryId = 6;
+    /**
+     * <code>optional string queryId = 6;</code>
+     */
+    boolean hasQueryId();
+    /**
+     * <code>optional string queryId = 6;</code>
+     */
+    java.lang.String getQueryId();
+    /**
+     * <code>optional string queryId = 6;</code>
+     */
+    com.google.protobuf.ByteString
+        getQueryIdBytes();
 
-        // optional string queryId = 6;
-        /**
-         * <code>optional string queryId = 6;</code>
-         */
-        boolean hasQueryId();
+    // optional bool spillEnabled = 7 [default = true];
+    /**
+     * <code>optional bool spillEnabled = 7 [default = true];</code>
+     */
+    boolean hasSpillEnabled();
+    /**
+     * <code>optional bool spillEnabled = 7 [default = true];</code>
+     */
+    boolean getSpillEnabled();
 
-        /**
-         * <code>optional string queryId = 6;</code>
-         */
-        java.lang.String getQueryId();
+    // optional int64 maxScanBytes = 8;
+    /**
+     * <code>optional int64 maxScanBytes = 8;</code>
+     *
+     * <pre>
+     * 0 means no limit
+     * </pre>
+     */
+    boolean hasMaxScanBytes();
+    /**
+     * <code>optional int64 maxScanBytes = 8;</code>
+     *
+     * <pre>
+     * 0 means no limit
+     * </pre>
+     */
+    long getMaxScanBytes();
+  }
+  /**
+   * Protobuf type {@code CubeVisitRequest}
+   */
+  public static final class CubeVisitRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements CubeVisitRequestOrBuilder {
+    // Use CubeVisitRequest.newBuilder() to construct.
+    private CubeVisitRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private CubeVisitRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
 
-        /**
-         * <code>optional string queryId = 6;</code>
-         */
-        com.google.protobuf.ByteString getQueryIdBytes();
+    private static final CubeVisitRequest defaultInstance;
+    public static CubeVisitRequest getDefaultInstance() {
+      return defaultInstance;
+    }
 
-        // optional bool spillEnabled = 7 [default = true];
-        /**
-         * <code>optional bool spillEnabled = 7 [default = true];</code>
-         */
-        boolean hasSpillEnabled();
+    public CubeVisitRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
 
-        /**
-         * <code>optional bool spillEnabled = 7 [default = true];</code>
-         */
-        boolean getSpillEnabled();
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private CubeVisitRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              gtScanRequest_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              hbaseRawScan_ = input.readBytes();
+              break;
+            }
+            case 24: {
+              bitField0_ |= 0x00000004;
+              rowkeyPreambleSize_ = input.readInt32();
+              break;
+            }
+            case 34: {
+              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+                hbaseColumnsToGT_ = new java.util.ArrayList<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList>();
+                mutable_bitField0_ |= 0x00000008;
+              }
+              hbaseColumnsToGT_.add(input.readMessage(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.PARSER, extensionRegistry));
+              break;
+            }
+            case 42: {
+              bitField0_ |= 0x00000008;
+              kylinProperties_ = input.readBytes();
+              break;
+            }
+            case 50: {
+              bitField0_ |= 0x00000010;
+              queryId_ = input.readBytes();
+              break;
+            }
+            case 56: {
+              bitField0_ |= 0x00000020;
+              spillEnabled_ = input.readBool();
+              break;
+            }
+            case 64: {
+              bitField0_ |= 0x00000040;
+              maxScanBytes_ = input.readInt64();
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
+          hbaseColumnsToGT_ = java.util.Collections.unmodifiableList(hbaseColumnsToGT_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_descriptor;
+    }
 
-        // optional int64 maxScanBytes = 8;
-        /**
-         * <code>optional int64 maxScanBytes = 8;</code>
-         *
-         * <pre>
-         * 0 means no limit
-         * </pre>
-         */
-        boolean hasMaxScanBytes();
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.class, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.Builder.class);
+    }
 
-        /**
-         * <code>optional int64 maxScanBytes = 8;</code>
-         *
-         * <pre>
-         * 0 means no limit
-         * </pre>
-         */
-        long getMaxScanBytes();
+    public static com.google.protobuf.Parser<CubeVisitRequest> PARSER =
+        new com.google.protobuf.AbstractParser<CubeVisitRequest>() {
+      public CubeVisitRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new CubeVisitRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<CubeVisitRequest> getParserForType() {
+      return PARSER;
     }
 
+    public interface IntListOrBuilder
+        extends com.google.protobuf.MessageOrBuilder {
+
+      // repeated int32 ints = 1;
+      /**
+       * <code>repeated int32 ints = 1;</code>
+       */
+      java.util.List<java.lang.Integer> getIntsList();
+      /**
+       * <code>repeated int32 ints = 1;</code>
+       */
+      int getIntsCount();
+      /**
+       * <code>repeated int32 ints = 1;</code>
+       */
+      int getInts(int index);
+    }
     /**
-     * Protobuf type {@code CubeVisitRequest}
+     * Protobuf type {@code CubeVisitRequest.IntList}
      */
-    public static final class CubeVisitRequest extends com.google.protobuf.GeneratedMessage
-            implements CubeVisitRequestOrBuilder {
-        // Use CubeVisitRequest.newBuilder() to construct.
-        private CubeVisitRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-            super(builder);
-            this.unknownFields = builder.getUnknownFields();
+    public static final class IntList extends
+        com.google.protobuf.GeneratedMessage
+        implements IntListOrBuilder {
+      // Use IntList.newBuilder() to construct.
+      private IntList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+        super(builder);
+        this.unknownFields = builder.getUnknownFields();
+      }
+      private IntList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+      private static final IntList defaultInstance;
+      public static IntList getDefaultInstance() {
+        return defaultInstance;
+      }
+
+      public IntList getDefaultInstanceForType() {
+        return defaultInstance;
+      }
+
+      private final com.google.protobuf.UnknownFieldSet unknownFields;
+      @java.lang.Override
+      public final com.google.protobuf.UnknownFieldSet
+          getUnknownFields() {
+        return this.unknownFields;
+      }
+      private IntList(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        initFields();
+        int mutable_bitField0_ = 0;
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder();
+        try {
+          boolean done = false;
+          while (!done) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                done = true;
+                break;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  done = true;
+                }
+                break;
+              }
+              case 8: {
+                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                  ints_ = new java.util.ArrayList<java.lang.Integer>();
+                  mutable_bitField0_ |= 0x00000001;
+                }
+                ints_.add(input.readInt32());
+                break;
+              }
+              case 10: {
+                int length = input.readRawVarint32();
+                int limit = input.pushLimit(length);
+                if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
+                  ints_ = new java.util.ArrayList<java.lang.Integer>();
+                  mutable_bitField0_ |= 0x00000001;
+                }
+                while (input.getBytesUntilLimit() > 0) {
+                  ints_.add(input.readInt32());
+                }
+                input.popLimit(limit);
+                break;
+              }
+            }
+          }
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          throw e.setUnfinishedMessage(this);
+        } catch (java.io.IOException e) {
+          throw new com.google.protobuf.InvalidProtocolBufferException(
+              e.getMessage()).setUnfinishedMessage(this);
+        } finally {
+          if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+            ints_ = java.util.Collections.unmodifiableList(ints_);
+          }
+          this.unknownFields = unknownFields.build();
+          makeExtensionsImmutable();
         }
-
-        private CubeVisitRequest(boolean noInit) {
-            this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+      }
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.class, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder.class);
+      }
+
+      public static com.google.protobuf.Parser<IntList> PARSER =
+          new com.google.protobuf.AbstractParser<IntList>() {
+        public IntList parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return new IntList(input, extensionRegistry);
+        }
+      };
+
+      @java.lang.Override
+      public com.google.protobuf.Parser<IntList> getParserForType() {
+        return PARSER;
+      }
+
+      // repeated int32 ints = 1;
+      public static final int INTS_FIELD_NUMBER = 1;
+      private java.util.List<java.lang.Integer> ints_;
+      /**
+       * <code>repeated int32 ints = 1;</code>
+       */
+      public java.util.List<java.lang.Integer>
+          getIntsList() {
+        return ints_;
+      }
+      /**
+       * <code>repeated int32 ints = 1;</code>
+       */
+      public int getIntsCount() {
+        return ints_.size();
+      }
+      /**
+       * <code>repeated int32 ints = 1;</code>
+       */
+      public int getInts(int index) {
+        return ints_.get(index);
+      }
+
+      private void initFields() {
+        ints_ = java.util.Collections.emptyList();
+      }
+      private byte memoizedIsInitialized = -1;
+      public final boolean isInitialized() {
+        byte isInitialized = memoizedIsInitialized;
+        if (isInitialized != -1) return isInitialized == 1;
+
+        memoizedIsInitialized = 1;
+        return true;
+      }
+
+      public void writeTo(com.google.protobuf.CodedOutputStream output)
+                          throws java.io.IOException {
+        getSerializedSize();
+        for (int i = 0; i < ints_.size(); i++) {
+          output.writeInt32(1, ints_.get(i));
+        }
+        getUnknownFields().writeTo(output);
+      }
+
+      private int memoizedSerializedSize = -1;
+      public int getSerializedSize() {
+        int size = memoizedSerializedSize;
+        if (size != -1) return size;
+
+        size = 0;
+        {
+          int dataSize = 0;
+          for (int i = 0; i < ints_.size(); i++) {
+            dataSize += com.google.protobuf.CodedOutputStream
+              .computeInt32SizeNoTag(ints_.get(i));
+          }
+          size += dataSize;
+          size += 1 * getIntsList().size();
+        }
+        size += getUnknownFields().getSerializedSize();
+        memoizedSerializedSize = size;
+        return size;
+      }
+
+      private static final long serialVersionUID = 0L;
+      @java.lang.Override
+      protected java.lang.Object writeReplace()
+          throws java.io.ObjectStreamException {
+        return super.writeReplace();
+      }
+
+      @java.lang.Override
+      public boolean equals(final java.lang.Object obj) {
+        if (obj == this) {
+         return true;
+        }
+        if (!(obj instanceof org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList)) {
+          return super.equals(obj);
+        }
+        org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList other = (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) obj;
+
+        boolean result = true;
+        result = result && getIntsList()
+            .equals(other.getIntsList());
+        result = result &&
+            getUnknownFields().equals(other.getUnknownFields());
+        return result;
+      }
+
+      private int memoizedHashCode = 0;
+      @java.lang.Override
+      public int hashCode() {
+        if (memoizedHashCode != 0) {
+          return memoizedHashCode;
+        }
+        int hash = 41;
+        hash = (19 * hash) + getDescriptorForType().hashCode();
+        if (getIntsCount() > 0) {
+          hash = (37 * hash) + INTS_FIELD_NUMBER;
+          hash = (53 * hash) + getIntsList().hashCode();
+        }
+        hash = (29 * hash) + getUnknownFields().hashCode();
+        memoizedHashCode = hash;
+        return hash;
+      }
+
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
+          com.google.protobuf.ByteString data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
+          com.google.protobuf.ByteString data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(byte[] data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
+          byte[] data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseDelimitedFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseDelimitedFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+
+      public static Builder newBuilder() { return Builder.create(); }
+      public Builder newBuilderForType() { return newBuilder(); }
+      public static Builder newBuilder(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList prototype) {
+        return newBuilder().mergeFrom(prototype);
+      }
+      public Builder toBuilder() { return newBuilder(this); }
+
+      @java.lang.Override
+      protected Builder newBuilderForType(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        Builder builder = new Builder(parent);
+        return builder;
+      }
+      /**
+       * Protobuf type {@code CubeVisitRequest.IntList}
+       */
+      public static final class Builder extends
+          com.google.protobuf.GeneratedMessage.Builder<Builder>
+         implements org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder {
+        public static final com.google.protobuf.Descriptors.Descriptor
+            getDescriptor() {
+          return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_descriptor;
         }
 
-        private static final CubeVisitRequest defaultInstance;
+        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+            internalGetFieldAccessorTable() {
+          return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_fieldAccessorTable
+              .ensureFieldAccessorsInitialized(
+                  org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.class, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder.class);
+        }
 
-        public static CubeVisitRequest getDefaultInstance() {
-            return defaultInstance;
+        // Construct using org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.newBuilder()
+        private Builder() {
+          maybeForceBuilderInitialization();
         }
 
-        public CubeVisitRequest getDefaultInstanceForType() {
-            return defaultInstance;
+        private Builder(
+            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          super(parent);
+          maybeForceBuilderInitialization();
+        }
+        private void maybeForceBuilderInitialization() {
+          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          }
+        }
+        private static Builder create() {
+          return new Builder();
         }
 
-        private final com.google.protobuf.UnknownFieldSet unknownFields;
+        public Builder clear() {
+          super.clear();
+          ints_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          return this;
+        }
 
-        @java.lang.Override
-        public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
-            return this.unknownFields;
-        }
-
-        private CubeVisitRequest(com.google.protobuf.CodedInputStream input,
-                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                throws com.google.protobuf.InvalidProtocolBufferException {
-            initFields();
-            int mutable_bitField0_ = 0;
-            com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet
-                    .newBuilder();
-            try {
-                boolean done = false;
-                while (!done) {
-                    int tag = input.readTag();
-                    switch (tag) {
-                    case 0:
-                        done = true;
-                        break;
-                    default: {
-                        if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
-                            done = true;
-                        }
-                        break;
-                    }
-                    case 10: {
-                        bitField0_ |= 0x00000001;
-                        gtScanRequest_ = input.readBytes();
-                        break;
-                    }
-                    case 18: {
-                        bitField0_ |= 0x00000002;
-                        hbaseRawScan_ = input.readBytes();
-                        break;
-                    }
-                    case 24: {
-                        bitField0_ |= 0x00000004;
-                        rowkeyPreambleSize_ = input.readInt32();
-                        break;
-                    }
-                    case 34: {
-                        if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
-                            hbaseColumnsToGT_ = new java.util.ArrayList<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList>();
-                            mutable_bitField0_ |= 0x00000008;
-                        }
-                        hbaseColumnsToGT_.add(input.readMessage(
-                                org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.PARSER,
-                                extensionRegistry));
-                        break;
-                    }
-                    case 42: {
-                        bitField0_ |= 0x00000008;
-                        kylinProperties_ = input.readBytes();
-                        break;
-                    }
-                    case 50: {
-                        bitField0_ |= 0x00000010;
-                        queryId_ = input.readBytes();
-                        break;
-                    }
-                    case 56: {
-                        bitField0_ |= 0x00000020;
-                        spillEnabled_ = input.readBool();
-                        break;
-                    }
-                    case 64: {
-                        bitField0_ |= 0x00000040;
-                        maxScanBytes_ = input.readInt64();
-                        break;
-                    }
-                    }
-                }
-            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-                throw e.setUnfinishedMessage(this);
-            } catch (java.io.IOException e) {
-                throw new com.google.protobuf.InvalidProtocolBufferException(e.getMessage()).setUnfinishedMessage(this);
-            } finally {
-                if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
-                    hbaseColumnsToGT_ = java.util.Collections.unmodifiableList(hbaseColumnsToGT_);
-                }
-                this.unknownFields = unknownFields.build();
-                makeExtensionsImmutable();
-            }
+        public Builder clone() {
+          return create().mergeFrom(buildPartial());
         }
 
-        public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
-            return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_descriptor;
+        public com.google.protobuf.Descriptors.Descriptor
+            getDescriptorForType() {
+          return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_descriptor;
         }
 
-        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
-            return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_fieldAccessorTable
-                    .ensureFieldAccessorsInitialized(
-                            org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.class,
-                            org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.Builder.class);
+        public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getDefaultInstanceForType() {
+          return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.getDefaultInstance();
         }
 
-        public static com.google.protobuf.Parser<CubeVisitRequest> PARSER = new com.google.protobuf.AbstractParser<CubeVisitRequest>() {
-            public CubeVisitRequest parsePartialFrom(com.google.protobuf.CodedInputStream input,
-                    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                    throws com.google.protobuf.InvalidProtocolBufferException {
-                return new CubeVisitRequest(input, extensionRegistry);
-            }
-        };
+        public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList build() {
+          org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList result = buildPartial();
+          if (!result.isInitialized()) {
+            throw newUninitializedMessageException(result);
+          }
+          return result;
+        }
 
-        @java.lang.Override
-        public com.google.protobuf.Parser<CubeVisitRequest> getParserForType() {
-            return PARSER;
+        public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList buildPartial() {
+          org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList result = new org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList(this);
+          int from_bitField0_ = bitField0_;
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            ints_ = java.util.Collections.unmodifiableList(ints_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.ints_ = ints_;
+          onBuilt();
+          return result;
         }
 
-        public interface IntListOrBuilder extends com.google.protobuf.MessageOrBuilder {
+        public Builder mergeFrom(com.google.protobuf.Message other) {
+          if (other instanceof org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) {
+            return mergeFrom((org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList)other);
+          } else {
+            super.mergeFrom(other);
+            return this;
+          }
+        }
 
-            // repeated int32 ints = 1;
-            /**
-             * <code>repeated int32 ints = 1;</code>
-             */
-            java.util.List<java.lang.Integer> getIntsList();
+        public Builder mergeFrom(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList other) {
+          if (other == org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.getDefaultInstance()) return this;
+          if (!other.ints_.isEmpty()) {
+            if (ints_.isEmpty()) {
+              ints_ = other.ints_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureIntsIsMutable();
+              ints_.addAll(other.ints_);
+            }
+            onChanged();
+          }
+          this.mergeUnknownFields(other.getUnknownFields());
+          return this;
+        }
 
-            /**
-             * <code>repeated int32 ints = 1;</code>
-             */
-            int getIntsCount();
+        public final boolean isInitialized() {
+          return true;
+        }
 
-            /**
-             * <code>repeated int32 ints = 1;</code>
-             */
-            int getInts(int index);
+        public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parsedMessage = null;
+          try {
+            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+            parsedMessage = (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) e.getUnfinishedMessage();
+            throw e;
+          } finally {
+            if (parsedMessage != null) {
+              mergeFrom(parsedMessage);
+            }
+          }
+          return this;
         }
+        private int bitField0_;
 
+        // repeated int32 ints = 1;
+        private java.util.List<java.lang.Integer> ints_ = java.util.Collections.emptyList();
+        private void ensureIntsIsMutable() {
+          if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+            ints_ = new java.util.ArrayList<java.lang.Integer>(ints_);
+            bitField0_ |= 0x00000001;
+           }
+        }
         /**
-         * Protobuf type {@code CubeVisitRequest.IntList}
+         * <code>repeated int32 ints = 1;</code>
          */
-        public static final class IntList extends com.google.protobuf.GeneratedMessage implements IntListOrBuilder {
-            // Use IntList.newBuilder() to construct.
-            private IntList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-                super(builder);
-                this.unknownFields = builder.getUnknownFields();
-            }
+        public java.util.List<java.lang.Integer>
+            getIntsList() {
+          return java.util.Collections.unmodifiableList(ints_);
+        }
+        /**
+         * <code>repeated int32 ints = 1;</code>
+         */
+        public int getIntsCount() {
+          return ints_.size();
+        }
+        /**
+         * <code>repeated int32 ints = 1;</code>
+         */
+        public int getInts(int index) {
+          return ints_.get(index);
+        }
+        /**
+         * <code>repeated int32 ints = 1;</code>
+         */
+        public Builder setInts(
+            int index, int value) {
+          ensureIntsIsMutable();
+          ints_.set(index, value);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated int32 ints = 1;</code>
+         */
+        public Builder addInts(int value) {
+          ensureIntsIsMutable();
+          ints_.add(value);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated int32 ints = 1;</code>
+         */
+        public Builder addAllInts(
+            java.lang.Iterable<? extends java.lang.Integer> values) {
+          ensureIntsIsMutable();
+          super.addAll(values, ints_);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated int32 ints = 1;</code>
+         */
+        public Builder clearInts() {
+          ints_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+          return this;
+        }
 
-            private IntList(boolean noInit) {
-                this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
-            }
+        // @@protoc_insertion_point(builder_scope:CubeVisitRequest.IntList)
+      }
 
-            private static final IntList defaultInstance;
+      static {
+        defaultInstance = new IntList(true);
+        defaultInstance.initFields();
+      }
 
-            public static IntList getDefaultInstance() {
-                return defaultInstance;
-            }
+      // @@protoc_insertion_point(class_scope:CubeVisitRequest.IntList)
+    }
 
-            public IntList getDefaultInstanceForType() {
-                return defaultInstance;
-            }
+    private int bitField0_;
+    // required bytes gtScanRequest = 1;
+    public static final int GTSCANREQUEST_FIELD_NUMBER = 1;
+    private com.google.protobuf.ByteString gtScanRequest_;
+    /**
+     * <code>required bytes gtScanRequest = 1;</code>
+     */
+    public boolean hasGtScanRequest() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required bytes gtScanRequest = 1;</code>
+     */
+    public com.google.protobuf.ByteString getGtScanRequest() {
+      return gtScanRequest_;
+    }
 
-            private final com.google.protobuf.UnknownFieldSet unknownFields;
+    // required bytes hbaseRawScan = 2;
+    public static final int HBASERAWSCAN_FIELD_NUMBER = 2;
+    private com.google.protobuf.ByteString hbaseRawScan_;
+    /**
+     * <code>required bytes hbaseRawScan = 2;</code>
+     */
+    public boolean hasHbaseRawScan() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>required bytes hbaseRawScan = 2;</code>
+     */
+    public com.google.protobuf.ByteString getHbaseRawScan() {
+      return hbaseRawScan_;
+    }
 
-            @java.lang.Override
-            public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
-                return this.unknownFields;
-            }
+    // required int32 rowkeyPreambleSize = 3;
+    public static final int ROWKEYPREAMBLESIZE_FIELD_NUMBER = 3;
+    private int rowkeyPreambleSize_;
+    /**
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
+     */
+    public boolean hasRowkeyPreambleSize() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
+     */
+    public int getRowkeyPreambleSize() {
+      return rowkeyPreambleSize_;
+    }
 
-            private IntList(com.google.protobuf.CodedInputStream input,
-                    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                    throws com.google.protobuf.InvalidProtocolBufferException {
-                initFields();
-                int mutable_bitField0_ = 0;
-                com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet
-                        .newBuilder();
-                try {
-                    boolean done = false;
-                    while (!done) {
-                        int tag = input.readTag();
-                        switch (tag) {
-                        case 0:
-                            done = true;
-                            break;
-                        default: {
-                            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
-                                done = true;
-                            }
-                            break;
-                        }
-                        case 8: {
-                            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                                ints_ = new java.util.ArrayList<java.lang.Integer>();
-                                mutable_bitField0_ |= 0x00000001;
-                            }
-                            ints_.add(input.readInt32());
-                            break;
-                        }
-                        case 10: {
-                            int length = input.readRawVarint32();
-                            int limit = input.pushLimit(length);
-                            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
-                                ints_ = new java.util.ArrayList<java.lang.Integer>();
-                                mutable_bitField0_ |= 0x00000001;
-                            }
-                            while (input.getBytesUntilLimit() > 0) {
-                                ints_.add(input.readInt32());
-                            }
-                            input.popLimit(limit);
-                            break;
-                        }
-                        }
-                    }
-                } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-                    throw e.setUnfinishedMessage(this);
-                } catch (java.io.IOException e) {
-                    throw new com.google.protobuf.InvalidProtocolBufferException(e.getMessage())
-                            .setUnfinishedMessage(this);
-                } finally {
-                    if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                        ints_ = java.util.Collections.unmodifiableList(ints_);
-                    }
-                    this.unknownFields = unknownFields.build();
-                    makeExtensionsImmutable();
-                }
-            }
+    // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;
+    public static final int HBASECOLUMNSTOGT_FIELD_NUMBER = 4;
+    private java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> hbaseColumnsToGT_;
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    public java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> getHbaseColumnsToGTList() {
+      return hbaseColumnsToGT_;
+    }
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    public java.util.List<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> 
+        getHbaseColumnsToGTOrBuilderList() {
+      return hbaseColumnsToGT_;
+    }
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    public int getHbaseColumnsToGTCount() {
+      return hbaseColumnsToGT_.size();
+    }
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getHbaseColumnsToGT(int index) {
+      return hbaseColumnsToGT_.get(index);
+    }
+    /**
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
+     */
+    public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder getHbaseColumnsToGTOrBuilder(
+        int index) {
+      return hbaseColumnsToGT_.get(index);
+    }
 
-            public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
-                return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_descriptor;
-            }
+    // required string kylinProperties = 5;
+    public static final int KYLINPROPERTIES_FIELD_NUMBER = 5;
+    private java.lang.Object kylinProperties_;
+    /**
+     * <code>required string kylinProperties = 5;</code>
+     *
+     * <pre>
+     * kylin properties
+     * </pre>
+     */
+    public boolean hasKylinProperties() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>required string kylinProperties = 5;</code>
+     *
+     * <pre>
+     * kylin properties
+     * </pre>
+     */
+    public java.lang.String getKylinProperties() {
+      java.lang.Object ref = kylinProperties_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          kylinProperties_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>required string kylinProperties = 5;</code>
+     *
+     * <pre>
+     * kylin properties
+     * </pre>
+     */
+    public com.google.protobuf.ByteString
+        getKylinPropertiesBytes() {
+      java.lang.Object ref = kylinProperties_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        kylinProperties_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
 
-            protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
-                return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_fieldAccessorTable
-                        .ensureFieldAccessorsInitialized(
-                                org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.class,
-                                org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder.class);
-            }
+    // optional string queryId = 6;
+    public static final int QUERYID_FIELD_NUMBER = 6;
+    private java.lang.Object queryId_;
+    /**
+     * <code>optional string queryId = 6;</code>
+     */
+    public boolean hasQueryId() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional string queryId = 6;</code>
+     */
+    public java.lang.String getQueryId() {
+      java.lang.Object ref = queryId_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          queryId_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string queryId = 6;</code>
+     */
+    public com.google.protobuf.ByteString
+        getQueryIdBytes() {
+      java.lang.Object ref = queryId_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        queryId_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
 
-            public static com.google.protobuf.Parser<IntList> PARSER = new com.google.protobuf.AbstractParser<IntList>() {
-                public IntList parsePartialFrom(com.google.protobuf.CodedInputStream input,
-                        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                        throws com.google.protobuf.InvalidProtocolBufferException {
-                    return new IntList(input, extensionRegistry);
-                }
-            };
+    // optional bool spillEnabled = 7 [default = true];
+    public static final int SPILLENABLED_FIELD_NUMBER = 7;
+    private boolean spillEnabled_;
+    /**
+     * <code>optional bool spillEnabled = 7 [default = true];</code>
+     */
+    public boolean hasSpillEnabled() {
+      return ((bitField0_ & 0x00000020) == 0x00000020);
+    }
+    /**
+     * <code>optional bool spillEnabled = 7 [default = true];</code>
+     */
+    public boolean getSpillEnabled() {
+      return spillEnabled_;
+    }
 
-            @java.lang.Override
-            public com.google.protobuf.Parser<IntList> getParserForType() {
-                return PARSER;
-            }
+    // optional int64 maxScanBytes = 8;
+    public static final int MAXSCANBYTES_FIELD_NUMBER = 8;
+    private long maxScanBytes_;
+    /**
+     * <code>optional int64 maxScanBytes = 8;</code>
+     *
+     * <pre>
+     * 0 means no limit
+     * </pre>
+     */
+    public boolean hasMaxScanBytes() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional int64 maxScanBytes = 8;</code>
+     *
+     * <pre>
+     * 0 means no limit
+     * </pre>
+     */
+    public long getMaxScanBytes() {
+      return maxScanBytes_;
+    }
 
-            // repeated int32 ints = 1;
-            public static final int INTS_FIELD_NUMBER = 1;
-            private java.util.List<java.lang.Integer> ints_;
+    private void initFields() {
+      gtScanRequest_ = com.google.protobuf.ByteString.EMPTY;
+      hbaseRawScan_ = com.google.protobuf.ByteString.EMPTY;
+      rowkeyPreambleSize_ = 0;
+      hbaseColumnsToGT_ = java.util.Collections.emptyList();
+      kylinProperties_ = "";
+      queryId_ = "";
+      spillEnabled_ = true;
+      maxScanBytes_ = 0L;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasGtScanRequest()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasHbaseRawScan()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasRowkeyPreambleSize()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasKylinProperties()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
 
-            /**
-             * <code>repeated int32 ints = 1;</code>
-             */
-            public java.util.List<java.lang.Integer> getIntsList() {
-                return ints_;
-            }
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, gtScanRequest_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, hbaseRawScan_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeInt32(3, rowkeyPreambleSize_);
+      }
+      for (int i = 0; i < hbaseColumnsToGT_.size(); i++) {
+        output.writeMessage(4, hbaseColumnsToGT_.get(i));
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBytes(5, getKylinPropertiesBytes());
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeBytes(6, getQueryIdBytes());
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        output.writeBool(7, spillEnabled_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeInt64(8, maxScanBytes_);
+      }
+      getUnknownFields().writeTo(output);
+    }
 
-            /**
-             * <code>repeated int32 ints = 1;</code>
-             */
-            public int getIntsCount() {
-                return ints_.size();
-            }
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, gtScanRequest_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, hbaseRawScan_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(3, rowkeyPreambleSize_);
+      }
+      for (int i = 0; i < hbaseColumnsToGT_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(4, hbaseColumnsToGT_.get(i));
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(5, getKylinPropertiesBytes());
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(6, getQueryIdBytes());
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(7, spillEnabled_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(8, maxScanBytes_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
 
-            /**
-             * <code>repeated int32 ints = 1;</code>
-             */
-            public int getInts(int index) {
-                return ints_.get(index);
-            }
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
 
-            private void initFields() {
-                ints_ = java.util.Collections.emptyList();
-            }
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest other = (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest) obj;
+
+      boolean result = true;
+      result = result && (hasGtScanRequest() == other.hasGtScanRequest());
+      if (hasGtScanRequest()) {
+        result = result && getGtScanRequest()
+            .equals(other.getGtScanRequest());
+      }
+      result = result && (hasHbaseRawScan() == other.hasHbaseRawScan());
+      if (hasHbaseRawScan()) {
+        result = result && getHbaseRawScan()
+            .equals(other.getHbaseRawScan());
+      }
+      result = result && (hasRowkeyPreambleSize() == other.hasRowkeyPreambleSize());
+      if (hasRowkeyPreambleSize()) {
+        result = result && (getRowkeyPreambleSize()
+            == other.getRowkeyPreambleSize());
+      }
+      result = result && getHbaseColumnsToGTList()
+          .equals(other.getHbaseColumnsToGTList());
+      result = result && (hasKylinProperties() == other.hasKylinProperties());
+      if (hasKylinProperties()) {
+        result = result && getKylinProperties()
+            .equals(other.getKylinProperties());
+      }
+      result = result && (hasQueryId() == other.hasQueryId());
+      if (hasQueryId()) {
+        result = result && getQueryId()
+            .equals(other.getQueryId());
+      }
+      result = result && (hasSpillEnabled() == other.hasSpillEnabled());
+      if (hasSpillEnabled()) {
+        result = result && (getSpillEnabled()
+            == other.getSpillEnabled());
+      }
+      result = result && (hasMaxScanBytes() == other.hasMaxScanBytes());
+      if (hasMaxScanBytes()) {
+        result = result && (getMaxScanBytes()
+            == other.getMaxScanBytes());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
 
-            private byte memoizedIsInitialized = -1;
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasGtScanRequest()) {
+        hash = (37 * hash) + GTSCANREQUEST_FIELD_NUMBER;
+        hash = (53 * hash) + getGtScanRequest().hashCode();
+      }
+      if (hasHbaseRawScan()) {
+        hash = (37 * hash) + HBASERAWSCAN_FIELD_NUMBER;
+        hash = (53 * hash) + getHbaseRawScan().hashCode();
+      }
+      if (hasRowkeyPreambleSize()) {
+        hash = (37 * hash) + ROWKEYPREAMBLESIZE_FIELD_NUMBER;
+        hash = (53 * hash) + getRowkeyPreambleSize();
+      }
+      if (getHbaseColumnsToGTCount() > 0) {
+        hash = (37 * hash) + HBASECOLUMNSTOGT_FIELD_NUMBER;
+        hash = (53 * hash) + getHbaseColumnsToGTList().hashCode();
+      }
+      if (hasKylinProperties()) {
+        hash = (37 * hash) + KYLINPROPERTIES_FIELD_NUMBER;
+        hash = (53 * hash) + getKylinProperties().hashCode();
+      }
+      if (hasQueryId()) {
+        hash = (37 * hash) + QUERYID_FIELD_NUMBER;
+        hash = (53 * hash) + getQueryId().hashCode();
+      }
+      if (hasSpillEnabled()) {
+        hash = (37 * hash) + SPILLENABLED_FIELD_NUMBER;
+        hash = (53 * hash) + hashBoolean(getSpillEnabled());
+      }
+      if (hasMaxScanBytes()) {
+        hash = (37 * hash) + MAXSCANBYTES_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getMaxScanBytes());
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
 
-            public final boolean isInitialized() {
-                byte isInitialized = memoizedIsInitialized;
-                if (isInitialized != -1)
-                    return isInitialized == 1;
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
 
-                memoizedIsInitialized = 1;
-                return true;
-            }
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
 
-            public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
-                getSerializedSize();
-                for (int i = 0; i < ints_.size(); i++) {
-                    output.writeInt32(1, ints_.get(i));
-                }
-                getUnknownFields().writeTo(output);
-            }
-
-            private int memoizedSerializedSize = -1;
-
-            public int getSerializedSize() {
-                int size = memoizedSerializedSize;
-                if (size != -1)
-                    return size;
-
-                size = 0;
-                {
-                    int dataSize = 0;
-                    for (int i = 0; i < ints_.size(); i++) {
-                        dataSize += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(ints_.get(i));
-                    }
-                    size += dataSize;
-                    size += 1 * getIntsList().size();
-                }
-                size += getUnknownFields().getSerializedSize();
-                memoizedSerializedSize = size;
-                return size;
-            }
-
-            private static final long serialVersionUID = 0L;
-
-            @java.lang.Override
-            protected java.lang.Object writeReplace() throws java.io.ObjectStreamException {
-                return super.writeReplace();
-            }
-
-            @java.lang.Override
-            public boolean equals(final java.lang.Object obj) {
-                if (obj == this) {
-                    return true;
-                }
-                if (!(obj instanceof org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList)) {
-                    return super.equals(obj);
-                }
-                org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList other = (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) obj;
-
-                boolean result = true;
-                result = result && getIntsList().equals(other.getIntsList());
-                result = result && getUnknownFields().equals(other.getUnknownFields());
-                return result;
-            }
-
-            private int memoizedHashCode = 0;
-
-            @java.lang.Override
-            public int hashCode() {
-                if (memoizedHashCode != 0) {
-                    return memoizedHashCode;
-                }
-                int hash = 41;
-                hash = (19 * hash) + getDescriptorForType().hashCode();
-                if (getIntsCount() > 0) {
-                    hash = (37 * hash) + INTS_FIELD_NUMBER;
-                    hash = (53 * hash) + getIntsList().hashCode();
-                }
-                hash = (29 * hash) + getUnknownFields().hashCode();
-                memoizedHashCode = hash;
-                return hash;
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException {
-                return PARSER.parseFrom(data);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                    throws com.google.protobuf.InvalidProtocolBufferException {
-                return PARSER.parseFrom(data, extensionRegistry);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
-                return PARSER.parseFrom(data);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                    throws com.google.protobuf.InvalidProtocolBufferException {
-                return PARSER.parseFrom(data, extensionRegistry);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    java.io.InputStream input) throws java.io.IOException {
-                return PARSER.parseFrom(input);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                    throws java.io.IOException {
-                return PARSER.parseFrom(input, extensionRegistry);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseDelimitedFrom(
-                    java.io.InputStream input) throws java.io.IOException {
-                return PARSER.parseDelimitedFrom(input);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseDelimitedFrom(
-                    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-                    throws java.io.IOException {
-                return PARSER.parseDelimitedFrom(input, extensionRegistry);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
-                return PARSER.parseFrom(input);
-            }
-
-            public static org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parseFrom(
-                    com.google.protobuf.CodedInputStream input,
-                    com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
-                return PARSER.parseFrom(input, extensionRegistry);
-            }
-
-            public static Builder newBuilder() {
-                return Builder.create();
-            }
-
-            public Builder newBuilderForType() {
-                return newBuilder();
-            }
-
-            public static Builder newBuilder(
-                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList prototype) {
-                return newBuilder().mergeFrom(prototype);
-            }
-
-            public Builder toBuilder() {
-                return newBuilder(this);
-            }
-
-            @java.lang.Override
-            protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-                Builder builder = new Builder(parent);
-                return builder;
-            }
-
-            /**
-             * Protobuf type {@code CubeVisitRequest.IntList}
-             */
-            public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements
-                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder {
-                public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
-                    return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_descriptor;
-                }
-
-                protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() {
-                    return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_fieldAccessorTable
-                            .ensureFieldAccessorsInitialized(
-                                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.class,
-                                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder.class);
-                }
-
-                // Construct using org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.newBuilder()
-                private Builder() {
-                    maybeForceBuilderInitialization();
-                }
-
-                private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-                    super(parent);
-                    maybeForceBuilderInitialization();
-                }
-
-                private void maybeForceBuilderInitialization() {
-                    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-                    }
-                }
-
-                private static Builder create() {
-                    return new Builder();
-                }
-
-                public Builder clear() {
-                    super.clear();
-                    ints_ = java.util.Collections.emptyList();
-                    bitField0_ = (bitField0_ & ~0x00000001);
-                    return this;
-                }
-
-                public Builder clone() {
-                    return create().mergeFrom(buildPartial());
-                }
-
-                public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
-                    return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_IntList_descriptor;
-                }
-
-                public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getDefaultInstanceForType() {
-                    return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList
-                            .getDefaultInstance();
-                }
-
-                public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList build() {
-                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList result = buildPartial();
-                    if (!result.isInitialized()) {
-                        throw newUninitializedMessageException(result);
-                    }
-                    return result;
-                }
-
-                public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList buildPartial() {
-                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList result = new org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList(
-                            this);
-                    int from_bitField0_ = bitField0_;
-                    if (((bitField0_ & 0x00000001) == 0x00000001)) {
-                        ints_ = java.util.Collections.unmodifiableList(ints_);
-                        bitField0_ = (bitField0_ & ~0x00000001);
-                    }
-                    result.ints_ = ints_;
-                    onBuilt();
-                    return result;
-                }
-
-                public Builder mergeFrom(com.google.protobuf.Message other) {
-                    if (other instanceof org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) {
-                        return mergeFrom(
-                                (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) other);
-                    } else {
-                        super.mergeFrom(other);
-                        return this;
-                    }
-                }
-
-                public Builder mergeFrom(
-                        org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList other) {
-                    if (other == org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList
-                            .getDefaultInstance())
-                        return this;
-                    if (!other.ints_.isEmpty()) {
-                        if (ints_.isEmpty()) {
-                            ints_ = other.ints_;
-                            bitField0_ = (bitField0_ & ~0x00000001);
-                        } else {
-                            ensureIntsIsMutable();
-                            ints_.addAll(other.ints_);
-                        }
-                        onChanged();
-                    }
-                    this.mergeUnknownFields(other.getUnknownFields());
-                    return this;
-                }
-
-                public final boolean isInitialized() {
-                    return true;
-                }
-
-                public Builder mergeFrom(com.google.protobuf.CodedInputStream input,
-                        com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
-                    org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList parsedMessage = null;
-                    try {
-                        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-                    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-                        parsedMessage = (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList) e
-                                .getUnfinishedMessage();
-                        throw e;
-                    } finally {
-                        if (parsedMessage != null) {
-                            mergeFrom(parsedMessage);
-                        }
-                    }
-                    return this;
-                }
-
-                private int bitField0_;
-
-                // repeated int32 ints = 1;
-                private java.util.List<java.lang.Integer> ints_ = java.util.Collections.emptyList();
-
-                private void ensureIntsIsMutable() {
-                    if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-                        ints_ = new java.util.ArrayList<java.lang.Integer>(ints_);
-                        bitField0_ |= 0x00000001;
-                    }
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public java.util.List<java.lang.Integer> getIntsList() {
-                    return java.util.Collections.unmodifiableList(ints_);
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public int getIntsCount() {
-                    return ints_.size();
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public int getInts(int index) {
-                    return ints_.get(index);
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public Builder setInts(int index, int value) {
-                    ensureIntsIsMutable();
-                    ints_.set(index, value);
-                    onChanged();
-                    return this;
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public Builder addInts(int value) {
-                    ensureIntsIsMutable();
-                    ints_.add(value);
-                    onChanged();
-                    return this;
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public Builder addAllInts(java.lang.Iterable<? extends java.lang.Integer> values) {
-                    ensureIntsIsMutable();
-                    super.addAll(values, ints_);
-                    onChanged();
-                    return this;
-                }
-
-                /**
-                 * <code>repeated int32 ints = 1;</code>
-                 */
-                public Builder clearInts() {
-                    ints_ = java.util.Collections.emptyList();
-                    bitField0_ = (bitField0_ & ~0x00000001);
-                    onChanged();
-                    return this;
-                }
-
-                // @@protoc_insertion_point(builder_scope:CubeVisitRequest.IntList)
-            }
-
-            static {
-                defaultInstance = new IntList(true);
-                defaultInstance.initFields();
-            }
-
-            // @@protoc_insertion_point(class_scope:CubeVisitRequest.IntList)
-        }
-
-        private int bitField0_;
-        // required bytes gtScanRequest = 1;
-        public static final int GTSCANREQUEST_FIELD_NUMBER = 1;
-        private com.google.protobuf.ByteString gtScanRequest_;
-
-        /**
-         * <code>required bytes gtScanRequest = 1;</code>
-         */
-        public boolean hasGtScanRequest() {
-            return ((bitField0_ & 0x00000001) == 0x00000001);
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code CubeVisitRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.internal_static_CubeVisitRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.class, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.Builder.class);
+      }
+
+      // Construct using org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getHbaseColumnsToGTFieldBuilder();
         }
-
-        /**
-         * <code>required bytes gtScanRequest = 1;</code>
-         */
-        public com.google.protobuf.ByteString getGtScanRequest() {
-            return gtScanRequest_;
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        gtScanRequest_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        hbaseRawScan_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_

<TRUNCATED>

[28/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
Revert "reformat code"

This reverts commit c5b37744e7e6c5334788bd2d4b006876cf3b2924.


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/19585846
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/19585846
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/19585846

Branch: refs/heads/master
Commit: 195858461c19d654faf3036df44d4d1f9889654a
Parents: f955960
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri May 26 20:15:46 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Fri May 26 20:41:41 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/RealizationRegistryTest.java   |     8 +-
 .../kylin/job/DeployLocalMetaToRemoteTest.java  |     3 +-
 .../java/org/apache/kylin/job/DeployUtil.java   |     6 +-
 .../kylin/job/streaming/Kafka10DataLoader.java  |    18 +-
 .../kylin/job/streaming/StreamDataLoader.java   |     5 +-
 .../enumerable/EnumerableWindowBridge.java      |     6 +-
 .../rel/rules/OLAPJoinPushThroughJoinRule.java  |    21 +-
 .../rel/rules/OLAPJoinPushThroughJoinRule2.java |    30 +-
 .../apache/calcite/sql2rel/RelFieldTrimmer.java |     5 +-
 .../calcite/sql2rel/SqlToRelConverter.java      |  2816 +++--
 .../java/org/apache/calcite/tools/Programs.java |   582 +-
 .../common/BackwardCompatibilityConfig.java     |    16 +-
 .../org/apache/kylin/common/KylinConfig.java    |    12 +-
 .../apache/kylin/common/KylinConfigBase.java    |    20 +-
 .../org/apache/kylin/common/KylinVersion.java   |     6 +-
 .../org/apache/kylin/common/QueryContext.java   |     2 +-
 .../kylin/common/lock/DistributedLock.java      |    11 +-
 .../common/lock/DistributedLockFactory.java     |     2 +-
 .../common/persistence/FileResourceStore.java   |    12 +-
 .../kylin/common/persistence/ResourceStore.java |    33 +-
 .../kylin/common/persistence/ResourceTool.java  |     6 +-
 .../kylin/common/persistence/StringEntity.java  |     6 +-
 .../kylin/common/restclient/RestClient.java     |    17 +-
 .../org/apache/kylin/common/util/Bytes.java     |    89 +-
 .../apache/kylin/common/util/BytesSplitter.java |     3 +-
 .../org/apache/kylin/common/util/ClassUtil.java |    24 +-
 .../kylin/common/util/ClasspathScanner.java     |    12 +-
 .../kylin/common/util/CompressionUtils.java     |     6 +-
 .../org/apache/kylin/common/util/DBUtils.java   |     2 +-
 .../apache/kylin/common/util/DateFormat.java    |     2 +-
 .../apache/kylin/common/util/HadoopUtil.java    |     8 +-
 .../kylin/common/util/ImplementationSwitch.java |     6 +-
 .../org/apache/kylin/common/util/JsonUtil.java  |    21 +-
 .../apache/kylin/common/util/MailService.java   |     8 +-
 .../common/util/MemoryBudgetController.java     |     9 +-
 .../kylin/common/util/OrderedProperties.java    |     3 +-
 .../org/apache/kylin/common/util/Primes.java    |    31 +-
 .../org/apache/kylin/common/util/RangeUtil.java |     3 +-
 .../org/apache/kylin/common/util/SortUtil.java  |     3 +-
 .../apache/kylin/common/util/ZipFileUtils.java  |    10 +-
 .../org/apache/kylin/common/StorageURLTest.java |     2 +-
 .../persistence/LocalFileResourceStoreTest.java |     6 +-
 .../kylin/common/restclient/RestClientTest.java |     4 +-
 .../org/apache/kylin/common/util/BasicTest.java |     6 +-
 .../kylin/common/util/CacheBuilderTest.java     |    13 +-
 .../apache/kylin/common/util/ClassUtilTest.java |     6 +-
 .../kylin/common/util/HiveCmdBuilderTest.java   |    13 +-
 .../util/HotLoadKylinPropertiesTestCase.java    |     8 +-
 .../common/util/InstallJarIntoMavenTest.java    |     4 +-
 .../apache/kylin/common/util/JacksonTest.java   |     9 +-
 .../common/util/LocalFileMetadataTestCase.java  |     2 +-
 .../org/apache/kylin/common/util/RangeTest.java |     6 +-
 .../apache/kylin/common/util/TimeUtilTest.java  |     6 +-
 .../org/apache/kylin/GTForwardingScanner.java   |     8 +-
 .../kylin/cube/CubeCapabilityChecker.java       |    25 +-
 .../org/apache/kylin/cube/CubeDescManager.java  |    18 +-
 .../org/apache/kylin/cube/CubeInstance.java     |     6 +-
 .../java/org/apache/kylin/cube/CubeManager.java |   100 +-
 .../java/org/apache/kylin/cube/CubeSegment.java |     6 +-
 .../apache/kylin/cube/RawQueryLastHacker.java   |     3 +-
 .../kylin/cube/cli/CubeSignatureRefresher.java  |     3 +-
 .../kylin/cube/cli/DictionaryGeneratorCLI.java  |    14 +-
 .../cube/cuboid/AggregationGroupScheduler.java  |     2 +-
 .../org/apache/kylin/cube/cuboid/Cuboid.java    |    22 +-
 .../org/apache/kylin/cube/cuboid/CuboidCLI.java |     6 +-
 .../kylin/cube/cuboid/CuboidScheduler.java      |    10 +-
 .../gridtable/AsymmetricRecordComparator.java   |     4 +-
 .../kylin/cube/gridtable/CubeCodeSystem.java    |     3 +-
 .../kylin/cube/gridtable/CubeGridTable.java     |     3 +-
 .../cube/gridtable/ScanRangePlannerBase.java    |     9 +-
 .../inmemcubing/AbstractInMemCubeBuilder.java   |     6 +-
 .../cube/inmemcubing/ConcurrentDiskStore.java   |     3 +-
 .../cube/inmemcubing/DoggedCubeBuilder.java     |    17 +-
 .../cube/inmemcubing/InMemCubeBuilder.java      |    54 +-
 .../InMemCubeBuilderInputConverter.java         |     3 +-
 .../cube/inmemcubing/InMemCubeBuilderUtils.java |    11 +-
 .../kylin/cube/inmemcubing/MemDiskStore.java    |    18 +-
 .../kylin/cube/kv/AbstractRowKeyEncoder.java    |     4 +-
 .../org/apache/kylin/cube/kv/CubeDimEncMap.java |     8 +-
 .../apache/kylin/cube/kv/FuzzyMaskEncoder.java  |     3 +-
 .../apache/kylin/cube/kv/RowKeyColumnIO.java    |     3 +-
 .../org/apache/kylin/cube/kv/RowKeyEncoder.java |    26 +-
 .../kylin/cube/model/AggregationGroup.java      |     3 +-
 .../org/apache/kylin/cube/model/CubeDesc.java   |    72 +-
 .../cube/model/CubeJoinedFlatTableDesc.java     |     6 +-
 .../cube/model/CubeJoinedFlatTableEnrich.java   |     4 +-
 .../apache/kylin/cube/model/DimensionDesc.java  |     6 +-
 .../kylin/cube/model/HBaseColumnDesc.java       |     9 +-
 .../kylin/cube/model/HBaseColumnFamilyDesc.java |     7 +-
 .../kylin/cube/model/HBaseMappingDesc.java      |     7 +-
 .../apache/kylin/cube/model/RowKeyColDesc.java  |     2 +-
 .../kylin/cube/model/v1_4_0/CubeDesc.java       |    22 +-
 .../kylin/cube/model/v1_4_0/DimensionDesc.java  |     7 +-
 .../kylin/cube/model/v1_4_0/RowKeyColDesc.java  |     3 +-
 .../kylin/cube/model/v1_4_0/RowKeyDesc.java     |     6 +-
 .../model/validation/CubeMetadataValidator.java |     3 +-
 .../validation/rule/AggregationGroupRule.java   |    34 +-
 .../model/validation/rule/DictionaryRule.java   |     4 +-
 .../model/validation/rule/FunctionRule.java     |    25 +-
 .../validation/rule/StreamingCubeRule.java      |     9 +-
 .../upgrade/V1_5_1/CubeDescUpgrade_v_1_5_1.java |     9 +-
 .../V1_5_1/CubeMetadataUpgrade_v_1_5_1.java     |    17 +-
 .../upgrade/common/CubeMetadataUpgrade.java     |    10 +-
 .../common/MetadataVersionRefresher.java        |     3 +-
 .../entry/CubeMetadataUpgradeEntry_v_1_5_1.java |    13 +-
 .../v1_4_0/CubeMetadataUpgrade_v_1_4_0.java     |     3 +-
 .../org/apache/kylin/cube/util/CubingUtils.java |    15 +-
 .../kylin/gridtable/GTAggregateScanner.java     |    15 +-
 .../java/org/apache/kylin/gridtable/GTInfo.java |     6 +-
 .../org/apache/kylin/gridtable/GTRecord.java    |     3 +-
 .../org/apache/kylin/gridtable/GTRowBlock.java  |     3 +-
 .../apache/kylin/gridtable/GTScanRequest.java   |    24 +-
 .../kylin/gridtable/GTScanRequestBuilder.java   |    12 +-
 .../gridtable/GTStreamAggregateScanner.java     |    13 +-
 .../java/org/apache/kylin/gridtable/GTUtil.java |    13 +-
 .../apache/kylin/gridtable/UnitTestSupport.java |    18 +-
 .../gridtable/benchmark/GTScannerBenchmark.java |     7 +-
 .../benchmark/GTScannerBenchmark2.java          |     7 +-
 .../kylin/cube/AggregationGroupRuleTest.java    |    32 +-
 .../org/apache/kylin/cube/CubeDescTest.java     |    59 +-
 .../org/apache/kylin/cube/CubeManagerTest.java  |    59 +-
 .../kylin/cube/CubeSpecificConfigTest.java      |     4 +-
 .../apache/kylin/cube/RowKeyAttrRuleTest.java   |     4 +-
 .../kylin/cube/common/BytesSplitterTest.java    |    15 +-
 .../kylin/cube/common/RowKeySplitterTest.java   |     6 +-
 .../kylin/cube/cuboid/CombinationTest.java      |     3 +-
 .../kylin/cube/cuboid/CuboidSchedulerTest.java  |    26 +-
 .../inmemcubing/ConcurrentDiskStoreTest.java    |     3 +-
 .../cube/inmemcubing/MemDiskStoreTest.java      |     3 +-
 .../apache/kylin/cube/kv/RowKeyDecoderTest.java |    15 +-
 .../apache/kylin/cube/kv/RowKeyEncoderTest.java |    24 +-
 .../validation/rule/DictionaryRuleTest.java     |    18 +-
 .../model/validation/rule/FunctionRuleTest.java |     3 +-
 .../kylin/cube/project/ProjectManagerTest.java  |     6 +-
 .../gridtable/AggregationCacheMemSizeTest.java  |    48 +-
 .../gridtable/AggregationCacheSpillTest.java    |    12 +-
 .../gridtable/DimEncodingPreserveOrderTest.java |    10 +-
 .../kylin/gridtable/GTScanReqSerDerTest.java    |     3 +-
 .../kylin/gridtable/SimpleGridTableTest.java    |     7 +-
 .../metadata/measure/MeasureCodecTest.java      |     3 +-
 .../metadata/measure/TopNMeasureTypeTest.java   |     9 +-
 .../apache/kylin/dict/AppendTrieDictionary.java |    64 +-
 .../kylin/dict/BuiltInFunctionTransformer.java  |    18 +-
 .../org/apache/kylin/dict/ByteComparator.java   |     4 +-
 .../org/apache/kylin/dict/CacheDictionary.java  |     4 +-
 .../apache/kylin/dict/DateStrDictionary.java    |     5 +-
 .../apache/kylin/dict/DictionaryGenerator.java  |    30 +-
 .../org/apache/kylin/dict/DictionaryInfo.java   |    10 +-
 .../apache/kylin/dict/DictionaryManager.java    |    51 +-
 .../kylin/dict/GlobalDictionaryBuilder.java     |     3 +-
 .../apache/kylin/dict/IDictionaryBuilder.java   |     4 +-
 .../kylin/dict/IDictionaryValueEnumerator.java  |     2 +-
 .../dict/IterableDictionaryValueEnumerator.java |     2 +-
 .../dict/MultipleDictionaryValueEnumerator.java |     2 +-
 .../kylin/dict/Number2BytesConverter.java       |    16 +-
 .../org/apache/kylin/dict/NumberDictionary.java |     8 +-
 .../kylin/dict/NumberDictionaryBuilder.java     |     3 +
 .../dict/NumberDictionaryForestBuilder.java     |     6 +-
 .../kylin/dict/TableColumnValueEnumerator.java  |     3 +-
 .../dict/TableColumnValueSortedEnumerator.java  |     7 +-
 .../org/apache/kylin/dict/TrieDictionary.java   |    11 +-
 .../kylin/dict/TrieDictionaryBuilder.java       |    27 +-
 .../apache/kylin/dict/TrieDictionaryForest.java |    17 +-
 .../kylin/dict/TrieDictionaryForestBuilder.java |    13 +-
 .../kylin/dict/global/AppendDictNode.java       |    27 +-
 .../kylin/dict/global/AppendDictSlice.java      |    21 +-
 .../kylin/dict/global/AppendDictSliceKey.java   |     4 +-
 .../global/AppendTrieDictionaryBuilder.java     |    27 +-
 .../global/AppendTrieDictionaryChecker.java     |     4 +-
 .../kylin/dict/global/GlobalDictHDFSStore.java  |    31 +-
 .../kylin/dict/global/GlobalDictMetadata.java   |    10 +-
 .../kylin/dict/global/GlobalDictStore.java      |     8 +-
 .../apache/kylin/dict/lookup/LookupTable.java   |     6 +-
 .../apache/kylin/dict/lookup/SnapshotCLI.java   |     3 +-
 .../kylin/dict/lookup/SnapshotManager.java      |     9 +-
 .../apache/kylin/dict/lookup/SnapshotTable.java |     7 +-
 .../kylin/dict/DictionaryManagerTest.java       |    33 +-
 .../kylin/dict/DictionaryProviderTest.java      |    17 +-
 .../apache/kylin/dict/MockupReadableTable.java  |     2 +-
 .../MultipleDictionaryValueEnumeratorTest.java  |    45 +-
 .../apache/kylin/dict/NumberDictionaryTest.java |     9 +-
 .../dict/TrieDictionaryForestBenchmark.java     |    16 +-
 .../kylin/dict/TrieDictionaryForestTest.java    |    25 +-
 .../apache/kylin/dict/TrieDictionaryTest.java   |    19 +-
 .../dict/global/AppendTrieDictionaryTest.java   |    37 +-
 .../kylin/dict/lookup/LookupTableTest.java      |     8 +-
 .../org/apache/kylin/engine/EngineFactory.java  |     4 +-
 .../apache/kylin/engine/IBatchCubingEngine.java |     4 +-
 .../java/org/apache/kylin/job/JobInstance.java  |     3 +-
 .../org/apache/kylin/job/JoinedFlatTable.java   |    15 +-
 .../kylin/job/common/PatternedLogger.java       |    10 +-
 .../kylin/job/common/ShellExecutable.java       |     7 +-
 .../kylin/job/constant/ExecutableConstants.java |     8 +-
 .../kylin/job/constant/JobStatusEnum.java       |     3 +-
 .../kylin/job/constant/JobStepStatusEnum.java   |     3 +-
 .../org/apache/kylin/job/dao/ExecutableDao.java |    12 +-
 .../kylin/job/engine/JobEngineConfig.java       |     3 +-
 .../exception/IllegalStateTranferException.java |     3 +-
 .../kylin/job/execution/AbstractExecutable.java |     9 +-
 .../job/execution/DefaultChainedExecutable.java |     7 +-
 .../kylin/job/execution/ExecutableManager.java  |    17 +-
 .../kylin/job/execution/ExecutableState.java    |    16 +-
 .../job/impl/threadpool/DefaultScheduler.java   |    12 +-
 .../impl/threadpool/DistributedScheduler.java   |    19 +-
 .../java/org/apache/kylin/job/lock/JobLock.java |     2 +-
 .../apache/kylin/job/ExecutableManagerTest.java |     3 +-
 .../apache/kylin/job/JobEngineConfigTest.java   |     8 +-
 .../apache/kylin/job/SelfStopExecutable.java    |     4 +-
 .../job/impl/threadpool/BaseSchedulerTest.java  |     9 +-
 .../apache/kylin/dimension/BooleanDimEnc.java   |     5 +-
 .../org/apache/kylin/dimension/DateDimEnc.java  |     3 +-
 .../kylin/dimension/DictionaryDimEnc.java       |     3 +-
 .../apache/kylin/dimension/FixedLenDimEnc.java  |     6 +-
 .../kylin/dimension/FixedLenHexDimEnc.java      |    11 +-
 .../org/apache/kylin/dimension/IntDimEnc.java   |     8 +-
 .../apache/kylin/dimension/IntegerDimEnc.java   |    14 +-
 .../kylin/dimension/OneMoreByteVLongDimEnc.java |    24 +-
 .../kylin/measure/BufferedMeasureCodec.java     |     6 +-
 .../kylin/measure/MeasureAggregators.java       |     3 +-
 .../org/apache/kylin/measure/MeasureCodec.java  |     6 +-
 .../apache/kylin/measure/MeasureIngester.java   |    12 +-
 .../org/apache/kylin/measure/MeasureType.java   |    16 +-
 .../kylin/measure/MeasureTypeFactory.java       |    21 +-
 .../kylin/measure/basic/BasicMeasureType.java   |    12 +-
 .../kylin/measure/basic/BigDecimalIngester.java |     3 +-
 .../BitmapIntersectDistinctCountAggFunc.java    |     5 +-
 .../kylin/measure/bitmap/BitmapMeasureType.java |    20 +-
 .../kylin/measure/bitmap/BitmapSerializer.java  |     6 +-
 .../measure/bitmap/RoaringBitmapCounter.java    |    11 +-
 .../bitmap/RoaringBitmapCounterFactory.java     |     7 +-
 .../dim/DimCountDistinctMeasureType.java        |     3 +-
 .../ExtendedColumnMeasureType.java              |    12 +-
 .../kylin/measure/hllc/HLLCMeasureType.java     |    12 +-
 .../kylin/measure/hllc/HLLCSerializer.java      |     6 +-
 .../apache/kylin/measure/hllc/HLLCounter.java   |    60 +-
 .../kylin/measure/hllc/HLLCounterOld.java       |     5 +-
 .../measure/hllc/HLLDistinctCountAggFunc.java   |     3 +-
 .../measure/hllc/HyperLogLogPlusTable.java      |   846 +-
 .../measure/percentile/PercentileAggFunc.java   |     2 +-
 .../measure/percentile/PercentileCounter.java   |     1 -
 .../percentile/PercentileMeasureType.java       |     6 +-
 .../kylin/measure/raw/RawMeasureType.java       |    12 +-
 .../apache/kylin/measure/raw/RawSerializer.java |     6 +-
 .../org/apache/kylin/measure/topn/Counter.java  |     4 +-
 .../kylin/measure/topn/TopNMeasureType.java     |    44 +-
 .../apache/kylin/metadata/MetadataManager.java  |    49 +-
 .../kylin/metadata/badquery/BadQueryEntry.java  |     6 +-
 .../badquery/BadQueryHistoryManager.java        |    20 +-
 .../kylin/metadata/cachesync/Broadcaster.java   |    18 +-
 .../metadata/datatype/BigDecimalSerializer.java |     4 +-
 .../metadata/datatype/BooleanSerializer.java    |     8 +-
 .../kylin/metadata/datatype/DataType.java       |     8 +-
 .../metadata/datatype/DataTypeSerializer.java   |     7 +-
 .../metadata/datatype/DateTimeSerializer.java   |     4 +-
 .../kylin/metadata/datatype/Int4Serializer.java |     4 +-
 .../metadata/datatype/Long8Serializer.java      |     5 +-
 .../kylin/metadata/datatype/LongSerializer.java |     4 +-
 .../filter/BuiltInFunctionTupleFilter.java      |    11 +-
 .../kylin/metadata/filter/CaseTupleFilter.java  |     3 +-
 .../metadata/filter/ColumnTupleFilter.java      |    16 +-
 .../metadata/filter/CompareTupleFilter.java     |    17 +-
 .../filter/FilterOptimizeTransformer.java       |     8 +-
 .../metadata/filter/LogicalTupleFilter.java     |     3 +-
 .../kylin/metadata/filter/TupleFilter.java      |     4 +-
 .../metadata/filter/TupleFilterSerializer.java  |     6 +-
 .../metadata/filter/UDF/MassInTupleFilter.java  |     9 +-
 .../filter/UDF/MassInValueProviderFactory.java  |     3 +-
 .../metadata/filter/function/BuiltInMethod.java |     5 +-
 .../metadata/filter/function/Functions.java     |     3 +-
 .../kylin/metadata/filter/function/Like.java    |     8 +-
 .../metadata/filter/function/LikeMatchers.java  |     3 +-
 .../apache/kylin/metadata/model/ColumnDesc.java |     6 +-
 .../kylin/metadata/model/DataModelDesc.java     |    23 +-
 .../metadata/model/ExternalFilterDesc.java      |     3 +-
 .../kylin/metadata/model/FunctionDesc.java      |     3 +-
 .../apache/kylin/metadata/model/ISegment.java   |     4 +-
 .../apache/kylin/metadata/model/JoinDesc.java   |    32 +-
 .../kylin/metadata/model/JoinTableDesc.java     |    12 +-
 .../apache/kylin/metadata/model/JoinsTree.java  |     6 +-
 .../kylin/metadata/model/MeasureDesc.java       |     3 +-
 .../metadata/model/ModelDimensionDesc.java      |     4 +-
 .../kylin/metadata/model/PartitionDesc.java     |    71 +-
 .../kylin/metadata/model/SegmentStatusEnum.java |     2 +-
 .../apache/kylin/metadata/model/Segments.java   |    22 +-
 .../apache/kylin/metadata/model/TableDesc.java  |     8 +-
 .../kylin/metadata/model/TableExtDesc.java      |     6 +-
 .../kylin/metadata/project/ProjectInstance.java |     6 +-
 .../kylin/metadata/project/ProjectL2Cache.java  |    14 +-
 .../kylin/metadata/project/ProjectManager.java  |    35 +-
 .../metadata/project/RealizationEntry.java      |     6 +-
 .../kylin/metadata/querymeta/ColumnMeta.java    |     6 +-
 .../metadata/querymeta/ColumnMetaWithType.java  |     6 +-
 .../metadata/querymeta/SelectedColumnMeta.java  |     5 +-
 .../kylin/metadata/querymeta/TableMeta.java     |     4 +-
 .../metadata/querymeta/TableMetaWithType.java   |     4 +-
 .../realization/RealizationRegistry.java        |     6 +-
 .../realization/RealizationStatusEnum.java      |     2 +-
 .../kylin/metadata/realization/SQLDigest.java   |     2 +-
 .../metadata/realization/SQLDigestUtil.java     |     6 +-
 .../metadata/realization/StreamSQLDigest.java   |    24 +-
 .../metadata/streaming/StreamingManager.java    |    20 +-
 .../org/apache/kylin/metadata/tuple/Tuple.java  |     3 +-
 .../java/org/apache/kylin/source/ISource.java   |     2 +-
 .../kylin/source/ISourceMetadataExplorer.java   |     6 +-
 .../org/apache/kylin/source/SourceFactory.java  |     2 +-
 .../apache/kylin/source/SourcePartition.java    |     8 +-
 .../kylin/source/datagen/ColumnGenConfig.java   |    28 +-
 .../kylin/source/datagen/ColumnGenerator.java   |     4 +-
 .../source/datagen/ModelDataGenerator.java      |    28 +-
 .../kylin/source/datagen/TableGenConfig.java    |    16 +-
 .../org/apache/kylin/source/datagen/Util.java   |     2 +-
 .../kylin/dimension/FixedLenHexDimEncTest.java  |     5 +-
 .../kylin/dimension/IntegerDimEncTest.java      |    10 +-
 .../dimension/OneMoreByteVLongDimEncTest.java   |     2 +
 .../measure/AggregatorMemEstimateTest.java      |    13 +-
 .../measure/bitmap/BitmapAggregatorTest.java    |     9 +-
 .../kylin/measure/bitmap/BitmapCounterTest.java |    10 +-
 .../measure/bitmap/BitmapSerializerTest.java    |    14 +-
 .../kylin/measure/hllc/HLLCMeasureTypeTest.java |    13 +-
 .../percentile/PercentileCounterTest.java       |     2 +-
 .../kylin/measure/topn/TopNCounterTest.java     |    27 +-
 .../badquery/BadQueryHistoryManagerTest.java    |    15 +-
 .../kylin/metadata/model/DataModelDescTest.java |     9 +-
 .../DefaultPartitionConditionBuilderTest.java   |    18 +-
 .../kylin/metadata/model/JoinsTreeTest.java     |     4 +-
 .../kylin/source/datagen/DataGenTest.java       |     4 +-
 .../apache/kylin/storage/StorageContext.java    |     6 +-
 .../kylin/storage/adhoc/AdHocRunnerBase.java    |     9 +-
 .../storage/gtrecord/CubeScanRangePlanner.java  |    34 +-
 .../storage/gtrecord/CubeSegmentScanner.java    |    16 +-
 .../storage/gtrecord/CubeTupleConverter.java    |    24 +-
 .../gtrecord/GTCubeStorageQueryBase.java        |    55 +-
 .../gtrecord/GTCubeStorageQueryRequest.java     |     3 +-
 .../gtrecord/PartitionResultIterator.java       |     7 +-
 .../kylin/storage/gtrecord/ScannerWorker.java   |    12 +-
 .../gtrecord/SegmentCubeTupleIterator.java      |     9 +-
 .../gtrecord/SequentialCubeTupleIterator.java   |    12 +-
 .../SortMergedPartitionResultIterator.java      |    20 +-
 .../storage/gtrecord/SortedIteratorMerger.java  |    17 +-
 .../gtrecord/StorageResponseGTScatter.java      |    27 +-
 .../kylin/storage/hybrid/HybridInstance.java    |    11 +-
 .../kylin/storage/hybrid/HybridManager.java     |    15 +-
 .../storage/hybrid/HybridStorageQuery.java      |     3 +-
 .../storage/translate/ColumnValueRange.java     |     3 +-
 .../translate/DerivedFilterTranslator.java      |    12 +-
 .../kylin/storage/translate/HBaseKeyRange.java  |    13 +-
 .../apache/kylin/storage/StorageMockUtils.java  |     6 +-
 .../apache/kylin/storage/cache/EhcacheTest.java |    15 +-
 .../storage/gtrecord/DictGridTableTest.java     |   109 +-
 .../SortedIteratorMergerWithLimitTest.java      |    33 +-
 .../kylin/engine/mr/BatchCubingJobBuilder.java  |    12 +-
 .../kylin/engine/mr/BatchCubingJobBuilder2.java |    17 +-
 .../kylin/engine/mr/BatchMergeJobBuilder.java   |     9 +-
 .../kylin/engine/mr/BatchMergeJobBuilder2.java  |     6 +-
 .../kylin/engine/mr/ByteArrayWritable.java      |     3 +-
 .../org/apache/kylin/engine/mr/CubingJob.java   |    27 +-
 .../apache/kylin/engine/mr/DFSFileTable.java    |     2 +-
 .../kylin/engine/mr/DFSFileTableReader.java     |     3 +-
 .../org/apache/kylin/engine/mr/IMROutput2.java  |     3 +-
 .../kylin/engine/mr/JobBuilderSupport.java      |    11 +-
 .../org/apache/kylin/engine/mr/KylinMapper.java |    12 +-
 .../apache/kylin/engine/mr/KylinReducer.java    |    12 +-
 .../java/org/apache/kylin/engine/mr/MRUtil.java |     3 +-
 .../kylin/engine/mr/SortedColumnDFSFile.java    |     8 +-
 .../engine/mr/SortedColumnDFSFileReader.java    |     7 +-
 .../engine/mr/common/AbstractHadoopJob.java     |    79 +-
 .../engine/mr/common/BaseCuboidBuilder.java     |    19 +-
 .../kylin/engine/mr/common/BatchConstants.java  |     1 +
 .../kylin/engine/mr/common/CubeStatsReader.java |    29 +-
 .../kylin/engine/mr/common/CubeStatsWriter.java |     8 +-
 .../kylin/engine/mr/common/CuboidShardUtil.java |     3 +-
 .../common/DefaultSslProtocolSocketFactory.java |     9 +-
 .../kylin/engine/mr/common/HadoopCmdOutput.java |     5 +-
 .../engine/mr/common/HadoopShellExecutable.java |     6 +-
 .../engine/mr/common/HadoopStatusChecker.java   |     6 +-
 .../engine/mr/common/JobInfoConverter.java      |     4 +-
 .../engine/mr/common/MapReduceExecutable.java   |     6 +-
 .../kylin/engine/mr/common/NDCuboidBuilder.java |     7 +-
 .../engine/mr/steps/BaseCuboidMapperBase.java   |    12 +-
 .../engine/mr/steps/CreateDictionaryJob.java    |     6 +-
 .../engine/mr/steps/CubingExecutableUtil.java   |    40 +-
 .../apache/kylin/engine/mr/steps/CuboidJob.java |     6 +-
 .../kylin/engine/mr/steps/CuboidReducer.java    |     3 +-
 .../mr/steps/FactDistinctColumnsCombiner.java   |     6 +-
 .../engine/mr/steps/FactDistinctColumnsJob.java |    17 +-
 .../mr/steps/FactDistinctColumnsMapper.java     |    20 +-
 .../mr/steps/FactDistinctColumnsMapperBase.java |     3 +-
 .../mr/steps/FactDistinctColumnsReducer.java    |    37 +-
 .../engine/mr/steps/HiveToBaseCuboidMapper.java |     2 +-
 .../kylin/engine/mr/steps/InMemCuboidJob.java   |     3 +-
 .../engine/mr/steps/InMemCuboidMapper.java      |     8 +-
 .../engine/mr/steps/InMemCuboidReducer.java     |     3 +-
 .../mr/steps/MapContextGTRecordWriter.java      |     3 +-
 .../engine/mr/steps/MergeCuboidMapper.java      |    16 +-
 .../engine/mr/steps/MergeDictionaryStep.java    |     6 +-
 .../engine/mr/steps/MergeStatisticsStep.java    |    13 +-
 .../engine/mr/steps/MetadataCleanupJob.java     |    16 +-
 .../kylin/engine/mr/steps/NDCuboidMapper.java   |    11 +-
 .../kylin/engine/mr/steps/ReducerNumSizing.java |    13 +-
 .../mr/steps/RowKeyDistributionCheckerJob.java  |     3 +-
 .../steps/RowKeyDistributionCheckerMapper.java  |     2 +-
 .../steps/RowKeyDistributionCheckerReducer.java |     3 +-
 .../engine/mr/steps/SaveStatisticsStep.java     |    10 +-
 .../engine/mr/steps/SelfDefineSortableKey.java  |     8 +-
 .../mr/steps/UpdateCubeInfoAfterBuildStep.java  |     3 +-
 .../mr/steps/UpdateCubeInfoAfterMergeStep.java  |     3 +-
 .../kylin/engine/mr/SortedColumnReaderTest.java |     2 +-
 .../apache/kylin/engine/mr/TableReaderTest.java |     6 +-
 .../kylin/engine/mr/steps/CubeReducerTest.java  |    27 +-
 .../engine/mr/steps/MergeCuboidJobTest.java     |     3 +-
 .../engine/mr/steps/MergeCuboidMapperTest.java  |     8 +-
 .../kylin/engine/mr/steps/NDCuboidJobTest.java  |     6 +-
 .../engine/mr/steps/NDCuboidMapperTest.java     |    15 +-
 .../mr/steps/NewCubeSamplingMethodTest.java     |    39 +-
 .../mr/steps/NumberDictionaryForestTest.java    |    19 +-
 .../mr/steps/SelfDefineSortableKeyTest.java     |    20 +-
 .../spark/SparkBatchCubingJobBuilder2.java      |     3 +-
 .../kylin/engine/spark/SparkCountDemo.java      |    25 +-
 .../apache/kylin/engine/spark/SparkCubing.java  |   333 +-
 .../kylin/engine/spark/SparkCubingByLayer.java  |   165 +-
 .../kylin/engine/spark/SparkExecutable.java     |    10 +-
 .../kylin/engine/spark/util/IteratorUtils.java  |     3 +-
 .../spark/cube/BufferedCuboidWriterTest.java    |     3 +-
 .../engine/spark/util/KyroMappingGenerator.java |    17 +-
 .../main/java/org/apache/kylin/jdbc/Driver.java |     6 +-
 .../org/apache/kylin/jdbc/IRemoteClient.java    |     3 +-
 .../java/org/apache/kylin/jdbc/KylinClient.java |    32 +-
 .../org/apache/kylin/jdbc/KylinConnection.java  |    12 +-
 .../org/apache/kylin/jdbc/KylinJdbcFactory.java |    19 +-
 .../java/org/apache/kylin/jdbc/KylinMeta.java   |    53 +-
 .../kylin/jdbc/KylinPreparedStatement.java      |     3 +-
 .../org/apache/kylin/jdbc/KylinResultSet.java   |     3 +-
 .../org/apache/kylin/jdbc/KylinStatement.java   |     3 +-
 .../apache/kylin/jdbc/json/SQLResponseStub.java |     2 +-
 .../java/org/apache/kylin/jdbc/DriverTest.java  |    10 +-
 .../java/org/apache/kylin/jdbc/DummyClient.java |     3 +-
 .../apache/kylin/jdbc/SQLResonseStubTest.java   |     8 +-
 .../kylin/cube/ITDictionaryManagerTest.java     |     3 +-
 .../inmemcubing/ITDoggedCubeBuilderTest.java    |     5 +-
 .../inmemcubing/ITInMemCubeBuilderTest.java     |    26 +-
 .../dict/ITGlobalDictionaryBuilderTest.java     |    15 +-
 .../org/apache/kylin/jdbc/ITJDBCDriverTest.java |    10 +-
 .../kylin/job/BaseTestDistributedScheduler.java |    10 +-
 .../kylin/provision/BuildCubeWithEngine.java    |    13 +-
 .../kylin/provision/BuildCubeWithStream.java    |    17 +-
 .../org/apache/kylin/provision/MockKafka.java   |    14 +-
 .../java/org/apache/kylin/query/H2Database.java |     9 +-
 .../apache/kylin/query/HackedDbUnitAssert.java  |    52 +-
 .../apache/kylin/query/ITCombinationTest.java   |     3 +-
 .../apache/kylin/query/ITKylinQueryTest.java    |    12 +-
 .../apache/kylin/query/ITMassInQueryTest.java   |     3 +-
 .../org/apache/kylin/query/KylinTestBase.java   |    31 +-
 .../hive/ITHiveSourceTableLoaderTest.java       |     8 +-
 .../hbase/ITAclTableMigrationToolTest.java      |     9 +-
 .../storage/hbase/ITHBaseResourceStoreTest.java |     9 +-
 .../kylin/storage/hbase/ITStorageTest.java      |     9 +-
 .../hbase/ITZookeeperDistributedLockTest.java   |     2 +-
 .../storage/hdfs/ITHDFSResourceStoreTest.java   |     1 +
 .../kylin/storage/hdfs/ITLockManagerTest.java   |    32 +-
 .../java/org/apache/kylin/query/QueryCli.java   |     6 +-
 .../org/apache/kylin/query/QueryDataSource.java |     4 +-
 .../query/enumerator/LookupTableEnumerator.java |     3 +-
 .../kylin/query/enumerator/OLAPEnumerator.java  |     3 +-
 .../optrule/AggregateMultipleExpandRule.java    |    17 +-
 .../optrule/AggregateProjectReduceRule.java     |     5 +-
 .../kylin/query/optrule/OLAPAggregateRule.java  |     3 +-
 .../kylin/query/optrule/OLAPFilterRule.java     |     3 +-
 .../kylin/query/optrule/OLAPJoinRule.java       |     3 +-
 .../kylin/query/optrule/OLAPSortRule.java       |     4 +-
 .../kylin/query/optrule/OLAPUnionRule.java      |     4 +-
 .../kylin/query/optrule/OLAPWindowRule.java     |     3 +-
 .../kylin/query/relnode/OLAPAggregateRel.java   |    25 +-
 .../apache/kylin/query/relnode/OLAPContext.java |     5 +-
 .../kylin/query/relnode/OLAPFilterRel.java      |    11 +-
 .../apache/kylin/query/relnode/OLAPJoinRel.java |    12 +-
 .../kylin/query/relnode/OLAPLimitRel.java       |     3 +-
 .../kylin/query/relnode/OLAPProjectRel.java     |    39 +-
 .../org/apache/kylin/query/relnode/OLAPRel.java |     5 +-
 .../apache/kylin/query/relnode/OLAPSortRel.java |     9 +-
 .../kylin/query/relnode/OLAPTableScan.java      |     6 +-
 .../relnode/OLAPToEnumerableConverter.java      |    10 +-
 .../kylin/query/relnode/OLAPUnionRel.java       |     2 +-
 .../kylin/query/relnode/OLAPWindowRel.java      |    15 +-
 .../apache/kylin/query/routing/QueryRouter.java |     9 +-
 .../kylin/query/routing/RealizationChooser.java |     6 +-
 .../apache/kylin/query/routing/RoutingRule.java |     6 +-
 .../routing/rules/RealizationSortRule.java      |     3 +-
 .../rules/RemoveBlackoutRealizationsRule.java   |     4 +-
 .../apache/kylin/query/schema/OLAPTable.java    |    15 +-
 .../query/util/CognosParenthesesEscape.java     |     3 +-
 .../org/apache/kylin/query/util/QueryUtil.java  |    17 +-
 .../apache/kylin/query/QueryDataSourceTest.java |     6 +-
 .../query/util/CognosParenthesesEscapeTest.java |    21 +-
 .../apache/kylin/query/util/QueryUtilTest.java  |     4 +-
 .../kylin/rest/adhoc/AdHocRunnerJdbcImpl.java   |    18 +-
 .../kylin/rest/adhoc/JdbcConnectionFactory.java |    12 +-
 .../kylin/rest/adhoc/JdbcConnectionPool.java    |     5 +-
 .../apache/kylin/rest/constant/Constant.java    |     3 +-
 .../kylin/rest/controller/AccessController.java |     9 +-
 .../kylin/rest/controller/BasicController.java  |     6 +-
 .../kylin/rest/controller/CacheController.java  |    12 +-
 .../kylin/rest/controller/CubeController.java   |    52 +-
 .../rest/controller/DiagnosisController.java    |     9 +-
 .../controller/ExternalFilterController.java    |     6 +-
 .../kylin/rest/controller/HybridController.java |     9 +-
 .../kylin/rest/controller/JobController.java    |     9 +-
 .../kylin/rest/controller/ModelController.java  |    11 +-
 .../rest/controller/ProjectController.java      |    14 +-
 .../kylin/rest/controller/QueryController.java  |     7 +-
 .../rest/controller/StreamingController.java    |    26 +-
 .../kylin/rest/controller/TableController.java  |    12 +-
 .../kylin/rest/controller/UserController.java   |     3 +-
 .../rest/controller2/AccessControllerV2.java    |    24 +-
 .../rest/controller2/AdminControllerV2.java     |    18 +-
 .../rest/controller2/CacheControllerV2.java     |    15 +-
 .../rest/controller2/CubeControllerV2.java      |   151 +-
 .../rest/controller2/CubeDescControllerV2.java  |     6 +-
 .../rest/controller2/DiagnosisControllerV2.java |    21 +-
 .../rest/controller2/EncodingControllerV2.java  |     3 +-
 .../controller2/ExternalFilterControllerV2.java |    21 +-
 .../rest/controller2/HybridControllerV2.java    |    18 +-
 .../kylin/rest/controller2/JobControllerV2.java |    39 +-
 .../rest/controller2/ModelControllerV2.java     |    52 +-
 .../rest/controller2/ModelDescControllerV2.java |     3 +-
 .../rest/controller2/ProjectControllerV2.java   |    29 +-
 .../rest/controller2/QueryControllerV2.java     |    47 +-
 .../rest/controller2/StreamingControllerV2.java |    39 +-
 .../rest/controller2/TableControllerV2.java     |    47 +-
 .../rest/controller2/UserControllerV2.java      |     9 +-
 .../rest/exception/BadRequestException.java     |     3 +-
 .../kylin/rest/init/InitialTaskManager.java     |     2 +-
 .../apache/kylin/rest/job/HybridCubeCLI.java    |    42 +-
 .../kylin/rest/job/StorageCleanupJob.java       |    33 +-
 .../apache/kylin/rest/metrics/QueryMetrics.java |    18 +-
 .../kylin/rest/metrics/QueryMetricsFacade.java  |     7 +-
 .../org/apache/kylin/rest/msg/CnMessage.java    |     2 +-
 .../kylin/rest/request/HiveTableRequestV2.java  |     1 +
 .../kylin/rest/request/JobBuildRequest2.java    |     4 +-
 .../kylin/rest/request/PrepareSqlRequest.java   |     2 +-
 .../kylin/rest/response/ErrorResponse.java      |     2 +-
 .../apache/kylin/rest/response/SQLResponse.java |     9 +-
 .../kylin/rest/security/AclHBaseStorage.java    |     2 +-
 .../rest/security/AuthoritiesPopulator.java     |     3 +-
 .../kylin/rest/security/CrossDomainFilter.java  |     9 +-
 .../security/KylinAuthenticationProvider.java   |     8 +-
 .../apache/kylin/rest/security/MockHTable.java  |    74 +-
 .../security/PasswordPlaceholderConfigurer.java |     6 +-
 .../rest/security/RealAclHBaseStorage.java      |     3 +-
 .../rest/security/UnauthorisedEntryPoint.java   |     3 +-
 .../kylin/rest/service/AccessService.java       |     6 +-
 .../apache/kylin/rest/service/AclService.java   |    28 +-
 .../rest/service/AclTableMigrationTool.java     |    37 +-
 .../kylin/rest/service/BadQueryDetector.java    |    28 +-
 .../apache/kylin/rest/service/CacheService.java |     3 +-
 .../apache/kylin/rest/service/CubeService.java  |    69 +-
 .../kylin/rest/service/DiagnosisService.java    |     3 +-
 .../kylin/rest/service/DomainObjectInfo.java    |     1 +
 .../kylin/rest/service/EncodingService.java     |     9 +-
 .../kylin/rest/service/HybridService.java       |    12 +-
 .../apache/kylin/rest/service/JobService.java   |   224 +-
 .../kylin/rest/service/KafkaConfigService.java  |     3 +-
 .../kylin/rest/service/LegacyAclService.java    |    58 +-
 .../kylin/rest/service/LegacyUserService.java   |    12 +-
 .../apache/kylin/rest/service/ModelService.java |    24 +-
 .../kylin/rest/service/ProjectService.java      |    17 +-
 .../apache/kylin/rest/service/QueryService.java |   109 +-
 .../kylin/rest/service/StreamingService.java    |     3 +-
 .../rest/service/TableSchemaUpdateChecker.java  |    36 +-
 .../apache/kylin/rest/service/TableService.java |    15 +-
 .../rest/service/UserGrantedAuthority.java      |     1 +
 .../org/apache/kylin/rest/service/UserInfo.java |     1 +
 .../apache/kylin/rest/service/UserService.java  |     3 +-
 .../org/apache/kylin/rest/util/AclUtil.java     |     8 +-
 .../org/apache/kylin/rest/util/AdHocUtil.java   |    17 +-
 .../kylin/rest/util/ControllerSplitter.java     |    18 +-
 .../kylin/rest/util/TableauInterceptor.java     |     4 +-
 .../org/apache/kylin/rest/bean/BeanTest.java    |     9 +-
 .../apache/kylin/rest/bean/BeanValidator.java   |    53 +-
 .../PasswordPlaceHolderConfigurerTest.java      |     2 +-
 .../java/org/apache/kylin/rest/DebugTomcat.java |    14 +-
 .../rest/controller/AccessControllerTest.java   |    17 +-
 .../rest/controller/CubeControllerTest.java     |     6 +-
 .../rest/controller/ProjectControllerTest.java  |    14 +-
 .../rest/controller/QueryControllerTest.java    |     6 +-
 .../kylin/rest/service/AccessServiceTest.java   |     6 +-
 .../rest/service/BadQueryDetectorTest.java      |     3 +-
 .../kylin/rest/service/CacheServiceTest.java    |    57 +-
 .../kylin/rest/service/CubeServiceTest.java     |     2 +-
 .../kylin/rest/service/ModelServiceTest.java    |     9 +-
 .../kylin/rest/service/ServiceTestBase.java     |     7 +-
 .../kylin/source/hive/BeelineHiveClient.java    |     9 +-
 .../apache/kylin/source/hive/CLIHiveClient.java |    12 +-
 .../apache/kylin/source/hive/HiveMRInput.java   |    27 +-
 .../org/apache/kylin/source/hive/HiveTable.java |     3 +-
 .../apache/kylin/source/hive/HiveTableMeta.java |    13 +-
 .../kylin/source/hive/HiveTableMetaBuilder.java |     5 +-
 .../kylin/source/hive/HiveTableReader.java      |     6 +-
 .../apache/kylin/source/hive/IHiveClient.java   |     4 +-
 .../cardinality/ColumnCardinalityMapper.java    |     2 +-
 .../cardinality/ColumnCardinalityReducer.java   |     3 +-
 .../cardinality/HiveColumnCardinalityJob.java   |     3 +-
 .../HiveColumnCardinalityUpdateJob.java         |     3 +-
 .../kylin/source/kafka/DefaultTimeParser.java   |     4 +-
 .../kylin/source/kafka/KafkaConfigManager.java  |    12 +-
 .../apache/kylin/source/kafka/KafkaMRInput.java |    37 +-
 .../apache/kylin/source/kafka/KafkaSource.java  |    36 +-
 .../kylin/source/kafka/StreamingParser.java     |    12 +-
 .../source/kafka/TimedJsonStreamParser.java     |    22 +-
 .../kylin/source/kafka/config/BrokerConfig.java |     6 +-
 .../source/kafka/config/KafkaClusterConfig.java |     3 +-
 .../kafka/config/KafkaConsumerProperties.java   |    13 +-
 .../source/kafka/hadoop/KafkaFlatTableJob.java  |     5 +-
 .../source/kafka/hadoop/KafkaInputFormat.java   |    12 +-
 .../kafka/hadoop/KafkaInputRecordReader.java    |    16 +-
 .../source/kafka/hadoop/KafkaInputSplit.java    |     4 +-
 .../kylin/source/kafka/job/MergeOffsetStep.java |     3 +-
 .../kylin/source/kafka/util/KafkaClient.java    |    21 +-
 .../source/kafka/util/KafkaSampleProducer.java  |    12 +-
 .../source/kafka/TimedJsonStreamParserTest.java |     3 +-
 .../config/KafkaConsumerPropertiesTest.java     |     9 +-
 .../kylin/storage/hbase/HBaseConnection.java    |    13 +-
 .../kylin/storage/hbase/HBaseResourceStore.java |    30 +-
 .../kylin/storage/hbase/HBaseStorage.java       |    12 +-
 .../hbase/common/coprocessor/AggrKey.java       |     3 +-
 .../common/coprocessor/AggregationCache.java    |     7 +-
 .../common/coprocessor/CoprocessorFilter.java   |     6 +-
 .../coprocessor/CoprocessorProjector.java       |     7 +-
 .../common/coprocessor/FilterDecorator.java     |     8 +-
 .../hbase/cube/HBaseScannerBenchmark.java       |     6 +-
 .../storage/hbase/cube/SimpleHBaseStore.java    |    12 +-
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |    57 +-
 .../storage/hbase/cube/v2/CubeHBaseRPC.java     |    22 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |     9 +-
 .../hbase/cube/v2/ExpectedSizeIterator.java     |     7 +-
 .../hbase/cube/v2/HBaseReadonlyStore.java       |    15 +-
 .../coprocessor/endpoint/CubeVisitService.java  |    67 +-
 .../endpoint/generated/CubeVisitProtos.java     | 10764 ++++++++---------
 .../storage/hbase/steps/CreateHTableJob.java    |    34 +-
 .../storage/hbase/steps/CubeHTableUtil.java     |     9 +-
 .../storage/hbase/steps/DeprecatedGCStep.java   |     5 +-
 .../storage/hbase/steps/HBaseCuboidWriter.java  |     6 +-
 .../hbase/steps/HBaseMROutput2Transition.java   |    19 +-
 .../kylin/storage/hbase/steps/HBaseMRSteps.java |    36 +-
 .../hbase/steps/RangeKeyDistributionJob.java    |     6 +-
 .../steps/RangeKeyDistributionReducer.java      |    10 +-
 .../hbase/steps/SequenceFileCuboidWriter.java   |     4 +-
 .../storage/hbase/util/CubeMigrationCLI.java    |    59 +-
 .../hbase/util/CubeMigrationCheckCLI.java       |    19 +-
 .../hbase/util/DeployCoprocessorCLI.java        |    35 +-
 .../hbase/util/ExtendCubeToHybridCLI.java       |    24 +-
 .../hbase/util/GridTableHBaseBenchmark.java     |     6 +-
 .../kylin/storage/hbase/util/HBaseClean.java    |     6 +-
 .../storage/hbase/util/HbaseStreamingInput.java |     3 +-
 .../hbase/util/HtableAlterMetadataCLI.java      |     9 +-
 .../storage/hbase/util/OrphanHBaseCleanJob.java |     6 +-
 .../kylin/storage/hbase/util/PingHBaseCLI.java  |     6 +-
 .../storage/hbase/util/PrintHBaseConfig.java    |     8 +-
 .../kylin/storage/hbase/util/Results.java       |     6 +-
 .../kylin/storage/hbase/util/RowCounterCLI.java |     3 +-
 .../storage/hbase/util/StorageCleanupJob.java   |    24 +-
 .../kylin/storage/hbase/util/TarGZUtil.java     |     3 +-
 .../hbase/util/ZookeeperDistributedLock.java    |    15 +-
 .../storage/hbase/util/ZookeeperJobLock.java    |     8 +-
 .../kylin/storage/hbase/util/ZookeeperUtil.java |    15 +-
 .../kylin/storage/hdfs/HDFSResourceStore.java   |    14 +-
 .../apache/kylin/storage/hdfs/LockManager.java  |     1 +
 .../apache/kylin/storage/hdfs/ResourceLock.java |     9 +-
 .../hbase/common/FuzzyValueCombinationTest.java |     6 +-
 .../hbase/common/HiveJDBCClientTest.java        |     3 +-
 .../TimeConditionLiteralsReplacerTest.java      |     3 +-
 .../common/coprocessor/FilterBaseTest.java      |     6 +-
 .../cube/MeasureTypeOnlyAggrInBaseTest.java     |     6 +-
 .../hbase/steps/CubeHFileMapper2Test.java       |     6 +-
 .../steps/RangeKeyDistributionMapperTest.java   |    24 +-
 .../hbase/steps/RowValueDecoderTest.java        |     6 +-
 .../hbase/steps/SandboxMetastoreCLI.java        |     3 +-
 .../kylin/tool/AbstractInfoExtractor.java       |    18 +-
 .../apache/kylin/tool/AclTableMigrationCLI.java |     3 +-
 .../apache/kylin/tool/CubeMetaExtractor.java    |    66 +-
 .../org/apache/kylin/tool/CubeMetaIngester.java |    31 +-
 .../org/apache/kylin/tool/CubeMigrationCLI.java |    53 +-
 .../kylin/tool/CubeMigrationCheckCLI.java       |    16 +-
 .../org/apache/kylin/tool/DiagnosisInfoCLI.java |    61 +-
 .../kylin/tool/ExtendCubeToHybridCLI.java       |    21 +-
 .../apache/kylin/tool/HBaseUsageExtractor.java  |    14 +-
 .../apache/kylin/tool/JobDiagnosisInfoCLI.java  |    45 +-
 .../apache/kylin/tool/JobInstanceExtractor.java |    23 +-
 .../apache/kylin/tool/KylinLogExtractor.java    |     7 +-
 .../apache/kylin/tool/MetadataCleanupJob.java   |    23 +-
 .../apache/kylin/tool/MrJobInfoExtractor.java   |    30 +-
 .../org/apache/kylin/tool/util/ToolUtil.java    |    11 +-
 .../apache/kylin/tool/CubeMetaIngesterTest.java |    14 +-
 .../apache/kylin/tool/HybridCubeCLITest.java    |    18 +-
 .../apache/kylin/tool/KylinConfigCLITest.java   |     4 +-
 694 files changed, 11076 insertions(+), 13764 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/assembly/src/test/java/org/apache/kylin/RealizationRegistryTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/RealizationRegistryTest.java b/assembly/src/test/java/org/apache/kylin/RealizationRegistryTest.java
index 57ba53f..11723c8 100644
--- a/assembly/src/test/java/org/apache/kylin/RealizationRegistryTest.java
+++ b/assembly/src/test/java/org/apache/kylin/RealizationRegistryTest.java
@@ -18,10 +18,6 @@
 
 package org.apache.kylin;
 
-import static org.junit.Assert.assertEquals;
-
-import java.util.Set;
-
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.metadata.realization.RealizationRegistry;
@@ -30,6 +26,10 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+
 /**
  */
 public class RealizationRegistryTest extends LocalFileMetadataTestCase {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java b/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
index cfb61ff..dac6aa9 100644
--- a/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployLocalMetaToRemoteTest.java
@@ -46,8 +46,7 @@ public class DeployLocalMetaToRemoteTest {
         ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
         System.setProperty(KylinConfig.KYLIN_CONF, HBaseMetadataTestCase.SANDBOX_TEST_DATA);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException(
-                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
index 61a4d33..077c056 100644
--- a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
@@ -153,11 +153,9 @@ public class DeployUtil {
         deployHiveTables();
     }
 
-    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords,
-            String cubeName, StreamDataLoader streamDataLoader) throws IOException {
+    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords, String cubeName, StreamDataLoader streamDataLoader) throws IOException {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
-        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime,
-                cubeInstance.getRootFactTable());
+        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime, cubeInstance.getRootFactTable());
         //load into kafka
         streamDataLoader.loadIntoKafka(data);
         logger.info("Write {} messages into {}", data.size(), streamDataLoader.toString());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
index 5a1f6a1..fae81ce 100644
--- a/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
@@ -52,20 +52,18 @@ public class Kafka10DataLoader extends StreamDataLoader {
     public void loadIntoKafka(List<String> messages) {
 
         KafkaClusterConfig clusterConfig = kafkaClusterConfigs.get(0);
-        String brokerList = StringUtils
-                .join(Collections2.transform(clusterConfig.getBrokerConfigs(), new Function<BrokerConfig, String>() {
-                    @Nullable
-                    @Override
-                    public String apply(BrokerConfig brokerConfig) {
-                        return brokerConfig.getHost() + ":" + brokerConfig.getPort();
-                    }
-                }), ",");
+        String brokerList = StringUtils.join(Collections2.transform(clusterConfig.getBrokerConfigs(), new Function<BrokerConfig, String>() {
+            @Nullable
+            @Override
+            public String apply(BrokerConfig brokerConfig) {
+                return brokerConfig.getHost() + ":" + brokerConfig.getPort();
+            }
+        }), ",");
 
         KafkaProducer producer = getKafkaProducer(brokerList, null);
 
         for (int i = 0; i < messages.size(); i++) {
-            ProducerRecord<String, String> keyedMessage = new ProducerRecord<String, String>(clusterConfig.getTopic(),
-                    String.valueOf(i), messages.get(i));
+            ProducerRecord<String, String> keyedMessage = new ProducerRecord<String, String>(clusterConfig.getTopic(), String.valueOf(i), messages.get(i));
             producer.send(keyedMessage);
         }
         logger.info("sent " + messages.size() + " messages to " + this.toString());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
index 52658e2..2f7d54d 100644
--- a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
@@ -18,15 +18,14 @@
 
 package org.apache.kylin.job.streaming;
 
-import java.util.List;
-
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 
+import java.util.List;
+
 /**
  */
 public abstract class StreamDataLoader {
     protected KafkaConfig kafkaConfig;
-
     public StreamDataLoader(KafkaConfig kafkaConfig) {
         this.kafkaConfig = kafkaConfig;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindowBridge.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindowBridge.java b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindowBridge.java
index 8e2cb62..13a33e3 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindowBridge.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/adapter/enumerable/EnumerableWindowBridge.java
@@ -18,8 +18,6 @@
 
 package org.apache.calcite.adapter.enumerable;
 
-import java.util.List;
-
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
@@ -27,13 +25,15 @@ import org.apache.calcite.rel.core.Window;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexLiteral;
 
+import java.util.List;
+
 /**
  * EnumerableWindow cant'be created out of package, here's hack of workaround
  */
 public class EnumerableWindowBridge {
 
     public static EnumerableWindow createEnumerableWindow(RelOptCluster cluster, RelTraitSet traits, RelNode child,
-            List<RexLiteral> constants, RelDataType rowType, List<Window.Group> groups) {
+                                                   List<RexLiteral> constants, RelDataType rowType, List<Window.Group> groups) {
         return new EnumerableWindow(cluster, traits, child, constants, rowType, groups);
     }
 }
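
(As the Javadoc above notes, EnumerableWindow cannot be instantiated from outside org.apache.calcite.adapter.enumerable, which is why this bridge class is placed inside that Calcite package. Below is a minimal sketch of the same trick in general form; the class and package names are hypothetical and not from the Kylin or Calcite code base.)

    // File 1: com/example/lib/Widget.java -- constructor is package-private.
    package com.example.lib;

    public class Widget {
        Widget(String name) {
            // visible only within com.example.lib
        }
    }

    // File 2: com/example/lib/WidgetBridge.java -- deliberately placed in the library's package.
    package com.example.lib;

    public final class WidgetBridge {
        private WidgetBridge() {
        }

        public static Widget createWidget(String name) {
            // legal here because WidgetBridge shares the package with Widget
            return new Widget(name);
        }
    }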

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule.java b/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule.java
index c3cad47..35f2ae6 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule.java
@@ -52,11 +52,9 @@ public class OLAPJoinPushThroughJoinRule extends RelOptRule {
      * Instance of the rule that works on logical joins only, and pushes to the
      * right.
      */
-    public static final RelOptRule INSTANCE = new OLAPJoinPushThroughJoinRule("OLAPJoinPushThroughJoinRule",
-            LogicalJoin.class, RelFactories.LOGICAL_BUILDER);
+    public static final RelOptRule INSTANCE = new OLAPJoinPushThroughJoinRule("OLAPJoinPushThroughJoinRule", LogicalJoin.class, RelFactories.LOGICAL_BUILDER);
 
-    public OLAPJoinPushThroughJoinRule(String description, Class<? extends Join> clazz,
-            RelBuilderFactory relBuilderFactory) {
+    public OLAPJoinPushThroughJoinRule(String description, Class<? extends Join> clazz, RelBuilderFactory relBuilderFactory) {
         super(operand(clazz,
 
                 operand(clazz, operand(RelNode.class, any()), operand(RelNode.class, null, new Predicate<RelNode>() {
@@ -132,27 +130,23 @@ public class OLAPJoinPushThroughJoinRule extends RelOptRule {
         //            .createShiftMapping(aCount + bCount + cCount, 0, 0, aCount, aCount, aCount + bCount,
         //                cCount);
 
-        final Mappings.TargetMapping bottomMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount,
-                aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
+        final Mappings.TargetMapping bottomMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount, aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
         final List<RexNode> newBottomList = new ArrayList<>();
         new RexPermuteInputsShuttle(bottomMapping, relA, relC).visitList(nonIntersecting, newBottomList);
         new RexPermuteInputsShuttle(bottomMapping, relA, relC).visitList(bottomNonIntersecting, newBottomList);
         final RexBuilder rexBuilder = cluster.getRexBuilder();
         RexNode newBottomCondition = RexUtil.composeConjunction(rexBuilder, newBottomList, false);
-        final Join newBottomJoin = bottomJoin.copy(bottomJoin.getTraitSet(), newBottomCondition, relA, relC,
-                bottomJoin.getJoinType(), bottomJoin.isSemiJoinDone());
+        final Join newBottomJoin = bottomJoin.copy(bottomJoin.getTraitSet(), newBottomCondition, relA, relC, bottomJoin.getJoinType(), bottomJoin.isSemiJoinDone());
 
         // target: | A       | C      | B |
         // source: | A       | B | C      |
-        final Mappings.TargetMapping topMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount,
-                aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
+        final Mappings.TargetMapping topMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount, aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
         final List<RexNode> newTopList = new ArrayList<>();
         new RexPermuteInputsShuttle(topMapping, newBottomJoin, relB).visitList(intersecting, newTopList);
         new RexPermuteInputsShuttle(topMapping, newBottomJoin, relB).visitList(bottomIntersecting, newTopList);
         RexNode newTopCondition = RexUtil.composeConjunction(rexBuilder, newTopList, false);
         @SuppressWarnings("SuspiciousNameCombination")
-        final Join newTopJoin = topJoin.copy(topJoin.getTraitSet(), newTopCondition, newBottomJoin, relB,
-                topJoin.getJoinType(), topJoin.isSemiJoinDone());
+        final Join newTopJoin = topJoin.copy(topJoin.getTraitSet(), newTopCondition, newBottomJoin, relB, topJoin.getJoinType(), topJoin.isSemiJoinDone());
 
         assert !Mappings.isIdentity(topMapping);
         final RelBuilder relBuilder = call.builder();
@@ -165,8 +159,7 @@ public class OLAPJoinPushThroughJoinRule extends RelOptRule {
      * Splits a condition into conjunctions that do or do not intersect with
      * a given bit set.
      */
-    static void split(RexNode condition, ImmutableBitSet bitSet, List<RexNode> intersecting,
-            List<RexNode> nonIntersecting) {
+    static void split(RexNode condition, ImmutableBitSet bitSet, List<RexNode> intersecting, List<RexNode> nonIntersecting) {
         for (RexNode node : RelOptUtil.conjunctions(condition)) {
             ImmutableBitSet inputBitSet = RelOptUtil.InputFinder.bits(node);
             if (bitSet.intersects(inputBitSet)) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule2.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule2.java b/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule2.java
index ad73216..a769cbd 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule2.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/rel/rules/OLAPJoinPushThroughJoinRule2.java
@@ -61,11 +61,9 @@ public class OLAPJoinPushThroughJoinRule2 extends RelOptRule {
      * Instance of the rule that works on logical joins only, and pushes to the
      * right.
      */
-    public static final RelOptRule INSTANCE = new OLAPJoinPushThroughJoinRule2("OLAPJoinPushThroughJoinRule2",
-            LogicalJoin.class, RelFactories.LOGICAL_BUILDER);
+    public static final RelOptRule INSTANCE = new OLAPJoinPushThroughJoinRule2("OLAPJoinPushThroughJoinRule2", LogicalJoin.class, RelFactories.LOGICAL_BUILDER);
 
-    public OLAPJoinPushThroughJoinRule2(String description, Class<? extends Join> clazz,
-            RelBuilderFactory relBuilderFactory) {
+    public OLAPJoinPushThroughJoinRule2(String description, Class<? extends Join> clazz, RelBuilderFactory relBuilderFactory) {
         super(operand(clazz,
 
                 operand(Project.class, //project is added on top by OLAPJoinPushThroughJoinRule
@@ -118,8 +116,7 @@ public class OLAPJoinPushThroughJoinRule2 extends RelOptRule {
         final ImmutableBitSet bBitSetBelowProject = ImmutableBitSet.range(aCount, aCount + bCount);
         final ImmutableBitSet bBitSetAboveProject = Mappings.apply(inverseProjectPermu, bBitSetBelowProject);
 
-        final Mapping extendedProjectPerm = createAbstractTargetMapping(
-                Mappings.append(projectPermu, Mappings.createIdentity(cCount)));
+        final Mapping extendedProjectPerm = createAbstractTargetMapping(Mappings.append(projectPermu, Mappings.createIdentity(cCount)));
 
         // becomes
         //
@@ -157,29 +154,23 @@ public class OLAPJoinPushThroughJoinRule2 extends RelOptRule {
 
         // target: | A       | C      |
         // source: | A       | B | C      |
-        final Mappings.TargetMapping tempMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount,
-                aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
-        final Mappings.TargetMapping thruProjectMapping = Mappings.multiply(extendedProjectPerm,
-                createAbstractTargetMapping(tempMapping));
+        final Mappings.TargetMapping tempMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount, aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
+        final Mappings.TargetMapping thruProjectMapping = Mappings.multiply(extendedProjectPerm, createAbstractTargetMapping(tempMapping));
         final List<RexNode> newBottomList = new ArrayList<>();
         new RexPermuteInputsShuttle(thruProjectMapping, relA, relC).visitList(nonIntersecting, newBottomList);
         final RexBuilder rexBuilder = cluster.getRexBuilder();
         RexNode newBottomCondition = RexUtil.composeConjunction(rexBuilder, newBottomList, false);
-        final Join newBottomJoin = bottomJoin.copy(bottomJoin.getTraitSet(), newBottomCondition, relA, relC,
-                bottomJoin.getJoinType(), bottomJoin.isSemiJoinDone());
+        final Join newBottomJoin = bottomJoin.copy(bottomJoin.getTraitSet(), newBottomCondition, relA, relC, bottomJoin.getJoinType(), bottomJoin.isSemiJoinDone());
 
         // target: | A       | C      | B |
         // source: | A       | B | C      |
-        final Mappings.TargetMapping nonThruProjectMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0,
-                aCount, aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
+        final Mappings.TargetMapping nonThruProjectMapping = Mappings.createShiftMapping(aCount + bCount + cCount, 0, 0, aCount, aCount + cCount, aCount, bCount, aCount, aCount + bCount, cCount);
         final List<RexNode> newTopList = new ArrayList<>();
         new RexPermuteInputsShuttle(thruProjectMapping, newBottomJoin, relB).visitList(intersecting, newTopList);
-        new RexPermuteInputsShuttle(nonThruProjectMapping, newBottomJoin, relB).visitList(bottomIntersecting,
-                newTopList);
+        new RexPermuteInputsShuttle(nonThruProjectMapping, newBottomJoin, relB).visitList(bottomIntersecting, newTopList);
         RexNode newTopCondition = RexUtil.composeConjunction(rexBuilder, newTopList, false);
         @SuppressWarnings("SuspiciousNameCombination")
-        final Join newTopJoin = topJoin.copy(topJoin.getTraitSet(), newTopCondition, newBottomJoin, relB,
-                topJoin.getJoinType(), topJoin.isSemiJoinDone());
+        final Join newTopJoin = topJoin.copy(topJoin.getTraitSet(), newTopCondition, newBottomJoin, relB, topJoin.getJoinType(), topJoin.isSemiJoinDone());
 
         assert !Mappings.isIdentity(thruProjectMapping);
         final RelBuilder relBuilder = call.builder();
@@ -201,8 +192,7 @@ public class OLAPJoinPushThroughJoinRule2 extends RelOptRule {
      * Splits a condition into conjunctions that do or do not intersect with
      * a given bit set.
      */
-    static void split(RexNode condition, ImmutableBitSet bitSet, List<RexNode> intersecting,
-            List<RexNode> nonIntersecting) {
+    static void split(RexNode condition, ImmutableBitSet bitSet, List<RexNode> intersecting, List<RexNode> nonIntersecting) {
         for (RexNode node : RelOptUtil.conjunctions(condition)) {
             ImmutableBitSet inputBitSet = RelOptUtil.InputFinder.bits(node);
             if (bitSet.intersects(inputBitSet)) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
index f5cfa85..f88157c 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/RelFieldTrimmer.java
@@ -34,10 +34,7 @@ public class RelFieldTrimmer {
     public RelFieldTrimmer(SqlValidator validator, RelBuilder relBuilder) {
     }
 
-    public RelFieldTrimmer(SqlValidator validator, RelOptCluster cluster, RelFactories.ProjectFactory projectFactory,
-            RelFactories.FilterFactory filterFactory, RelFactories.JoinFactory joinFactory,
-            RelFactories.SemiJoinFactory semiJoinFactory, RelFactories.SortFactory sortFactory,
-            RelFactories.AggregateFactory aggregateFactory, RelFactories.SetOpFactory setOpFactory) {
+    public RelFieldTrimmer(SqlValidator validator, RelOptCluster cluster, RelFactories.ProjectFactory projectFactory, RelFactories.FilterFactory filterFactory, RelFactories.JoinFactory joinFactory, RelFactories.SemiJoinFactory semiJoinFactory, RelFactories.SortFactory sortFactory, RelFactories.AggregateFactory aggregateFactory, RelFactories.SetOpFactory setOpFactory) {
     }
 
     public RelNode trim(RelNode rootRel) {


[67/67] [abbrv] kylin git commit: Merge commit '53aafa97e150d8ac9dd4dfc54d6b55b12a70240c'

Posted by li...@apache.org.
Merge commit '53aafa97e150d8ac9dd4dfc54d6b55b12a70240c'


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c38def7b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c38def7b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c38def7b

Branch: refs/heads/master
Commit: c38def7b53dae81f9fde0520b1fb270804dde728
Parents: 25a5367 53aafa9
Author: Hongbin Ma <ma...@apache.org>
Authored: Mon Jun 5 13:22:06 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Mon Jun 5 13:22:06 2017 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/job/JobInstance.java  | 16 +++++++++++++++
 .../kylin/job/execution/AbstractExecutable.java | 21 ++++++++++++++++----
 .../job/execution/DefaultChainedExecutable.java | 12 +++++++++--
 .../engine/mr/common/JobInfoConverter.java      |  3 ++-
 .../apache/kylin/tool/JobInstanceExtractor.java |  3 ++-
 webapp/app/partials/query/query.html            |  2 ++
 6 files changed, 49 insertions(+), 8 deletions(-)
----------------------------------------------------------------------



[39/67] [abbrv] kylin git commit: fix CI

Posted by li...@apache.org.
fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/dd371532
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/dd371532
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/dd371532

Branch: refs/heads/master
Commit: dd3715329416bac41e6017157ba2a6bd343ab22a
Parents: ce97f81
Author: Hongbin Ma <ma...@apache.org>
Authored: Sat May 27 20:49:26 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Sat May 27 21:11:03 2017 +0800

----------------------------------------------------------------------
 examples/test_case_data/sandbox/kylin.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/dd371532/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index bab51e3..6a4f785 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -187,7 +187,7 @@ kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
 
 
 ### AD-HOC QUERY ###
-kylin.query.ad-hoc.runner.class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
+#kylin.query.ad-hoc.runner.class-name=org.apache.kylin.rest.adhoc.AdHocRunnerJdbcImpl
 
 kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
 kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver


[11/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java b/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
index 582a434..cdd6004 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
@@ -18,12 +18,8 @@
 
 package org.apache.kylin.query.optrule;
 
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import javax.annotation.Nullable;
-
+import com.google.common.base.Predicate;
+import com.google.common.collect.ImmutableList;
 import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.plan.RelOptRuleOperand;
@@ -37,8 +33,10 @@ import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.tools.RelBuilder;
 import org.apache.calcite.util.ImmutableBitSet;
 
-import com.google.common.base.Predicate;
-import com.google.common.collect.ImmutableList;
+import javax.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
 /**
  * Supoort grouping query. Expand the non-simple aggregate to more than one simple aggregates.
@@ -85,8 +83,7 @@ public class AggregateMultipleExpandRule extends RelOptRule {
 
         for (ImmutableBitSet groupSet : aggr.getGroupSets()) {
             // push the simple aggregate with one group set
-            relBuilder.push(
-                    aggr.copy(aggr.getTraitSet(), input, false, groupSet, asList(groupSet), aggr.getAggCallList()));
+            relBuilder.push(aggr.copy(aggr.getTraitSet(), input, false, groupSet, asList(groupSet), aggr.getAggCallList()));
 
             ImmutableList.Builder<RexNode> rexNodes = new ImmutableList.Builder<>();
             int index = 0;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java b/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
index a411ec2..f6ac61a 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
@@ -36,9 +36,9 @@ import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.tools.RelBuilder;
 import org.apache.calcite.tools.RelBuilderFactory;
 import org.apache.calcite.util.ImmutableBitSet;
-import org.apache.calcite.util.Pair;
 
 import com.google.common.collect.ImmutableList;
+import org.apache.calcite.util.Pair;
 
 /**
  * Reduce project under aggregate which has unused input ref.
@@ -55,8 +55,7 @@ public class AggregateProjectReduceRule extends RelOptRule {
         super(operand, factory, description);
     }
 
-    private void mappingKeys(int key, Pair<RexNode, String> project, List<Pair<RexNode, String>> projects,
-            Map<Integer, Integer> mapping) {
+    private void mappingKeys(int key, Pair<RexNode, String> project, List<Pair<RexNode, String>> projects, Map<Integer, Integer> mapping) {
         if (!projects.contains(project)) {
             projects.add(project);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/OLAPAggregateRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPAggregateRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPAggregateRule.java
index c76fb9b..da53152 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPAggregateRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPAggregateRule.java
@@ -52,8 +52,7 @@ public class OLAPAggregateRule extends ConverterRule {
 
         RelTraitSet traitSet = agg.getTraitSet().replace(OLAPRel.CONVENTION);
         try {
-            return new OLAPAggregateRel(agg.getCluster(), traitSet, convert(agg.getInput(), traitSet), agg.indicator,
-                    agg.getGroupSet(), agg.getGroupSets(), agg.getAggCallList());
+            return new OLAPAggregateRel(agg.getCluster(), traitSet, convert(agg.getInput(), traitSet), agg.indicator, agg.getGroupSet(), agg.getGroupSets(), agg.getAggCallList());
         } catch (InvalidRelException e) {
             throw new IllegalStateException("Can't create OLAPAggregateRel!", e);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/OLAPFilterRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPFilterRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPFilterRule.java
index 3f07f48..33f3ea8 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPFilterRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPFilterRule.java
@@ -42,8 +42,7 @@ public class OLAPFilterRule extends RelOptRule {
         RelTraitSet origTraitSet = filter.getTraitSet();
         RelTraitSet traitSet = origTraitSet.replace(OLAPRel.CONVENTION).simplify();
 
-        OLAPFilterRel olapFilter = new OLAPFilterRel(filter.getCluster(), traitSet,
-                convert(filter.getInput(), traitSet), filter.getCondition());
+        OLAPFilterRel olapFilter = new OLAPFilterRel(filter.getCluster(), traitSet, convert(filter.getInput(), traitSet), filter.getCondition());
         call.transformTo(olapFilter);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
index a006eb0..c31d1d0 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPJoinRule.java
@@ -71,8 +71,7 @@ public class OLAPJoinRule extends ConverterRule {
             // return null;
         }
         if (!info.isEqui()) {
-            newRel = new OLAPFilterRel(cluster, newRel.getTraitSet(), newRel,
-                    info.getRemaining(cluster.getRexBuilder()));
+            newRel = new OLAPFilterRel(cluster, newRel.getTraitSet(), newRel, info.getRemaining(cluster.getRexBuilder()));
         }
         return newRel;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/OLAPSortRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPSortRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPSortRule.java
index f85b190..c020d63 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPSortRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPSortRule.java
@@ -44,9 +44,7 @@ public class OLAPSortRule extends ConverterRule {
         }
         final RelTraitSet traitSet = sort.getTraitSet().replace(OLAPRel.CONVENTION);
         final RelNode input = sort.getInput();
-        return new OLAPSortRel(rel.getCluster(), traitSet,
-                convert(input, input.getTraitSet().replace(OLAPRel.CONVENTION)), sort.getCollation(), sort.offset,
-                sort.fetch);
+        return new OLAPSortRel(rel.getCluster(), traitSet, convert(input, input.getTraitSet().replace(OLAPRel.CONVENTION)), sort.getCollation(), sort.offset, sort.fetch);
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/OLAPUnionRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPUnionRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPUnionRule.java
index 5e2bf15..e3bc7a9 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPUnionRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPUnionRule.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.query.optrule;
 
-import java.util.List;
-
 import org.apache.calcite.plan.Convention;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
@@ -28,6 +26,8 @@ import org.apache.calcite.rel.core.Union;
 import org.apache.kylin.query.relnode.OLAPRel;
 import org.apache.kylin.query.relnode.OLAPUnionRel;
 
+import java.util.List;
+
 /**
  */
 public class OLAPUnionRule extends ConverterRule {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/optrule/OLAPWindowRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/OLAPWindowRule.java b/query/src/main/java/org/apache/kylin/query/optrule/OLAPWindowRule.java
index ef9de56..74d1b10 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/OLAPWindowRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/OLAPWindowRule.java
@@ -41,7 +41,6 @@ public class OLAPWindowRule extends ConverterRule {
         final Window window = (Window) rel;
         final RelTraitSet traitSet = window.getTraitSet().replace(OLAPRel.CONVENTION);
         final RelNode input = window.getInput();
-        return new OLAPWindowRel(rel.getCluster(), traitSet, convert(input, OLAPRel.CONVENTION), window.constants,
-                window.getRowType(), window.groups);
+        return new OLAPWindowRel(rel.getCluster(), traitSet, convert(input, OLAPRel.CONVENTION), window.constants, window.getRowType(), window.groups);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
index 2485915..adb145a 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPAggregateRel.java
@@ -91,8 +91,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
         Map<String, Class<?>> udafs = MeasureTypeFactory.getUDAFs();
         for (String func : udafs.keySet()) {
             try {
-                AGGR_FUNC_PARAM_AS_MEASTURE_MAP.put(func,
-                        ((ParamAsMeasureCount) (udafs.get(func).newInstance())).getParamAsMeasureCount());
+                AGGR_FUNC_PARAM_AS_MEASTURE_MAP.put(func, ((ParamAsMeasureCount) (udafs.get(func).newInstance())).getParamAsMeasureCount());
             } catch (Exception e) {
                 throw new RuntimeException(e);
             }
@@ -123,9 +122,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
     private List<TblColRef> groups;
     private List<FunctionDesc> aggregations;
 
-    public OLAPAggregateRel(RelOptCluster cluster, RelTraitSet traits, RelNode child, boolean indicator,
-            ImmutableBitSet groupSet, List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls)
-            throws InvalidRelException {
+    public OLAPAggregateRel(RelOptCluster cluster, RelTraitSet traits, RelNode child, boolean indicator, ImmutableBitSet groupSet, List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) throws InvalidRelException {
         super(cluster, traits, child, indicator, groupSet, groupSets, aggCalls);
         Preconditions.checkArgument(getConvention() == OLAPRel.CONVENTION);
         this.afterAggregate = false;
@@ -134,8 +131,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
     }
 
     @Override
-    public Aggregate copy(RelTraitSet traitSet, RelNode input, boolean indicator, ImmutableBitSet groupSet,
-            List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
+    public Aggregate copy(RelTraitSet traitSet, RelNode input, boolean indicator, ImmutableBitSet groupSet, List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
         try {
             return new OLAPAggregateRel(getCluster(), traitSet, input, indicator, groupSet, groupSets, aggCalls);
         } catch (InvalidRelException e) {
@@ -153,8 +149,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
         if (getGroupType() == Group.SIMPLE) {
             cost = super.computeSelfCost(planner, mq).multiplyBy(.05);
         } else {
-            cost = super.computeSelfCost(planner, mq).multiplyBy(.05).plus(planner.getCost(getInput(), mq))
-                    .multiplyBy(groupSets.size() * 1.5);
+            cost = super.computeSelfCost(planner, mq).multiplyBy(.05).plus(planner.getCost(getInput(), mq)).multiplyBy(groupSets.size() * 1.5);
         }
         return cost;
     }
@@ -172,8 +167,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
         if (!this.afterAggregate) {
             addToContextGroupBy(this.groups);
             this.context.aggregations.addAll(this.aggregations);
-            this.context.aggrOutCols
-                    .addAll(columnRowType.getAllColumns().subList(groups.size(), columnRowType.getAllColumns().size()));
+            this.context.aggrOutCols.addAll(columnRowType.getAllColumns().subList(groups.size(), columnRowType.getAllColumns().size()));
             this.context.afterAggregate = true;
 
             if (this.context.afterLimit) {
@@ -221,8 +215,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
             } else {
                 AggregateCall aggCall = this.rewriteAggCalls.get(i);
                 int index = aggCall.getArgList().get(0);
-                aggOutName = getSqlFuncName(aggCall) + "_"
-                        + inputColumnRowType.getColumnByIndex(index).getIdentity().replace('.', '_') + "_";
+                aggOutName = getSqlFuncName(aggCall) + "_" + inputColumnRowType.getColumnByIndex(index).getIdentity().replace('.', '_') + "_";
             }
             TblColRef aggOutCol = TblColRef.newInnerColumn(aggOutName, TblColRef.InnerDataTypeEnum.LITERAL);
             aggOutCol.getColumnDesc().setId("" + (i + 1)); // mark the index of aggregation
@@ -365,8 +358,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
 
             if (aggFunc.needRewriteField()) {
                 String rewriteFieldName = aggFunc.getRewriteFieldName();
-                RelDataType rewriteFieldType = OLAPTable.createSqlType(typeFactory, aggFunc.getRewriteFieldType(),
-                        true);
+                RelDataType rewriteFieldType = OLAPTable.createSqlType(typeFactory, aggFunc.getRewriteFieldType(), true);
                 this.context.rewriteFields.put(rewriteFieldName, rewriteFieldType);
 
                 TblColRef column = buildRewriteColumn(aggFunc);
@@ -460,8 +452,7 @@ public class OLAPAggregateRel extends Aggregate implements OLAPRel {
             argTypes.add(type);
             typeFamilies.add(Util.first(type.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
         }
-        return new SqlUserDefinedAggFunction(sqlIdentifier, ReturnTypes.explicit(returnType),
-                InferTypes.explicit(argTypes), OperandTypes.family(typeFamilies), aggFunction);
+        return new SqlUserDefinedAggFunction(sqlIdentifier, ReturnTypes.explicit(returnType), InferTypes.explicit(argTypes), OperandTypes.family(typeFamilies), aggFunction);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
index f232813..31ed075 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPContext.java
@@ -175,7 +175,7 @@ public class OLAPContext {
                 return true;
             }
         }
-
+        
         return false;
     }
 
@@ -207,8 +207,7 @@ public class OLAPContext {
         * @realization the cube used in this query
         * @OLAPInsufficientException no rights exception
         */
-        public TupleFilter check(OLAPAuthentication olapAuthentication, Collection<TblColRef> columns,
-                IRealization realization) throws IllegalArgumentException;
+        public TupleFilter check(OLAPAuthentication olapAuthentication, Collection<TblColRef> columns, IRealization realization) throws IllegalArgumentException;
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
index 6609b63..8f86ae0 100755
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPFilterRel.java
@@ -206,7 +206,7 @@ public class OLAPFilterRel extends Filter implements OLAPRel {
 
             Preconditions.checkNotNull(left);
             Preconditions.checkNotNull(right);
-
+            
             switch (call.op.getKind()) {
             case PLUS:
                 return new ConstantTupleFilter(left.add(right).toString());
@@ -228,8 +228,7 @@ public class OLAPFilterRel extends Filter implements OLAPRel {
 
             ConstantTupleFilter constFilter = (ConstantTupleFilter) filter;
 
-            if (type.getFamily() == SqlTypeFamily.DATE || type.getFamily() == SqlTypeFamily.DATETIME
-                    || type.getFamily() == SqlTypeFamily.TIMESTAMP) {
+            if (type.getFamily() == SqlTypeFamily.DATE || type.getFamily() == SqlTypeFamily.DATETIME || type.getFamily() == SqlTypeFamily.TIMESTAMP) {
                 List<String> newValues = Lists.newArrayList();
                 for (Object v : constFilter.getValues()) {
                     if (v == null)
@@ -357,7 +356,7 @@ public class OLAPFilterRel extends Filter implements OLAPRel {
             translateFilter(context);
         } else {
             context.afterHavingClauseFilter = true;
-
+            
             TupleFilterVisitor visitor = new TupleFilterVisitor(this.columnRowType);
             TupleFilter havingFilter = this.condition.accept(visitor);
             if (context.havingFilter == null)
@@ -378,10 +377,10 @@ public class OLAPFilterRel extends Filter implements OLAPRel {
 
         TupleFilterVisitor visitor = new TupleFilterVisitor(this.columnRowType);
         TupleFilter filter = this.condition.accept(visitor);
-
+        
         // optimize the filter, the optimization has to be segment-irrelevant
         filter = new FilterOptimizeTransformer().transform(filter);
-
+        
         Set<TblColRef> filterColumns = Sets.newHashSet();
         TupleFilter.collectColumns(filter, filterColumns);
         for (TblColRef tblColRef : filterColumns) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
index 56449db..a27cf76 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPJoinRel.java
@@ -90,8 +90,7 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
         final JoinInfo joinInfo = JoinInfo.of(left, right, condition);
         assert joinInfo.isEqui();
         try {
-            return new OLAPJoinRel(getCluster(), traitSet, left, right, condition, joinInfo.leftKeys,
-                    joinInfo.rightKeys, variablesSet, joinType);
+            return new OLAPJoinRel(getCluster(), traitSet, left, right, condition, joinInfo.leftKeys, joinInfo.rightKeys, variablesSet, joinType);
         } catch (InvalidRelException e) {
             // Semantic error not possible. Must be a bug. Convert to internal error.
             throw new AssertionError(e);
@@ -179,8 +178,7 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
             JoinDesc join = buildJoin(condition);
 
             JoinRelType joinRelType = this.getJoinType();
-            String joinType = joinRelType == JoinRelType.INNER ? "INNER"
-                    : joinRelType == JoinRelType.LEFT ? "LEFT" : null;
+            String joinType = joinRelType == JoinRelType.INNER ? "INNER" : joinRelType == JoinRelType.LEFT ? "LEFT" : null;
             join.setType(joinType);
 
             this.context.joins.add(join);
@@ -211,8 +209,7 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
         columns.addAll(rightColumnRowType.getAllColumns());
 
         if (columns.size() != this.rowType.getFieldCount()) {
-            throw new IllegalStateException(
-                    "RowType=" + this.rowType.getFieldCount() + ", ColumnRowType=" + columns.size());
+            throw new IllegalStateException("RowType=" + this.rowType.getFieldCount() + ", ColumnRowType=" + columns.size());
         }
         return new ColumnRowType(columns);
     }
@@ -310,8 +307,7 @@ public class OLAPJoinRel extends EnumerableJoin implements OLAPRel {
 
         PhysType physType = PhysTypeImpl.of(implementor.getTypeFactory(), getRowType(), pref.preferArray());
         RelOptTable factTable = context.firstTableScan.getTable();
-        MethodCallExpression exprCall = Expressions.call(factTable.getExpression(OLAPTable.class), "executeOLAPQuery",
-                implementor.getRootExpression(), Expressions.constant(context.id));
+        MethodCallExpression exprCall = Expressions.call(factTable.getExpression(OLAPTable.class), "executeOLAPQuery", implementor.getRootExpression(), Expressions.constant(context.id));
         return implementor.result(physType, Blocks.toBlock(exprCall));
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
index 1388d10..2a248cc 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPLimitRel.java
@@ -66,8 +66,7 @@ public class OLAPLimitRel extends SingleRel implements OLAPRel {
 
     @Override
     public RelWriter explainTerms(RelWriter pw) {
-        return super.explainTerms(pw).itemIf("offset", localOffset, localOffset != null).itemIf("fetch", localFetch,
-                localFetch != null);
+        return super.explainTerms(pw).itemIf("offset", localOffset, localOffset != null).itemIf("fetch", localFetch, localFetch != null);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
index 258aa37..e7b09a3 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPProjectRel.java
@@ -72,8 +72,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
     private boolean afterAggregate;
     private boolean isMerelyPermutation = false;//project additionally added by OLAPJoinPushThroughJoinRule
 
-    public OLAPProjectRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, List<RexNode> exps,
-            RelDataType rowType) {
+    public OLAPProjectRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, List<RexNode> exps, RelDataType rowType) {
         super(cluster, traitSet, child, exps, rowType);
         Preconditions.checkArgument(getConvention() == OLAPRel.CONVENTION);
         Preconditions.checkArgument(child.getConvention() == OLAPRel.CONVENTION);
@@ -103,8 +102,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
     @Override
     public RelOptCost computeSelfCost(RelOptPlanner planner, RelMetadataQuery mq) {
         boolean hasRexOver = RexOver.containsOver(getProjects(), null);
-        return super.computeSelfCost(planner, mq).multiplyBy(.05)
-                .multiplyBy(getProjects().size() * (hasRexOver ? 50 : 1));
+        return super.computeSelfCost(planner, mq).multiplyBy(.05).multiplyBy(getProjects().size() * (hasRexOver ? 50 : 1));
     }
 
     @Override
@@ -148,8 +146,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
         return new ColumnRowType(columns, sourceColumns);
     }
 
-    private TblColRef translateRexNode(RexNode rexNode, ColumnRowType inputColumnRowType, String fieldName,
-            Set<TblColRef> sourceCollector) {
+    private TblColRef translateRexNode(RexNode rexNode, ColumnRowType inputColumnRowType, String fieldName, Set<TblColRef> sourceCollector) {
         TblColRef column = null;
         if (rexNode instanceof RexInputRef) {
             RexInputRef inputRef = (RexInputRef) rexNode;
@@ -166,15 +163,13 @@ public class OLAPProjectRel extends Project implements OLAPRel {
         return column;
     }
 
-    private TblColRef translateFirstRexInputRef(RexCall call, ColumnRowType inputColumnRowType, String fieldName,
-            Set<TblColRef> sourceCollector) {
+    private TblColRef translateFirstRexInputRef(RexCall call, ColumnRowType inputColumnRowType, String fieldName, Set<TblColRef> sourceCollector) {
         for (RexNode operand : call.getOperands()) {
             if (operand instanceof RexInputRef) {
                 return translateRexInputRef((RexInputRef) operand, inputColumnRowType, fieldName, sourceCollector);
             }
             if (operand instanceof RexCall) {
-                TblColRef r = translateFirstRexInputRef((RexCall) operand, inputColumnRowType, fieldName,
-                        sourceCollector);
+                TblColRef r = translateFirstRexInputRef((RexCall) operand, inputColumnRowType, fieldName, sourceCollector);
                 if (r != null)
                     return r;
             }
@@ -182,14 +177,12 @@ public class OLAPProjectRel extends Project implements OLAPRel {
         return null;
     }
 
-    private TblColRef translateRexInputRef(RexInputRef inputRef, ColumnRowType inputColumnRowType, String fieldName,
-            Set<TblColRef> sourceCollector) {
+    private TblColRef translateRexInputRef(RexInputRef inputRef, ColumnRowType inputColumnRowType, String fieldName, Set<TblColRef> sourceCollector) {
         int index = inputRef.getIndex();
         // check it for rewrite count
         if (index < inputColumnRowType.size()) {
             TblColRef column = inputColumnRowType.getColumnByIndex(index);
-            if (!column.isInnerColumn() && context.belongToContextTables(column) && !this.rewriting
-                    && !this.afterAggregate) {
+            if (!column.isInnerColumn() && context.belongToContextTables(column) && !this.rewriting && !this.afterAggregate) {
                 if (!isMerelyPermutation) {
                     context.allColumns.add(column);
                 }
@@ -197,8 +190,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
             }
             return column;
         } else {
-            throw new IllegalStateException("Can't find " + inputRef + " from child columnrowtype " + inputColumnRowType
-                    + " with fieldname " + fieldName);
+            throw new IllegalStateException("Can't find " + inputRef + " from child columnrowtype " + inputColumnRowType + " with fieldname " + fieldName);
         }
     }
 
@@ -211,8 +203,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
 
     }
 
-    private TblColRef translateRexCall(RexCall call, ColumnRowType inputColumnRowType, String fieldName,
-            Set<TblColRef> sourceCollector) {
+    private TblColRef translateRexCall(RexCall call, ColumnRowType inputColumnRowType, String fieldName, Set<TblColRef> sourceCollector) {
         SqlOperator operator = call.getOperator();
         if (operator == SqlStdOperatorTable.EXTRACT_DATE) {
             return translateFirstRexInputRef(call, inputColumnRowType, fieldName, sourceCollector);
@@ -275,8 +266,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
             return true;
         }
 
-        if (rexNode instanceof RexCall && SqlKind.CAST.equals(rexNode.getKind())
-                && ((RexCall) rexNode).getOperands().get(0) instanceof RexLiteral) {
+        if (rexNode instanceof RexCall && SqlKind.CAST.equals(rexNode.getKind()) && ((RexCall) rexNode).getOperands().get(0) instanceof RexLiteral) {
             return true;
         }
 
@@ -289,15 +279,13 @@ public class OLAPProjectRel extends Project implements OLAPRel {
             // merge project & filter
             OLAPFilterRel filter = (OLAPFilterRel) getInput();
             RelNode inputOfFilter = inputs.get(0).getInput(0);
-            RexProgram program = RexProgram.create(inputOfFilter.getRowType(), this.rewriteProjects,
-                    filter.getCondition(), this.rowType, getCluster().getRexBuilder());
+            RexProgram program = RexProgram.create(inputOfFilter.getRowType(), this.rewriteProjects, filter.getCondition(), this.rowType, getCluster().getRexBuilder());
             return new EnumerableCalc(getCluster(), getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
                     inputOfFilter, program);
         } else {
             // keep project for table scan
             EnumerableRel input = sole(inputs);
-            RexProgram program = RexProgram.create(input.getRowType(), this.rewriteProjects, null, this.rowType,
-                    getCluster().getRexBuilder());
+            RexProgram program = RexProgram.create(input.getRowType(), this.rewriteProjects, null, this.rowType, getCluster().getRexBuilder());
             return new EnumerableCalc(getCluster(), getCluster().traitSetOf(EnumerableConvention.INSTANCE), //
                     input, program);
         }
@@ -315,8 +303,7 @@ public class OLAPProjectRel extends Project implements OLAPRel {
         this.rewriting = true;
 
         // project before join or is just after OLAPToEnumerableConverter
-        if (!RewriteImplementor.needRewrite(this.context) || (this.hasJoin && !this.afterJoin) || this.afterAggregate
-                || !(this.context.hasPrecalculatedFields())) {
+        if (!RewriteImplementor.needRewrite(this.context) || (this.hasJoin && !this.afterJoin) || this.afterAggregate || !(this.context.hasPrecalculatedFields())) {
             this.columnRowType = this.buildColumnRowType();
             return;
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPRel.java
index 4ed84c2..814b0fd 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPRel.java
@@ -215,9 +215,8 @@ public interface OLAPRel extends RelNode {
         }
 
         @Override
-        public EnumerableRel.Result visitChild(EnumerableRel parent, int ordinal, EnumerableRel child,
-                EnumerableRel.Prefer prefer) {
-
+        public EnumerableRel.Result visitChild(EnumerableRel parent, int ordinal, EnumerableRel child, EnumerableRel.Prefer prefer) {
+            
             if (calciteDebug) {
                 OLAPContext context;
                 if (child instanceof OLAPRel)

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
index d24c91e..03ba9c5 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPSortRel.java
@@ -47,16 +47,14 @@ public class OLAPSortRel extends Sort implements OLAPRel {
     private ColumnRowType columnRowType;
     private OLAPContext context;
 
-    public OLAPSortRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, RelCollation collation,
-            RexNode offset, RexNode fetch) {
+    public OLAPSortRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode child, RelCollation collation, RexNode offset, RexNode fetch) {
         super(cluster, traitSet, child, collation, offset, fetch);
         Preconditions.checkArgument(getConvention() == OLAPRel.CONVENTION);
         Preconditions.checkArgument(getConvention() == child.getConvention());
     }
 
     @Override
-    public OLAPSortRel copy(RelTraitSet traitSet, RelNode newInput, RelCollation newCollation, RexNode offset,
-            RexNode fetch) {
+    public OLAPSortRel copy(RelTraitSet traitSet, RelNode newInput, RelCollation newCollation, RexNode offset, RexNode fetch) {
         return new OLAPSortRel(getCluster(), traitSet, newInput, newCollation, offset, fetch);
     }
 
@@ -122,8 +120,7 @@ public class OLAPSortRel extends Sort implements OLAPRel {
 
     @Override
     public EnumerableRel implementEnumerable(List<EnumerableRel> inputs) {
-        return new EnumerableSort(getCluster(),
-                getCluster().traitSetOf(EnumerableConvention.INSTANCE).replace(collation), //
+        return new EnumerableSort(getCluster(), getCluster().traitSetOf(EnumerableConvention.INSTANCE).replace(collation), //
                 sole(inputs), collation, offset, fetch);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
index ddb9ebf..b583291 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPTableScan.java
@@ -210,8 +210,7 @@ public class OLAPTableScan extends TableScan implements OLAPRel, EnumerableRel {
         Preconditions.checkState(columnRowType == null, "OLAPTableScan MUST NOT be shared by more than one parent");
 
         // create context in case of non-join
-        if (implementor.getContext() == null || !(implementor.getParentNode() instanceof OLAPJoinRel)
-                || implementor.isNewOLAPContextRequired()) {
+        if (implementor.getContext() == null || !(implementor.getParentNode() instanceof OLAPJoinRel) || implementor.isNewOLAPContextRequired()) {
             implementor.allocateContext();
         }
 
@@ -289,8 +288,7 @@ public class OLAPTableScan extends TableScan implements OLAPRel, EnumerableRel {
         String execFunction = genExecFunc();
 
         PhysType physType = PhysTypeImpl.of(implementor.getTypeFactory(), this.rowType, pref.preferArray());
-        MethodCallExpression exprCall = Expressions.call(table.getExpression(OLAPTable.class), execFunction,
-                implementor.getRootExpression(), Expressions.constant(context.id));
+        MethodCallExpression exprCall = Expressions.call(table.getExpression(OLAPTable.class), execFunction, implementor.getRootExpression(), Expressions.constant(context.id));
         return implementor.result(physType, Blocks.toBlock(exprCall));
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
index 8104a87..7ac86b2 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPToEnumerableConverter.java
@@ -125,10 +125,8 @@ public class OLAPToEnumerableConverter extends ConverterImpl implements Enumerab
     private void doAccessControl(OLAPContext context) {
         String controllerCls = KylinConfig.getInstanceFromEnv().getQueryAccessController();
         if (null != controllerCls && !controllerCls.isEmpty()) {
-            OLAPContext.IAccessController accessController = (OLAPContext.IAccessController) ClassUtil
-                    .newInstance(controllerCls);
-            TupleFilter tupleFilter = accessController.check(context.olapAuthen, context.allColumns,
-                    context.realization);
+            OLAPContext.IAccessController accessController = (OLAPContext.IAccessController) ClassUtil.newInstance(controllerCls);
+            TupleFilter tupleFilter = accessController.check(context.olapAuthen, context.allColumns, context.realization);
             if (null != tupleFilter) {
                 context.filterColumns.addAll(collectColumns(tupleFilter));
                 context.allColumns.addAll(collectColumns(tupleFilter));
@@ -163,9 +161,7 @@ public class OLAPToEnumerableConverter extends ConverterImpl implements Enumerab
         PhysType physType = PhysTypeImpl.of(enumImplementor.getTypeFactory(), hiveRowType, pref.preferArray());
 
         RelOptTable factTable = context.firstTableScan.getTable();
-        Result result = enumImplementor.result(physType,
-                Blocks.toBlock(Expressions.call(factTable.getExpression(OLAPTable.class), "executeHiveQuery",
-                        enumImplementor.getRootExpression())));
+        Result result = enumImplementor.result(physType, Blocks.toBlock(Expressions.call(factTable.getExpression(OLAPTable.class), "executeHiveQuery", enumImplementor.getRootExpression())));
         return result;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPUnionRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPUnionRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPUnionRel.java
index 1a2d9e2..e04ba6f 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPUnionRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPUnionRel.java
@@ -122,7 +122,7 @@ public class OLAPUnionRel extends Union implements OLAPRel {
     @Override
     public boolean hasSubQuery() {
         for (RelNode child : getInputs()) {
-            if (((OLAPRel) child).hasSubQuery()) {
+            if (((OLAPRel)child).hasSubQuery()) {
                 return true;
             }
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/relnode/OLAPWindowRel.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/relnode/OLAPWindowRel.java b/query/src/main/java/org/apache/kylin/query/relnode/OLAPWindowRel.java
index 79e5cad..c2ea4e2 100644
--- a/query/src/main/java/org/apache/kylin/query/relnode/OLAPWindowRel.java
+++ b/query/src/main/java/org/apache/kylin/query/relnode/OLAPWindowRel.java
@@ -21,9 +21,9 @@ package org.apache.kylin.query.relnode;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.calcite.adapter.enumerable.EnumerableWindowBridge;
 import org.apache.calcite.adapter.enumerable.EnumerableConvention;
 import org.apache.calcite.adapter.enumerable.EnumerableRel;
-import org.apache.calcite.adapter.enumerable.EnumerableWindowBridge;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.plan.RelOptCost;
 import org.apache.calcite.plan.RelOptPlanner;
@@ -34,20 +34,19 @@ import org.apache.calcite.rel.RelWriter;
 import org.apache.calcite.rel.core.AggregateCall;
 import org.apache.calcite.rel.core.Window;
 import org.apache.calcite.rel.metadata.RelMetadataQuery;
+
+import com.google.common.base.Preconditions;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexLiteral;
 import org.apache.kylin.metadata.model.TblColRef;
 
-import com.google.common.base.Preconditions;
-
 /**
  */
 public class OLAPWindowRel extends Window implements OLAPRel {
     private ColumnRowType columnRowType;
     private OLAPContext context;
 
-    public OLAPWindowRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode input, List<RexLiteral> constants,
-            RelDataType rowType, List<Group> groups) {
+    public OLAPWindowRel(RelOptCluster cluster, RelTraitSet traitSet, RelNode input, List<RexLiteral> constants, RelDataType rowType, List<Group> groups) {
         super(cluster, traitSet, input, constants, rowType, groups);
         Preconditions.checkArgument(getConvention() == CONVENTION);
         Preconditions.checkArgument(getConvention() == input.getConvention());
@@ -91,8 +90,7 @@ public class OLAPWindowRel extends Window implements OLAPRel {
         // add window aggregate calls column
         for (Group group : groups) {
             for (AggregateCall aggrCall : group.getAggregateCalls(this)) {
-                TblColRef aggrCallCol = TblColRef.newInnerColumn(aggrCall.getName(),
-                        TblColRef.InnerDataTypeEnum.LITERAL);
+                TblColRef aggrCallCol = TblColRef.newInnerColumn(aggrCall.getName(), TblColRef.InnerDataTypeEnum.LITERAL);
                 columns.add(aggrCallCol);
             }
         }
@@ -115,8 +113,7 @@ public class OLAPWindowRel extends Window implements OLAPRel {
             }
             relInputs.add(input);
         }
-        return EnumerableWindowBridge.createEnumerableWindow(getCluster(), traitSet, inputs.get(0), constants, rowType,
-                groups);
+        return EnumerableWindowBridge.createEnumerableWindow(getCluster(), traitSet, inputs.get(0), constants, rowType, groups);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/QueryRouter.java b/query/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
index ecb78cd..18db4ac 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
@@ -41,8 +41,7 @@ public class QueryRouter {
 
     private static final Logger logger = LoggerFactory.getLogger(QueryRouter.class);
 
-    public static IRealization selectRealization(OLAPContext olapContext, Set<IRealization> realizations)
-            throws NoRealizationFoundException {
+    public static IRealization selectRealization(OLAPContext olapContext, Set<IRealization> realizations) throws NoRealizationFoundException {
 
         String factTableName = olapContext.firstTableScan.getTableName();
         String projectName = olapContext.olapSchema.getProjectName();
@@ -54,8 +53,7 @@ public class QueryRouter {
                 candidates.add(new Candidate(real, sqlDigest));
         }
 
-        logger.info("Find candidates by table " + factTableName + " and project=" + projectName + " : "
-                + StringUtils.join(candidates, ","));
+        logger.info("Find candidates by table " + factTableName + " and project=" + projectName + " : " + StringUtils.join(candidates, ","));
 
         // rule based realization selection, rules might reorder realizations or remove specific realization
         RoutingRule.applyRules(candidates);
@@ -67,8 +65,7 @@ public class QueryRouter {
         Candidate chosen = candidates.get(0);
         adjustForDimensionAsMeasure(chosen, olapContext);
 
-        logger.info("The realizations remaining: " + RoutingRule.getPrintableText(candidates)
-                + " And the final chosen one is the first one");
+        logger.info("The realizations remaining: " + RoutingRule.getPrintableText(candidates) + " And the final chosen one is the first one");
 
         return chosen.realization;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/routing/RealizationChooser.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/RealizationChooser.java b/query/src/main/java/org/apache/kylin/query/routing/RealizationChooser.java
index 870ad26..a598ee6 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/RealizationChooser.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/RealizationChooser.java
@@ -130,8 +130,7 @@ public class RealizationChooser {
         KylinConfig kylinConfig = first.olapSchema.getConfig();
         String projectName = first.olapSchema.getProjectName();
         String factTableName = first.firstTableScan.getOlapTable().getTableName();
-        Set<IRealization> realizations = ProjectManager.getInstance(kylinConfig).getRealizationsByTable(projectName,
-                factTableName);
+        Set<IRealization> realizations = ProjectManager.getInstance(kylinConfig).getRealizationsByTable(projectName, factTableName);
 
         final Map<DataModelDesc, Set<IRealization>> models = Maps.newHashMap();
         final Map<DataModelDesc, RealizationCost> costs = Maps.newHashMap();
@@ -205,8 +204,7 @@ public class RealizationChooser {
             this.priority = Candidate.PRIORITIES.get(real.getType());
 
             // ref CubeInstance.getCost()
-            int c = real.getAllDimensions().size() * CubeInstance.COST_WEIGHT_DIMENSION
-                    + real.getMeasures().size() * CubeInstance.COST_WEIGHT_MEASURE;
+            int c = real.getAllDimensions().size() * CubeInstance.COST_WEIGHT_DIMENSION + real.getMeasures().size() * CubeInstance.COST_WEIGHT_MEASURE;
             for (JoinTableDesc join : real.getModel().getJoinTables()) {
                 if (join.getJoin().isInnerJoin())
                     c += CubeInstance.COST_WEIGHT_INNER_JOIN;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/routing/RoutingRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/RoutingRule.java b/query/src/main/java/org/apache/kylin/query/routing/RoutingRule.java
index 5cf9ae8..53f871d 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/RoutingRule.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/RoutingRule.java
@@ -48,8 +48,7 @@ public abstract class RoutingRule {
             String before = getPrintableText(candidates);
             rule.apply(candidates);
             String after = getPrintableText(candidates);
-            logger.info(
-                    "Applying rule: " + rule + ", realizations before: " + before + ", realizations after: " + after);
+            logger.info("Applying rule: " + rule + ", realizations before: " + before + ", realizations after: " + after);
         }
     }
 
@@ -77,8 +76,7 @@ public abstract class RoutingRule {
      */
     public static void registerRule(RoutingRule rule, int applyOrder) {
         if (applyOrder > rules.size()) {
-            logger.warn("apply order " + applyOrder + "  is larger than rules size " + rules.size()
-                    + ", will put the new rule at the end");
+            logger.warn("apply order " + applyOrder + "  is larger than rules size " + rules.size() + ", will put the new rule at the end");
             rules.add(rule);
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java b/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
index 9f339eb..bd83af1 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
@@ -35,8 +35,7 @@ public class RealizationSortRule extends RoutingRule {
     public void apply(List<Candidate> candidates) {
         StringBuilder sb = new StringBuilder();
         for (Candidate candidate : candidates) {
-            sb.append(candidate.getRealization().getCanonicalName() + " priority " + candidate.getPriority() + " cost "
-                    + candidate.getCapability().cost + ". ");
+            sb.append(candidate.getRealization().getCanonicalName() + " priority " + candidate.getPriority() + " cost " + candidate.getCapability().cost + ". ");
         }
         logger.info(sb.toString());
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java b/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
index 7cf4663..f309757 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
@@ -40,10 +40,10 @@ public class RemoveBlackoutRealizationsRule extends RoutingRule {
             return false;
         if (!whiteList.isEmpty() && !whiteList.contains(real.getCanonicalName()))
             return false;
-
+        
         return true;
     }
-
+    
     @Override
     public void apply(List<Candidate> candidates) {
         for (Iterator<Candidate> iterator = candidates.iterator(); iterator.hasNext();) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java b/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
index 44ee956..21d2fc1 100644
--- a/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
+++ b/query/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
@@ -172,8 +172,7 @@ public class OLAPTable extends AbstractQueryableTable implements TranslatableTab
         List<ColumnDesc> tableColumns = mgr.listExposedColumns(olapSchema.getProjectName(), sourceTable);
 
         List<ColumnDesc> metricColumns = Lists.newArrayList();
-        List<MeasureDesc> countMeasures = mgr.listEffectiveRewriteMeasures(olapSchema.getProjectName(),
-                sourceTable.getIdentity());
+        List<MeasureDesc> countMeasures = mgr.listEffectiveRewriteMeasures(olapSchema.getProjectName(), sourceTable.getIdentity());
         HashSet<String> metFields = new HashSet<String>();
         for (MeasureDesc m : countMeasures) {
 
@@ -203,13 +202,11 @@ public class OLAPTable extends AbstractQueryableTable implements TranslatableTab
         }
         //2. All integer measures in non-cube realizations
         for (IRealization realization : mgr.listAllRealizations(olapSchema.getProjectName())) {
-            if (realization.getType() == RealizationType.INVERTED_INDEX
-                    && realization.getModel().isFactTable(sourceTable.getIdentity())) {
+            if (realization.getType() == RealizationType.INVERTED_INDEX && realization.getModel().isFactTable(sourceTable.getIdentity())) {
                 DataModelDesc model = realization.getModel();
                 for (String metricColumn : model.getMetrics()) {
                     TblColRef col = model.findColumn(metricColumn);
-                    if (col.getTable().equals(sourceTable.getIdentity()) && col.getType().isIntegerFamily()
-                            && !col.getType().isBigInt())
+                    if (col.getTable().equals(sourceTable.getIdentity()) && col.getType().isIntegerFamily() && !col.getType().isBigInt())
                         upgradeCols.add(col.getColumnDesc());
                 }
             }
@@ -218,12 +215,10 @@ public class OLAPTable extends AbstractQueryableTable implements TranslatableTab
         for (ColumnDesc upgrade : upgradeCols) {
             int index = tableColumns.indexOf(upgrade);
             if (index < 0) {
-                throw new IllegalStateException(
-                        "Metric column " + upgrade + " is not found in the the project's columns");
+                throw new IllegalStateException("Metric column " + upgrade + " is not found in the project's columns");
             }
             tableColumns.get(index).setUpgradedType("bigint");
-            logger.info("To avoid overflow, upgraded {}'s type from {} to {}", tableColumns.get(index),
-                    tableColumns.get(index).getType(), tableColumns.get(index).getUpgradedType());
+            logger.info("To avoid overflow, upgraded {}'s type from {} to {}", tableColumns.get(index), tableColumns.get(index).getType(), tableColumns.get(index).getUpgradedType());
         }
 
         Collections.sort(tableColumns, new Comparator<ColumnDesc>() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/util/CognosParenthesesEscape.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/util/CognosParenthesesEscape.java b/query/src/main/java/org/apache/kylin/query/util/CognosParenthesesEscape.java
index a8dfda1..6d930a5 100644
--- a/query/src/main/java/org/apache/kylin/query/util/CognosParenthesesEscape.java
+++ b/query/src/main/java/org/apache/kylin/query/util/CognosParenthesesEscape.java
@@ -29,8 +29,7 @@ import java.util.regex.Pattern;
 import com.google.common.collect.Lists;
 
 public class CognosParenthesesEscape implements QueryUtil.IQueryTransformer {
-    private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\s*\\(\\s*)+(?!\\s*select\\s)",
-            Pattern.CASE_INSENSITIVE);
+    private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\s*\\(\\s*)+(?!\\s*select\\s)", Pattern.CASE_INSENSITIVE);
 
     @Override
     public String transform(String sql) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
index d891aed..d48a26f 100644
--- a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
@@ -92,19 +92,12 @@ public class QueryUtil {
         private static final String S0 = "\\s*";
         private static final String S1 = "\\s";
         private static final String SM = "\\s+";
-        private static final Pattern PTN_GROUP_BY = Pattern.compile(S1 + "GROUP" + SM + "BY" + S1,
-                Pattern.CASE_INSENSITIVE);
-        private static final Pattern PTN_HAVING_COUNT_GREATER_THAN_ZERO = Pattern.compile(S1 + "HAVING" + SM + "[(]?"
-                + S0 + "COUNT" + S0 + "[(]" + S0 + "1" + S0 + "[)]" + S0 + ">" + S0 + "0" + S0 + "[)]?",
-                Pattern.CASE_INSENSITIVE);
-        private static final Pattern PTN_SUM_1 = Pattern.compile(S0 + "SUM" + S0 + "[(]" + S0 + "[1]" + S0 + "[)]" + S0,
-                Pattern.CASE_INSENSITIVE);
+        private static final Pattern PTN_GROUP_BY = Pattern.compile(S1 + "GROUP" + SM + "BY" + S1, Pattern.CASE_INSENSITIVE);
+        private static final Pattern PTN_HAVING_COUNT_GREATER_THAN_ZERO = Pattern.compile(S1 + "HAVING" + SM + "[(]?" + S0 + "COUNT" + S0 + "[(]" + S0 + "1" + S0 + "[)]" + S0 + ">" + S0 + "0" + S0 + "[)]?", Pattern.CASE_INSENSITIVE);
+        private static final Pattern PTN_SUM_1 = Pattern.compile(S0 + "SUM" + S0 + "[(]" + S0 + "[1]" + S0 + "[)]" + S0, Pattern.CASE_INSENSITIVE);
         private static final Pattern PTN_NOT_EQ = Pattern.compile(S0 + "!=" + S0, Pattern.CASE_INSENSITIVE);
-        private static final Pattern PTN_INTERVAL = Pattern.compile(
-                "interval" + SM + "(floor\\()([\\d\\.]+)(\\))" + SM + "(second|minute|hour|day|month|year)",
-                Pattern.CASE_INSENSITIVE);
-        private static final Pattern PTN_HAVING_ESCAPE_FUNCTION = Pattern.compile("\\{fn" + "(.*?)" + "\\}",
-                Pattern.CASE_INSENSITIVE);
+        private static final Pattern PTN_INTERVAL = Pattern.compile("interval" + SM + "(floor\\()([\\d\\.]+)(\\))" + SM + "(second|minute|hour|day|month|year)", Pattern.CASE_INSENSITIVE);
+        private static final Pattern PTN_HAVING_ESCAPE_FUNCTION = Pattern.compile("\\{fn" + "(.*?)" + "\\}", Pattern.CASE_INSENSITIVE);
 
         @Override
         public String transform(String sql) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/test/java/org/apache/kylin/query/QueryDataSourceTest.java
----------------------------------------------------------------------
diff --git a/query/src/test/java/org/apache/kylin/query/QueryDataSourceTest.java b/query/src/test/java/org/apache/kylin/query/QueryDataSourceTest.java
index 1606c70..2c06be8 100644
--- a/query/src/test/java/org/apache/kylin/query/QueryDataSourceTest.java
+++ b/query/src/test/java/org/apache/kylin/query/QueryDataSourceTest.java
@@ -87,10 +87,10 @@ public class QueryDataSourceTest extends LocalFileMetadataTestCase {
         dsCache.clearCache();
     }
 
-    @Test(timeout = 10000)
+    @Test(timeout=10000)
     public void testMaxConnLimit() throws SQLException {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
-
+        
         // Test with connection limit 
         Properties props = new Properties();
         props.setProperty("maxActive", "3");
@@ -109,7 +109,7 @@ public class QueryDataSourceTest extends LocalFileMetadataTestCase {
         DBUtils.closeQuietly(ds1Conn1);
         DBUtils.closeQuietly(ds1Conn2);
         DBUtils.closeQuietly(ds1Conn3);
-
+        
         // Test with not connection limit
         DataSource ds2 = QueryDataSource.create("default", config);
         Connection ds2Conn1 = ds2.getConnection();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/test/java/org/apache/kylin/query/util/CognosParenthesesEscapeTest.java
----------------------------------------------------------------------
diff --git a/query/src/test/java/org/apache/kylin/query/util/CognosParenthesesEscapeTest.java b/query/src/test/java/org/apache/kylin/query/util/CognosParenthesesEscapeTest.java
index c8ac432..153c097 100644
--- a/query/src/test/java/org/apache/kylin/query/util/CognosParenthesesEscapeTest.java
+++ b/query/src/test/java/org/apache/kylin/query/util/CognosParenthesesEscapeTest.java
@@ -40,10 +40,8 @@ public class CognosParenthesesEscapeTest {
     @Test
     public void advanced1Test() throws IOException {
         CognosParenthesesEscape escape = new CognosParenthesesEscape();
-        String query = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query01.sql"),
-                Charset.defaultCharset());
-        String expected = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query01.sql.expected"),
-                Charset.defaultCharset());
+        String query = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query01.sql"), Charset.defaultCharset());
+        String expected = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query01.sql.expected"), Charset.defaultCharset());
         String transformed = escape.transform(query);
         //System.out.println(transformed);
         Assert.assertEquals(expected, transformed);
@@ -52,10 +50,8 @@ public class CognosParenthesesEscapeTest {
     @Test
     public void advanced2Test() throws IOException {
         CognosParenthesesEscape escape = new CognosParenthesesEscape();
-        String query = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query02.sql"),
-                Charset.defaultCharset());
-        String expected = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query02.sql.expected"),
-                Charset.defaultCharset());
+        String query = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query02.sql"), Charset.defaultCharset());
+        String expected = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query02.sql.expected"), Charset.defaultCharset());
         String transformed = escape.transform(query);
         //System.out.println(transformed);
         Assert.assertEquals(expected, transformed);
@@ -64,10 +60,8 @@ public class CognosParenthesesEscapeTest {
     @Test
     public void advanced3Test() throws IOException {
         CognosParenthesesEscape escape = new CognosParenthesesEscape();
-        String query = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query03.sql"),
-                Charset.defaultCharset());
-        String expected = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query03.sql.expected"),
-                Charset.defaultCharset());
+        String query = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query03.sql"), Charset.defaultCharset());
+        String expected = FileUtils.readFileToString(new File("src/test/resources/query/cognos/query03.sql.expected"), Charset.defaultCharset());
         String transformed = escape.transform(query);
         //System.out.println(transformed);
         Assert.assertEquals(expected, transformed);
@@ -76,8 +70,7 @@ public class CognosParenthesesEscapeTest {
     @Test
     public void proguardTest() throws IOException {
         CognosParenthesesEscape escape = new CognosParenthesesEscape();
-        Collection<File> files = FileUtils.listFiles(new File("../kylin-it/src/test/resources"), new String[] { "sql" },
-                true);
+        Collection<File> files = FileUtils.listFiles(new File("../kylin-it/src/test/resources"), new String[] { "sql" }, true);
         for (File f : files) {
             System.out.println("checking " + f.getAbsolutePath());
             String query = FileUtils.readFileToString(f, Charset.defaultCharset());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
----------------------------------------------------------------------
diff --git a/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java b/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
index b38393c..a1edd89 100644
--- a/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
+++ b/query/src/test/java/org/apache/kylin/query/util/QueryUtilTest.java
@@ -46,9 +46,7 @@ public class QueryUtilTest extends LocalFileMetadataTestCase {
         {
             String sql = "select ( date '2001-09-28' + interval floor(2) month) from test_kylin_fact group by ( date '2001-09-28' + interval floor(2) month)";
             String s = QueryUtil.massageSql(sql, 0, 0);
-            Assert.assertEquals(
-                    "select ( date '2001-09-28' + interval '2' month) from test_kylin_fact group by ( date '2001-09-28' + interval '2' month)",
-                    s);
+            Assert.assertEquals("select ( date '2001-09-28' + interval '2' month) from test_kylin_fact group by ( date '2001-09-28' + interval '2' month)", s);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java b/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
index 2ce8f0a..275fce5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/adhoc/AdHocRunnerJdbcImpl.java
@@ -28,9 +28,9 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.commons.pool.impl.GenericObjectPool;
+import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
-import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
 
 public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
 
@@ -48,8 +48,7 @@ public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
     public void init() {
         if (this.pool == null) {
             this.pool = new JdbcConnectionPool();
-            JdbcConnectionFactory factory = new JdbcConnectionFactory(this.config.getJdbcUrl(),
-                    this.config.getJdbcDriverClass(), this.config.getJdbcUsername(), this.config.getJdbcPassword());
+            JdbcConnectionFactory factory = new JdbcConnectionFactory(this.config.getJdbcUrl(), this.config.getJdbcDriverClass(), this.config.getJdbcUsername(), this.config.getJdbcPassword());
             GenericObjectPool.Config poolConfig = new GenericObjectPool.Config();
             poolConfig.maxActive = this.config.getPoolMaxTotal();
             poolConfig.maxIdle = this.config.getPoolMaxIdle();
@@ -64,8 +63,7 @@ public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
     }
 
     @Override
-    public void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas)
-            throws Exception {
+    public void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas) throws Exception {
         Statement statement = null;
         Connection connection = this.getConnection();
         ResultSet resultSet = null;
@@ -87,11 +85,7 @@ public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
 
             // fill in selected column meta
             for (int i = 1; i <= columnCount; ++i) {
-                columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), false,
-                        metaData.isCurrency(i), metaData.isNullable(i), false, metaData.getColumnDisplaySize(i),
-                        metaData.getColumnLabel(i), metaData.getColumnName(i), null, null, null,
-                        metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
-                        metaData.getColumnTypeName(i), metaData.isReadOnly(i), false, false));
+                columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), false, metaData.isCurrency(i), metaData.isNullable(i), false, metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i), null, null, null, metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i), false, false));
             }
 
         } catch (SQLException sqlException) {
@@ -109,6 +103,7 @@ public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
         this.pool.returnConnection(connection);
     }
 
+
     static void extractResults(ResultSet resultSet, List<List<String>> results) throws SQLException {
         List<String> oneRow = new LinkedList<String>();
 
@@ -127,4 +122,5 @@ public class AdHocRunnerJdbcImpl extends AdHocRunnerBase {
         }
     }
 
-}
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java b/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
index a316342..42613fe 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionFactory.java
@@ -18,12 +18,13 @@
 
 package org.apache.kylin.rest.adhoc;
 
+
+import org.apache.commons.pool.PoolableObjectFactory;
+
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
 
-import org.apache.commons.pool.PoolableObjectFactory;
-
 class JdbcConnectionFactory implements PoolableObjectFactory {
 
     private final String jdbcUrl;
@@ -34,6 +35,7 @@ class JdbcConnectionFactory implements PoolableObjectFactory {
 
     private final String password;
 
+
     public JdbcConnectionFactory(String jdbcUrl, String driverClass, String username, String password) {
         this.jdbcUrl = jdbcUrl;
         this.driverClass = driverClass;
@@ -47,12 +49,14 @@ class JdbcConnectionFactory implements PoolableObjectFactory {
         }
     }
 
+
     @Override
     public Connection makeObject() throws Exception {
         Connection connection = DriverManager.getConnection(jdbcUrl, username, password);
         return connection;
     }
 
+
     @Override
     public void activateObject(Object o) throws Exception {
 
@@ -66,7 +70,7 @@ class JdbcConnectionFactory implements PoolableObjectFactory {
     @Override
     public void destroyObject(Object pooledObject) throws Exception {
 
-        if (pooledObject instanceof Connection) {
+        if(pooledObject instanceof Connection) {
             Connection connection = (Connection) pooledObject;
 
             if (connection != null)
@@ -77,7 +81,7 @@ class JdbcConnectionFactory implements PoolableObjectFactory {
 
     @Override
     public boolean validateObject(Object pooledObject) {
-        if (pooledObject instanceof Connection) {
+        if(pooledObject instanceof Connection) {
             Connection connection = (Connection) pooledObject;
 
             if (connection != null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionPool.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionPool.java b/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionPool.java
index f5f4606..8954d22 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionPool.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/adhoc/JdbcConnectionPool.java
@@ -18,16 +18,19 @@
 
 package org.apache.kylin.rest.adhoc;
 
+
 import java.io.Closeable;
 import java.io.IOException;
 import java.sql.Connection;
 
 import org.apache.commons.pool.impl.GenericObjectPool;
 
+
 public class JdbcConnectionPool implements Closeable {
 
     private GenericObjectPool internalPool = null;
 
+
     public void createPool(JdbcConnectionFactory factory, GenericObjectPool.Config poolConfig) throws IOException {
         if (this.internalPool != null)
             this.close();
@@ -70,4 +73,4 @@ public class JdbcConnectionPool implements Closeable {
             throw new RuntimeException(e.getMessage(), e);
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/constant/Constant.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/constant/Constant.java b/server-base/src/main/java/org/apache/kylin/rest/constant/Constant.java
index f0899ce..f068e5f 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/constant/Constant.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/constant/Constant.java
@@ -37,8 +37,7 @@ public class Constant {
     public final static String ACCESS_HAS_ROLE_ADMIN = "hasRole('ROLE_ADMIN')";
     public final static String ACCESS_HAS_ROLE_MODELER = "hasRole('ROLE_MODELER')";
 
-    public final static String ACCESS_POST_FILTER_READ = "hasRole('ROLE_ADMIN') or hasPermission(filterObject, 'READ') or hasPermission(filterObject, 'MANAGEMENT') "
-            + "or hasPermission(filterObject, 'OPERATION') or hasPermission(filterObject, 'ADMINISTRATION')";
+    public final static String ACCESS_POST_FILTER_READ = "hasRole('ROLE_ADMIN') or hasPermission(filterObject, 'READ') or hasPermission(filterObject, 'MANAGEMENT') " + "or hasPermission(filterObject, 'OPERATION') or hasPermission(filterObject, 'ADMINISTRATION')";
 
     public final static String SERVER_MODE_QUERY = "query";
     public final static String SERVER_MODE_JOB = "job";

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/AccessController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/AccessController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/AccessController.java
index 440caba..a88c342 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/AccessController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/AccessController.java
@@ -73,8 +73,7 @@ public class AccessController extends BasicController {
      */
     @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.POST }, produces = { "application/json" })
     @ResponseBody
-    public List<AccessEntryResponse> grant(@PathVariable String type, @PathVariable String uuid,
-            @RequestBody AccessRequest accessRequest) {
+    public List<AccessEntryResponse> grant(@PathVariable String type, @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Sid sid = accessService.getSid(accessRequest.getSid(), accessRequest.isPrincipal());
         Permission permission = AclPermissionFactory.getPermission(accessRequest.getPermission());
@@ -90,8 +89,7 @@ public class AccessController extends BasicController {
      */
     @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
-    public List<AccessEntryResponse> update(@PathVariable String type, @PathVariable String uuid,
-            @RequestBody AccessRequest accessRequest) {
+    public List<AccessEntryResponse> update(@PathVariable String type, @PathVariable String uuid, @RequestBody AccessRequest accessRequest) {
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Permission permission = AclPermissionFactory.getPermission(accessRequest.getPermission());
         Acl acl = accessService.update(ae, accessRequest.getAccessEntryId(), permission);
@@ -105,8 +103,7 @@ public class AccessController extends BasicController {
      * @param accessRequest
      */
     @RequestMapping(value = "/{type}/{uuid}", method = { RequestMethod.DELETE }, produces = { "application/json" })
-    public List<AccessEntryResponse> revoke(@PathVariable String type, @PathVariable String uuid,
-            AccessRequest accessRequest) {
+    public List<AccessEntryResponse> revoke(@PathVariable String type, @PathVariable String uuid, AccessRequest accessRequest) {
         AclEntity ae = accessService.getAclEntity(type, uuid);
         Acl acl = accessService.revoke(ae, accessRequest.getAccessEntryId());
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
index d9d171a..eb4b6ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
@@ -59,8 +59,7 @@ public class BasicController {
         Throwable cause = ex;
         while (cause != null) {
             if (cause.getClass().getPackage().getName().startsWith("org.apache.hadoop.hbase")) {
-                return new ErrorResponse(req.getRequestURL().toString(),
-                        new InternalErrorException(String.format(msg.getHBASE_FAIL(), ex.getMessage()), ex));
+                return new ErrorResponse(req.getRequestURL().toString(), new InternalErrorException(String.format(msg.getHBASE_FAIL(), ex.getMessage()), ex));
             }
             cause = cause.getCause();
         }
@@ -98,8 +97,7 @@ public class BasicController {
 
     protected void setDownloadResponse(String downloadFile, final HttpServletResponse response) {
         File file = new File(downloadFile);
-        try (InputStream fileInputStream = new FileInputStream(file);
-                OutputStream output = response.getOutputStream();) {
+        try (InputStream fileInputStream = new FileInputStream(file); OutputStream output = response.getOutputStream();) {
             response.reset();
             response.setContentType("application/octet-stream");
             response.setContentLength((int) (file.length()));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
index a6b1d1b..092220c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CacheController.java
@@ -52,22 +52,18 @@ public class CacheController extends BasicController {
     /**
      * Announce wipe cache to all cluster nodes
      */
-    @RequestMapping(value = "/announce/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/announce/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
-    public void announceWipeCache(@PathVariable String entity, @PathVariable String event,
-            @PathVariable String cacheKey) throws IOException {
+    public void announceWipeCache(@PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey) throws IOException {
         cacheService.annouceWipeCache(entity, event, cacheKey);
     }
 
     /**
      * Wipe cache on this node
      */
-    @RequestMapping(value = "/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = {
-            "application/json" })
+    @RequestMapping(value = "/{entity}/{cacheKey}/{event}", method = { RequestMethod.PUT }, produces = { "application/json" })
     @ResponseBody
-    public void wipeCache(@PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey)
-            throws IOException {
+    public void wipeCache(@PathVariable String entity, @PathVariable String event, @PathVariable String cacheKey) throws IOException {
         cacheService.notifyMetadataChange(entity, Broadcaster.Event.getEvent(event), cacheKey);
     }
 


[02/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
index 50051e0..afb0b33 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
@@ -84,8 +84,7 @@ public class FuzzyValueCombinationTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testSomeNull() {
-        System.out
-                .println("test some null ============================================================================");
+        System.out.println("test some null ============================================================================");
         Map<TblColRef, Set<String>> values = Maps.newHashMap();
         values.put(col1, set("a", "b", "c"));
         values.put(col2, set());
@@ -99,8 +98,7 @@ public class FuzzyValueCombinationTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testAllNulls() {
-        System.out
-                .println("test all nulls ============================================================================");
+        System.out.println("test all nulls ============================================================================");
         Map<TblColRef, Set<String>> values = Maps.newHashMap();
         values.put(col1, set());
         values.put(col2, set());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
index 5f87b3b..d2b3488 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
@@ -86,8 +86,7 @@ public class HiveJDBCClientTest {
         Statement stmt = con.createStatement();
         String tableName = "testHiveDriverTable";
         stmt.execute("drop table if exists " + tableName);
-        stmt.execute(
-                "create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
+        stmt.execute("create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
         // show tables
         String sql = "show tables '" + tableName + "'";
         System.out.println("Running: " + sql);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
index 5e3c112..63f8961 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
@@ -48,8 +48,7 @@ public class TimeConditionLiteralsReplacerTest extends FilterBaseTest {
 
         TimeConditionLiteralsReplacer filterDecorator = new TimeConditionLiteralsReplacer(compareFilter);
         byte[] bytes = TupleFilterSerializer.serialize(compareFilter, filterDecorator, DictCodeSystem.INSTANCE);
-        CompareTupleFilter compareTupleFilter = (CompareTupleFilter) TupleFilterSerializer.deserialize(bytes,
-                DictCodeSystem.INSTANCE);
+        CompareTupleFilter compareTupleFilter = (CompareTupleFilter) TupleFilterSerializer.deserialize(bytes, DictCodeSystem.INSTANCE);
         Assert.assertEquals("2000-01-01", compareTupleFilter.getFirstValue());
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
index 1235a9d..291072f 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
@@ -201,8 +201,7 @@ public class FilterBaseTest extends LocalFileMetadataTestCase {
         int s1 = f1.getChildren().size();
         int s2 = f2.getChildren().size();
         if (s1 != s2) {
-            throw new IllegalStateException(
-                    "f1=" + str1 + ", f2=" + str2 + " has different children: " + s1 + " vs. " + s2);
+            throw new IllegalStateException("f1=" + str1 + ", f2=" + str2 + " has different children: " + s1 + " vs. " + s2);
         }
 
         for (int i = 0; i < s1; i++) {
@@ -210,8 +209,7 @@ public class FilterBaseTest extends LocalFileMetadataTestCase {
         }
     }
 
-    private static String[][] SAMPLE_DATA = new String[][] { { "2013-03-10", "2012-01-12", "2014-03-10" },
-            { "ClothinShoes & Accessories", "ABIN", "FP-GTC", "FP-NON-GTC" } };
+    private static String[][] SAMPLE_DATA = new String[][] { { "2013-03-10", "2012-01-12", "2014-03-10" }, { "ClothinShoes & Accessories", "ABIN", "FP-GTC", "FP-NON-GTC" } };
 
     protected Collection<Tuple> generateTuple(int number, List<TblColRef> columns, int[] matches) {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
index 7335017..f151876 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
@@ -89,11 +89,9 @@ public class MeasureTypeOnlyAggrInBaseTest extends LocalFileMetadataTestCase {
     }
 
     @Test
-    public void testIdentifyCuboidV2()
-            throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, NoSuchFieldException {
+    public void testIdentifyCuboidV2() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, NoSuchFieldException {
         CubeDesc cubeDesc = cube.getDescriptor();
-        Cuboid ret = Cuboid.identifyCuboid(cubeDesc, Sets.<TblColRef> newHashSet(),
-                Lists.<FunctionDesc> newArrayList());
+        Cuboid ret = Cuboid.identifyCuboid(cubeDesc, Sets.<TblColRef> newHashSet(), Lists.<FunctionDesc> newArrayList());
         long baseCuboidId = cubeDesc.getRowkey().getFullMask();
         assertNotEquals(baseCuboidId, ret.getId());
         ret = Cuboid.identifyCuboid(cubeDesc, dimensions, metrics);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
index ef7ec96..d47f393 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
@@ -83,11 +83,9 @@ public class CubeHFileMapper2Test extends LocalFileMetadataTestCase {
         ImmutableBytesWritable outKey = (ImmutableBytesWritable) outKV[0];
         KeyValue outValue = (KeyValue) outKV[1];
 
-        assertTrue(Bytes.compareTo(key.getBytes(), 0, key.getLength(), outKey.get(), outKey.getOffset(),
-                outKey.getLength()) == 0);
+        assertTrue(Bytes.compareTo(key.getBytes(), 0, key.getLength(), outKey.get(), outKey.getOffset(), outKey.getLength()) == 0);
 
-        assertTrue(Bytes.compareTo(value.getBytes(), 0, value.getLength(), outValue.getValueArray(),
-                outValue.getValueOffset(), outValue.getValueLength()) == 0);
+        assertTrue(Bytes.compareTo(value.getBytes(), 0, value.getLength(), outValue.getValueArray(), outValue.getValueOffset(), outValue.getValueLength()) == 0);
     }
 
 }

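The assertions rejoined onto single lines in CubeHFileMapper2Test compare sub-ranges of two byte arrays with org.apache.hadoop.hbase.util.Bytes.compareTo, which takes a buffer, offset and length for each side and returns 0 when the ranges match. A tiny self-contained illustration of that call (the sample keys are made up):

import org.apache.hadoop.hbase.util.Bytes;

public class ByteRangeCompareSketch {
    public static void main(String[] args) {
        byte[] a = Bytes.toBytes("rowkey-000123");
        byte[] b = Bytes.toBytes("xx-rowkey-000123-yy");

        // Compare all of a against the 13-byte window of b starting at offset 3.
        int cmp = Bytes.compareTo(a, 0, a.length, b, 3, a.length);
        System.out.println(cmp == 0 ? "ranges are equal" : "ranges differ");
    }
}
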
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
index b66895f..03a3cba 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
@@ -82,20 +82,13 @@ public class RangeKeyDistributionMapperTest {
     @Test
     public void testMapperWithHeader() throws IOException {
 
-        Text inputKey1 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0,
-                0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey2 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0,
-                0, 0, 0, 0, 127, 11, 122, 1, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey3 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 2, 2, 2,
-                2, 2, 2, 2, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey4 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 3, 3, 3,
-                3, 3, 3, 3, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey5 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 4, 4, 4,
-                4, 4, 4, 4, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey6 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 5, 5, 5,
-                5, 5, 5, 5, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey7 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6,
-                6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey1 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey2 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0, 0, 0, 0, 0, 127, 11, 122, 1, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey3 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 2, 2, 2, 2, 2, 2, 2, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey4 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 3, 3, 3, 3, 3, 3, 3, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey5 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 4, 4, 4, 4, 4, 4, 4, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey6 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 5, 5, 5, 5, 5, 5, 5, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey7 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
 
         mapDriver.addInput(inputKey1, new Text("abc"));
         mapDriver.addInput(inputKey2, new Text("abc"));
@@ -111,8 +104,7 @@ public class RangeKeyDistributionMapperTest {
 
         byte[] key1 = result.get(0).getFirst().getBytes();
         LongWritable value1 = result.get(0).getSecond();
-        assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6, 6, 6,
-                6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 }, key1);
+        assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 }, key1);
         assertEquals(273, value1.get());
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
index 33def1c..9b1a00d 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
@@ -52,8 +52,7 @@ public class RowValueDecoderTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testDecode() throws Exception {
-        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready")
-                .getDescriptor();
+        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready").getDescriptor();
         HBaseColumnDesc hbaseCol = cubeDesc.getHbaseMapping().getColumnFamily()[0].getColumns()[0];
 
         BufferedMeasureCodec codec = new BufferedMeasureCodec(hbaseCol.getMeasures());
@@ -83,8 +82,7 @@ public class RowValueDecoderTest extends LocalFileMetadataTestCase {
 
     @Test(expected = IllegalArgumentException.class)
     public void testError() throws Exception {
-        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready")
-                .getDescriptor();
+        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready").getDescriptor();
         HBaseColumnDesc hbaseCol = cubeDesc.getHbaseMapping().getColumnFamily()[0].getColumns()[0];
 
         BufferedMeasureCodec codec = new BufferedMeasureCodec(hbaseCol.getMeasures());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
index 2faf3c8..62b154e 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
@@ -43,8 +43,7 @@ public class SandboxMetastoreCLI {
         ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
         System.setProperty(KylinConfig.KYLIN_CONF, HBaseMetadataTestCase.SANDBOX_TEST_DATA);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException(
-                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
 
         if (args.length < 1) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
index 6beb835..19ee08f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
@@ -43,16 +43,13 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
     private static final Logger logger = LoggerFactory.getLogger(AbstractInfoExtractor.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true)
-            .withDescription("specify the dest dir to save the related information").create("destDir");
+    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true).withDescription("specify the dest dir to save the related information").create("destDir");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false)
-            .withDescription("specify whether to compress the output with zip. Default true.").create("compress");
+    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false).withDescription("specify whether to compress the output with zip. Default true.").create("compress");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_SUBMODULE = OptionBuilder.withArgName("submodule").hasArg().isRequired(false)
-            .withDescription("specify whether this is a submodule of other CLI tool").create("submodule");
+    private static final Option OPTION_SUBMODULE = OptionBuilder.withArgName("submodule").hasArg().isRequired(false).withDescription("specify whether this is a submodule of other CLI tool").create("submodule");
 
     private static final String DEFAULT_PACKAGE_TYPE = "base";
     private static final String[] COMMIT_SHA1_FILES = { "commit_SHA1", "commit.sha1" };
@@ -79,10 +76,8 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
     @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
         String exportDest = optionsHelper.getOptionValue(options.getOption("destDir"));
-        boolean shouldCompress = optionsHelper.hasOption(OPTION_COMPRESS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
-        boolean isSubmodule = optionsHelper.hasOption(OPTION_SUBMODULE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_SUBMODULE)) : false;
+        boolean shouldCompress = optionsHelper.hasOption(OPTION_COMPRESS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
+        boolean isSubmodule = optionsHelper.hasOption(OPTION_SUBMODULE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_SUBMODULE)) : false;
 
         if (StringUtils.isEmpty(exportDest)) {
             throw new RuntimeException("destDir is not set, exit directly without extracting");
@@ -92,8 +87,7 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
         }
 
         // create new folder to contain the output
-        String packageName = packageType.toLowerCase() + "_"
-                + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
+        String packageName = packageType.toLowerCase() + "_" + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
         if (!isSubmodule && new File(exportDest).exists()) {
             exportDest = exportDest + packageName + "/";
         }

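The option declarations and defaults rejoined above all follow one Commons CLI idiom: declare an optional single-argument Option via OptionBuilder, then fall back to a hard-coded default when the flag was not supplied. A minimal standalone sketch of that idiom, parsing with Apache Commons CLI directly rather than Kylin's OptionsHelper wrapper:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

public class CompressOptionSketch {

    @SuppressWarnings("static-access")
    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg()
            .isRequired(false).withDescription("whether to compress the output with zip").create("compress");

    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption(OPTION_COMPRESS);
        CommandLine cmd = new GnuParser().parse(options, args);

        // Same ternary-with-default idiom as the extractor: an absent flag means "true".
        boolean shouldCompress = cmd.hasOption("compress")
                ? Boolean.valueOf(cmd.getOptionValue("compress")) : true;
        System.out.println("compress = " + shouldCompress);
    }
}

Running it with -compress false overrides the default; omitting the flag keeps true.
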
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
index fabee87..6c8a6b0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
@@ -40,8 +40,7 @@ public class AclTableMigrationCLI {
         case CHECK:
             boolean needMigrate = tool.checkIfNeedMigrate(KylinConfig.getInstanceFromEnv());
             if (needMigrate) {
-                System.out.println(
-                        "Found acl tables that need to migrate. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
+                System.out.println("Found acl tables that need to migrate. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
                 System.exit(1);
             }
             break;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java b/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
index 39e316c..30bbb5e 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
@@ -77,49 +77,31 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("Specify which cube to extract").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false)
-            .withDescription("Specify which hybrid to extract").create("hybrid");
+    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false).withDescription("Specify which hybrid to extract").create("hybrid");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify realizations in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_All_PROJECT = OptionBuilder.withArgName("allProjects").hasArg(false)
-            .isRequired(false).withDescription("Specify realizations in all projects to extract").create("allProjects");
+    private static final Option OPTION_All_PROJECT = OptionBuilder.withArgName("allProjects").hasArg(false).isRequired(false).withDescription("Specify realizations in all projects to extract").create("allProjects");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_STORAGE_TYPE = OptionBuilder.withArgName("storageType").hasArg()
-            .isRequired(false).withDescription("Specify the storage type to overwrite. Default is empty, keep origin.")
-            .create("storageType");
+    private static final Option OPTION_STORAGE_TYPE = OptionBuilder.withArgName("storageType").hasArg().isRequired(false).withDescription("Specify the storage type to overwrite. Default is empty, keep origin.").create("storageType");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_ENGINE_TYPE = OptionBuilder.withArgName("engineType").hasArg().isRequired(false)
-            .withDescription("Specify the engine type to overwrite. Default is empty, keep origin.")
-            .create("engineType");
+    private static final Option OPTION_ENGINE_TYPE = OptionBuilder.withArgName("engineType").hasArg().isRequired(false).withDescription("Specify the engine type to overwrite. Default is empty, keep origin.").create("engineType");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg()
-            .isRequired(false).withDescription("set this to true if want extract the segments info. Default true")
-            .create("includeSegments");
+    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg().isRequired(false).withDescription("set this to true if want extract the segments info. Default true").create("includeSegments");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false)
-            .withDescription("set this to true if want to extract job info/outputs too. Default false")
-            .create("includeJobs");
+    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("set this to true if want to extract job info/outputs too. Default false").create("includeJobs");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_ONLY_JOB_OUTPUT = OptionBuilder.withArgName("onlyOutput").hasArg()
-            .isRequired(false).withDescription("when include jobs, onlt extract output of job. Default true")
-            .create("onlyOutput");
+    private static final Option OPTION_INCLUDE_ONLY_JOB_OUTPUT = OptionBuilder.withArgName("onlyOutput").hasArg().isRequired(false).withDescription("when include jobs, onlt extract output of job. Default true").create("onlyOutput");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_SEGMENT_DETAILS = OptionBuilder.withArgName("includeSegmentDetails")
-            .hasArg().isRequired(false)
-            .withDescription(
-                    "set this to true if want to extract segment details too, such as dict, tablesnapshot. Default false")
-            .create("includeSegmentDetails");
+    private static final Option OPTION_INCLUDE_SEGMENT_DETAILS = OptionBuilder.withArgName("includeSegmentDetails").hasArg().isRequired(false).withDescription("set this to true if want to extract segment details too, such as dict, tablesnapshot. Default false").create("includeSegmentDetails");
 
     private KylinConfig kylinConfig;
     private MetadataManager metadataManager;
@@ -166,18 +148,12 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
 
     @Override
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
-        includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
-        includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : false;
-        includeSegmentDetails = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENT_DETAILS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENT_DETAILS)) : false;
-        onlyJobOutput = optionsHelper.hasOption(OPTION_INCLUDE_ONLY_JOB_OUTPUT)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_ONLY_JOB_OUTPUT)) : true;
-        storageType = optionsHelper.hasOption(OPTION_STORAGE_TYPE) ? optionsHelper.getOptionValue(OPTION_STORAGE_TYPE)
-                : null;
-        engineType = optionsHelper.hasOption(OPTION_ENGINE_TYPE) ? optionsHelper.getOptionValue(OPTION_ENGINE_TYPE)
-                : null;
+        includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
+        includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : false;
+        includeSegmentDetails = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENT_DETAILS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENT_DETAILS)) : false;
+        onlyJobOutput = optionsHelper.hasOption(OPTION_INCLUDE_ONLY_JOB_OUTPUT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_ONLY_JOB_OUTPUT)) : true;
+        storageType = optionsHelper.hasOption(OPTION_STORAGE_TYPE) ? optionsHelper.getOptionValue(OPTION_STORAGE_TYPE) : null;
+        engineType = optionsHelper.hasOption(OPTION_ENGINE_TYPE) ? optionsHelper.getOptionValue(OPTION_ENGINE_TYPE) : null;
 
         kylinConfig = KylinConfig.getInstanceFromEnv();
         metadataManager = MetadataManager.getInstance(kylinConfig);
@@ -262,9 +238,7 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
                 try {
                     ResourceTool.copy(srcConfig, dstConfig, Lists.newArrayList(r));
                 } catch (Exception e) {
-                    logger.warn(
-                            "Exception when copying optional resource {}. May be caused by resource missing. skip it.",
-                            r);
+                    logger.warn("Exception when copying optional resource {}. May be caused by resource missing. skip it.", r);
                 }
             }
 
@@ -322,8 +296,7 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
     private void dealWithStreaming(CubeInstance cube) {
         streamingManager = StreamingManager.getInstance(kylinConfig);
         for (StreamingConfig streamingConfig : streamingManager.listAllStreaming()) {
-            if (streamingConfig.getName() != null
-                    && streamingConfig.getName().equalsIgnoreCase(cube.getRootFactTable())) {
+            if (streamingConfig.getName() != null && streamingConfig.getName().equalsIgnoreCase(cube.getRootFactTable())) {
                 addRequired(StreamingConfig.concatResourcePath(streamingConfig.getName()));
                 addRequired(KafkaConfig.concatResourcePath(streamingConfig.getName()));
             }
@@ -405,8 +378,7 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
             addRequired(HybridInstance.concatResourcePath(hybridInstance.getName()));
             for (IRealization iRealization : hybridInstance.getRealizations()) {
                 if (iRealization.getType() != RealizationType.CUBE) {
-                    throw new RuntimeException("Hybrid " + iRealization.getName() + " contains non cube child "
-                            + iRealization.getName() + " with type " + iRealization.getType());
+                    throw new RuntimeException("Hybrid " + iRealization.getName() + " contains non cube child " + iRealization.getName() + " with type " + iRealization.getType());
                 }
                 retrieveResourcePath(iRealization);
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java b/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
index 0b145d6..b4c44c3 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
@@ -60,26 +60,16 @@ public class CubeMetaIngester extends AbstractApplication {
     private static final Logger logger = LoggerFactory.getLogger(CubeMetaIngester.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_SRC = OptionBuilder.withArgName("srcPath").hasArg().isRequired(true)
-            .withDescription("specify the path to the extracted cube metadata zip file").create("srcPath");
+    private static final Option OPTION_SRC = OptionBuilder.withArgName("srcPath").hasArg().isRequired(true).withDescription("specify the path to the extracted cube metadata zip file").create("srcPath");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(true)
-            .withDescription("specify the target project for the new cubes").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(true).withDescription("specify the target project for the new cubes").create("project");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_FORCE_INGEST = OptionBuilder.withArgName("forceIngest").hasArg()
-            .isRequired(false)
-            .withDescription(
-                    "skip the target cube, model and table check and ingest by force. Use in caution because it might break existing cubes! Suggest to backup metadata store first")
-            .create("forceIngest");
+    private static final Option OPTION_FORCE_INGEST = OptionBuilder.withArgName("forceIngest").hasArg().isRequired(false).withDescription("skip the target cube, model and table check and ingest by force. Use in caution because it might break existing cubes! Suggest to backup metadata store first").create("forceIngest");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_OVERWRITE_TABLES = OptionBuilder.withArgName("overwriteTables").hasArg()
-            .isRequired(false)
-            .withDescription(
-                    "If table meta conflicts, overwrite the one in metadata store with the one in srcPath. Use in caution because it might break existing cubes! Suggest to backup metadata store first")
-            .create("overwriteTables");
+    private static final Option OPTION_OVERWRITE_TABLES = OptionBuilder.withArgName("overwriteTables").hasArg().isRequired(false).withDescription("If table meta conflicts, overwrite the one in metadata store with the one in srcPath. Use in caution because it might break existing cubes! Suggest to backup metadata store first").create("overwriteTables");
 
     private KylinConfig kylinConfig;
     private MetadataManager metadataManager;
@@ -158,8 +148,7 @@ public class CubeMetaIngester extends AbstractApplication {
 
         for (TableDesc tableDesc : srcMetadataManager.listAllTables()) {
             logger.info("add " + tableDesc + " to " + targetProjectName);
-            projectManager.addTableDescToProject(Lists.newArrayList(tableDesc.getIdentity()).toArray(new String[0]),
-                    targetProjectName);
+            projectManager.addTableDescToProject(Lists.newArrayList(tableDesc.getIdentity()).toArray(new String[0]), targetProjectName);
         }
 
         for (CubeInstance cube : srcCubeManager.listAllCubes()) {
@@ -170,8 +159,7 @@ public class CubeMetaIngester extends AbstractApplication {
 
     }
 
-    private void checkAndMark(MetadataManager srcMetadataManager, HybridManager srcHybridManager,
-            CubeManager srcCubeManager, CubeDescManager srcCubeDescManager) {
+    private void checkAndMark(MetadataManager srcMetadataManager, HybridManager srcHybridManager, CubeManager srcCubeManager, CubeDescManager srcCubeDescManager) {
         if (srcHybridManager.listHybridInstances().size() > 0) {
             throw new IllegalStateException("Does not support ingest hybrid yet");
         }
@@ -184,15 +172,12 @@ public class CubeMetaIngester extends AbstractApplication {
         for (TableDesc tableDesc : srcMetadataManager.listAllTables()) {
             TableDesc existing = metadataManager.getTableDesc(tableDesc.getIdentity());
             if (existing != null && !existing.equals(tableDesc)) {
-                logger.info("Table {} already has a different version in target metadata store",
-                        tableDesc.getIdentity());
+                logger.info("Table {} already has a different version in target metadata store", tableDesc.getIdentity());
                 logger.info("Existing version: " + existing);
                 logger.info("New version: " + tableDesc);
 
                 if (!forceIngest && !overwriteTables) {
-                    throw new IllegalStateException(
-                            "table already exists with a different version: " + tableDesc.getIdentity()
-                                    + ". Consider adding -overwriteTables option to force overwriting (with caution)");
+                    throw new IllegalStateException("table already exists with a different version: " + tableDesc.getIdentity() + ". Consider adding -overwriteTables option to force overwriting (with caution)");
                 } else {
                     logger.warn("Overwriting the old table desc: " + tableDesc.getIdentity());
                 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index 36feef8..08d4292 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -108,22 +108,12 @@ public class CubeMigrationCLI {
     }
 
     protected void usage() {
-        System.out.println(
-                "Usage: CubeMigrationCLI srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute");
-        System.out.println(" srcKylinConfigUri: The KylinConfig of the cube’s source \n"
-                + "dstKylinConfigUri: The KylinConfig of the cube’s new home \n"
-                + "cubeName: the name of cube to be migrated. \n"
-                + "projectName: The target project in the target environment.(Make sure it exist) \n"
-                + "copyAclOrNot: true or false: whether copy cube ACL to target environment. \n"
-                + "purgeOrNot: true or false: whether purge the cube from src server after the migration. \n"
-                + "overwriteIfExists: overwrite cube if it already exists in the target environment. \n"
-                + "realExecute: if false, just print the operations to take, if true, do the real migration. \n");
+        System.out.println("Usage: CubeMigrationCLI srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute");
+        System.out.println(" srcKylinConfigUri: The KylinConfig of the cube’s source \n" + "dstKylinConfigUri: The KylinConfig of the cube’s new home \n" + "cubeName: the name of cube to be migrated. \n" + "projectName: The target project in the target environment.(Make sure it exist) \n" + "copyAclOrNot: true or false: whether copy cube ACL to target environment. \n" + "purgeOrNot: true or false: whether purge the cube from src server after the migration. \n" + "overwriteIfExists: overwrite cube if it already exists in the target environment. \n" + "realExecute: if false, just print the operations to take, if true, do the real migration. \n");
 
     }
 
-    public void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeName, String projectName, String copyAcl,
-            String purgeAndDisable, String overwriteIfExists, String realExecute)
-            throws IOException, InterruptedException {
+    public void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
 
         srcConfig = srcCfg;
         srcStore = ResourceStore.getStore(srcConfig);
@@ -173,12 +163,9 @@ public class CubeMigrationCLI {
         }
     }
 
-    public void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl,
-            String purgeAndDisable, String overwriteIfExists, String realExecute)
-            throws IOException, InterruptedException {
+    public void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
 
-        moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName,
-                projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
+        moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName, projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
     }
 
     public void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException {
@@ -213,14 +200,12 @@ public class CubeMigrationCLI {
         if (cube.getDescriptor().getEngineType() != IStorageAware.ID_SHARDED_HBASE)
             return;
         for (CubeSegment segment : cube.getSegments()) {
-            operations
-                    .add(new Opt(OptType.CHANGE_HTABLE_HOST, new Object[] { segment.getStorageLocationIdentifier() }));
+            operations.add(new Opt(OptType.CHANGE_HTABLE_HOST, new Object[] { segment.getStorageLocationIdentifier() }));
         }
     }
 
     private void copyACL(CubeInstance cube, String projectName) {
-        operations.add(new Opt(OptType.COPY_ACL,
-                new Object[] { cube.getUuid(), cube.getDescriptor().getModel().getUuid(), projectName }));
+        operations.add(new Opt(OptType.COPY_ACL, new Object[] { cube.getUuid(), cube.getDescriptor().getModel().getUuid(), projectName }));
     }
 
     private void copyFilesInMetaStore(CubeInstance cube, String overwriteIfExists) throws IOException {
@@ -230,8 +215,7 @@ public class CubeMigrationCLI {
         listCubeRelatedResources(cube, metaItems, dictAndSnapshot);
 
         if (dstStore.exists(cube.getResourcePath()) && !overwriteIfExists.equalsIgnoreCase("true"))
-            throw new IllegalStateException("The cube named " + cube.getName()
-                    + " already exists on target metadata store. Use overwriteIfExists to overwrite it");
+            throw new IllegalStateException("The cube named " + cube.getName() + " already exists on target metadata store. Use overwriteIfExists to overwrite it");
 
         for (String item : metaItems) {
             operations.add(new Opt(OptType.COPY_FILE_IN_META, new Object[] { item }));
@@ -242,8 +226,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName)
-            throws IOException {
+    private void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
             throw new IllegalStateException("The target project " + projectName + " does not exist");
@@ -255,8 +238,7 @@ public class CubeMigrationCLI {
         operations.add(new Opt(OptType.PURGE_AND_DISABLE, new Object[] { cubeName }));
     }
 
-    protected void listCubeRelatedResources(CubeInstance cube, List<String> metaResource, Set<String> dictAndSnapshot)
-            throws IOException {
+    protected void listCubeRelatedResources(CubeInstance cube, List<String> metaResource, Set<String> dictAndSnapshot) throws IOException {
 
         CubeDesc cubeDesc = cube.getDescriptor();
         metaResource.add(cube.getResourcePath());
@@ -473,10 +455,8 @@ public class CubeMigrationCLI {
             Table srcAclHtable = null;
             Table destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -487,10 +467,8 @@ public class CubeMigrationCLI {
                         byte[] value = CellUtil.cloneValue(cell);
 
                         // use the target project uuid as the parent
-                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY)
-                                && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                            String valueString = "{\"id\":\"" + projUUID
-                                    + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                            String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
                             value = Bytes.toBytes(valueString);
                         }
                         Put put = new Put(Bytes.toBytes(cubeId));
@@ -565,8 +543,7 @@ public class CubeMigrationCLI {
             String modelId = (String) opt.params[1];
             Table destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));

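The COPY_ACL hunks above read the cube's ACL row cell by cell from the source table and re-put it into the destination, rewriting the parent-project column on the way. A condensed sketch of that copy loop using the plain HBase 1.x client API (table names, family/qualifier names and the rewritten JSON are placeholders, not Kylin's actual ACL layout):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class AclRowCopySketch {

    public static void copyRow(Connection conn, String rowKey) throws Exception {
        try (Table src = conn.getTable(TableName.valueOf("src_acl"));
             Table dst = conn.getTable(TableName.valueOf("dst_acl"))) {
            Result result = src.get(new Get(Bytes.toBytes(rowKey)));
            Put put = new Put(Bytes.toBytes(rowKey));
            for (Cell cell : result.rawCells()) {
                byte[] family = CellUtil.cloneFamily(cell);
                byte[] qualifier = CellUtil.cloneQualifier(cell);
                byte[] value = CellUtil.cloneValue(cell);
                // Rewrite the parent pointer column; keep every other cell as-is.
                if (Bytes.toString(family).equals("i") && Bytes.toString(qualifier).equals("p")) {
                    value = Bytes.toBytes("{\"id\":\"<target-project-uuid>\"}");
                }
                put.addColumn(family, qualifier, value);
            }
            if (!put.isEmpty()) {
                dst.put(put);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf)) {
            copyRow(conn, "<cube-uuid>");
        }
    }
}
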
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
index 040c702..54fbbc0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
@@ -54,14 +54,11 @@ public class CubeMigrationCheckCLI {
 
     private static final Logger logger = LoggerFactory.getLogger(CubeMigrationCheckCLI.class);
 
-    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false)
-            .withDescription("Fix the inconsistent cube segments' HOST").create("fix");
+    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false).withDescription("Fix the inconsistent cube segments' HOST").create("fix");
 
-    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false)
-            .withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
+    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false).withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
 
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("The name of cube migrated").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
     private HBaseAdmin hbaseAdmin;
@@ -191,9 +188,7 @@ public class CubeMigrationCheckCLI {
             for (String segFullName : inconsistentHTables) {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
-                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1]
-                        + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to "
-                        + dstCfg.getMetadataUrlPrefix());
+                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.disableTable(sepNameList[0]);
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.modifyTable(sepNameList[0], desc);
@@ -213,8 +208,7 @@ public class CubeMigrationCheckCLI {
         logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
         for (String segFullName : issueExistHTables) {
             String[] sepNameList = segFullName.split(",");
-            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1]
-                    + " has some issues and cannot be read successfully!!!");
+            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1] + " has some issues and cannot be read successfully!!!");
         }
         logger.info("----------------------------------------------------");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
index 4c34f47..e1f994f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
@@ -46,36 +46,25 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
     private static final int DEFAULT_PERIOD = 3;
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify realizations in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg()
-            .isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.")
-            .create("includeConf");
+    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg().isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.").create("includeConf");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_HBASE = OptionBuilder.withArgName("includeHBase").hasArg()
-            .isRequired(false).withDescription("Specify whether to include hbase files to extract. Default true.")
-            .create("includeHBase");
+    private static final Option OPTION_INCLUDE_HBASE = OptionBuilder.withArgName("includeHBase").hasArg().isRequired(false).withDescription("Specify whether to include hbase files to extract. Default true.").create("includeHBase");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg()
-            .isRequired(false).withDescription("Specify whether to include client info to extract. Default true.")
-            .create("includeClient");
+    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg().isRequired(false).withDescription("Specify whether to include client info to extract. Default true.").create("includeClient");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false)
-            .withDescription("Specify whether to include job info to extract. Default true.").create("includeJobs");
+    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("Specify whether to include job info to extract. Default true.").create("includeJobs");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_THREADS = OptionBuilder.withArgName("threads").hasArg().isRequired(false)
-            .withDescription("Specify number of threads for parallel extraction.").create("threads");
+    private static final Option OPTION_THREADS = OptionBuilder.withArgName("threads").hasArg().isRequired(false).withDescription("Specify number of threads for parallel extraction.").create("threads");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false)
-            .withDescription("specify how many days of kylin info to extract. Default " + DEFAULT_PERIOD + ".")
-            .create("period");
+    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false).withDescription("specify how many days of kylin info to extract. Default " + DEFAULT_PERIOD + ".").create("period");
 
     private static final int DEFAULT_PARALLEL_SIZE = 4;
 
@@ -119,19 +108,13 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
     @Override
     protected void executeExtract(final OptionsHelper optionsHelper, final File exportDir) throws IOException {
         final String projectInput = optionsHelper.getOptionValue(options.getOption("project"));
-        final boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
-        final boolean includeHBase = optionsHelper.hasOption(OPTION_INCLUDE_HBASE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_HBASE)) : true;
-        final boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
-        final boolean includeJob = optionsHelper.hasOption(OPTION_INCLUDE_JOB)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
-        final int threadsNum = optionsHelper.hasOption(OPTION_THREADS)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_THREADS)) : DEFAULT_PARALLEL_SIZE;
+        final boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
+        final boolean includeHBase = optionsHelper.hasOption(OPTION_INCLUDE_HBASE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_HBASE)) : true;
+        final boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
+        final boolean includeJob = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
+        final int threadsNum = optionsHelper.hasOption(OPTION_THREADS) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_THREADS)) : DEFAULT_PARALLEL_SIZE;
         final String projectNames = StringUtils.join(getProjects(projectInput), ",");
-        final int period = optionsHelper.hasOption(OPTION_PERIOD)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
+        final int period = optionsHelper.hasOption(OPTION_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
 
         logger.info("Start diagnosis info extraction in {} threads.", threadsNum);
         executorService = Executors.newFixedThreadPool(threadsNum);
@@ -142,8 +125,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
             public void run() {
                 logger.info("Start to extract metadata.");
                 try {
-                    String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project",
-                            projectNames, "-compress", "false", "-includeJobs", "false", "-submodule", "true" };
+                    String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-includeJobs", "false", "-submodule", "true" };
                     CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
                     logger.info("CubeMetaExtractor args: " + Arrays.toString(cubeMetaArgs));
                     cubeMetaExtractor.execute(cubeMetaArgs);
@@ -160,8 +142,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 public void run() {
                     logger.info("Start to extract jobs.");
                     try {
-                        String[] jobArgs = { "-destDir", new File(exportDir, "jobs").getAbsolutePath(), "-period",
-                                Integer.toString(period), "-compress", "false", "-submodule", "true" };
+                        String[] jobArgs = { "-destDir", new File(exportDir, "jobs").getAbsolutePath(), "-period", Integer.toString(period), "-compress", "false", "-submodule", "true" };
                         JobInstanceExtractor jobInstanceExtractor = new JobInstanceExtractor();
                         jobInstanceExtractor.execute(jobArgs);
                     } catch (Exception e) {
@@ -178,8 +159,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 public void run() {
                     logger.info("Start to extract HBase usage.");
                     try {
-                        String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project",
-                                projectNames, "-compress", "false", "-submodule", "true" };
+                        String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-submodule", "true" };
                         HBaseUsageExtractor hBaseUsageExtractor = new HBaseUsageExtractor();
                         logger.info("HBaseUsageExtractor args: " + Arrays.toString(hbaseArgs));
                         hBaseUsageExtractor.execute(hbaseArgs);
@@ -200,8 +180,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                         File destConfDir = new File(exportDir, "conf");
                         FileUtils.forceMkdir(destConfDir);
                         File srcConfDir = new File(ToolUtil.getConfFolder());
-                        Preconditions.checkState(srcConfDir.exists(),
-                                "Cannot find config dir: " + srcConfDir.getAbsolutePath());
+                        Preconditions.checkState(srcConfDir.exists(), "Cannot find config dir: " + srcConfDir.getAbsolutePath());
                         File[] confFiles = srcConfDir.listFiles();
                         if (confFiles != null) {
                             for (File confFile : confFiles) {
@@ -221,8 +200,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 @Override
                 public void run() {
                     try {
-                        String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(),
-                                "-compress", "false", "-submodule", "true" };
+                        String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
                         ClientEnvExtractor clientEnvExtractor = new ClientEnvExtractor();
                         logger.info("ClientEnvExtractor args: " + Arrays.toString(clientArgs));
                         clientEnvExtractor.execute(clientArgs);
@@ -239,8 +217,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
             public void run() {
                 logger.info("Start to extract logs.");
                 try {
-                    String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-logPeriod",
-                            Integer.toString(period), "-compress", "false", "-submodule", "true" };
+                    String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-logPeriod", Integer.toString(period), "-compress", "false", "-submodule", "true" };
                     KylinLogExtractor logExtractor = new KylinLogExtractor();
                     logger.info("KylinLogExtractor args: " + Arrays.toString(logsArgs));
                     logExtractor.execute(logsArgs);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index 3df0cbf..f52fc3e 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -118,8 +118,7 @@ public class ExtendCubeToHybridCLI {
     }
 
     public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
-        logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date["
-                + partitionDateStr + "].");
+        logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");
 
         CubeInstance cubeInstance = cubeManager.getCube(cubeName);
         if (!validateCubeInstance(cubeInstance)) {
@@ -153,8 +152,7 @@ public class ExtendCubeToHybridCLI {
         CubeSegment currentSeg = null;
         while (segmentIterator.hasNext()) {
             currentSeg = segmentIterator.next();
-            if (partitionDateStr != null && (currentSeg.getDateRangeStart() >= partitionDate
-                    || currentSeg.getDateRangeEnd() > partitionDate)) {
+            if (partitionDateStr != null && (currentSeg.getDateRangeStart() >= partitionDate || currentSeg.getDateRangeEnd() > partitionDate)) {
                 segmentIterator.remove();
                 logger.info("CubeSegment[" + currentSeg + "] was removed.");
             }
@@ -197,11 +195,9 @@ public class ExtendCubeToHybridCLI {
         List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
         realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
         realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
-        HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()),
-                realizationEntries);
+        HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
         store.putResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
-        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID,
-                hybridInstance.getName(), projectName, owner);
+        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
         logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());
 
         // copy Acl from old cube to new cube
@@ -238,8 +234,7 @@ public class ExtendCubeToHybridCLI {
         String projUUID = project.getUuid();
         Table aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl())
-                    .getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -250,10 +245,8 @@ public class ExtendCubeToHybridCLI {
                     byte[] value = CellUtil.cloneValue(cell);
 
                     // use the target project uuid as the parent
-                    if (Bytes.toString(family).equals(ACL_INFO_FAMILY)
-                            && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                        String valueString = "{\"id\":\"" + projUUID
-                                + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                    if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                        String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
                         value = Bytes.toBytes(valueString);
                     }
                     Put put = new Put(Bytes.toBytes(newCubeId));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java b/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
index 8d0da59..0d8c08f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
@@ -53,11 +53,9 @@ public class HBaseUsageExtractor extends AbstractInfoExtractor {
 
     private static final Logger logger = LoggerFactory.getLogger(HBaseUsageExtractor.class);
     @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("Specify which cube to extract").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify realizations in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
 
     private List<String> htables = Lists.newArrayList();
     private Configuration conf;
@@ -103,7 +101,7 @@ public class HBaseUsageExtractor extends AbstractInfoExtractor {
 
         if (optionsHelper.hasOption(OPTION_PROJECT)) {
             String projectNames = optionsHelper.getOptionValue(OPTION_PROJECT);
-            for (String projectName : projectNames.split(",")) {
+            for (String projectName: projectNames.split(",")) {
                 ProjectInstance projectInstance = projectManager.getProject(projectName);
                 if (projectInstance == null) {
                     throw new IllegalArgumentException("Project " + projectName + " does not exist");
@@ -190,11 +188,9 @@ public class HBaseUsageExtractor extends AbstractInfoExtractor {
             File hdfsDir = new File(dest, "hdfs");
             FileUtils.forceMkdir(hdfsDir);
             CliCommandExecutor cliCommandExecutor = kylinConfig.getCliCommandExecutor();
-            String output = cliCommandExecutor
-                    .execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/KYLIN_*").getSecond();
+            String output = cliCommandExecutor.execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/KYLIN_*").getSecond();
             FileUtils.writeStringToFile(new File(hdfsDir, "hdfs-files.list"), output, Charset.defaultCharset());
-            output = cliCommandExecutor
-                    .execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/kylin_*").getSecond();
+            output = cliCommandExecutor.execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/kylin_*").getSecond();
             FileUtils.writeStringToFile(new File(hdfsDir, "hdfs-files.list"), output, Charset.defaultCharset(), true);
         } catch (Exception e) {
             logger.warn("HBase hdfs status fetch failed: ", e);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
index 85dfa98..04dbef7 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
@@ -46,30 +46,19 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(JobDiagnosisInfoCLI.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_JOB_ID = OptionBuilder.withArgName("jobId").hasArg().isRequired(true)
-            .withDescription("specify the Job ID to extract information. ").create("jobId");
+    private static final Option OPTION_JOB_ID = OptionBuilder.withArgName("jobId").hasArg().isRequired(true).withDescription("specify the Job ID to extract information. ").create("jobId");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CUBE = OptionBuilder.withArgName("includeCube").hasArg()
-            .isRequired(false)
-            .withDescription("set this to true if want to extract related cube info too. Default true")
-            .create("includeCube");
+    private static final Option OPTION_INCLUDE_CUBE = OptionBuilder.withArgName("includeCube").hasArg().isRequired(false).withDescription("set this to true if want to extract related cube info too. Default true").create("includeCube");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_YARN_LOGS = OptionBuilder.withArgName("includeYarnLogs").hasArg()
-            .isRequired(false)
-            .withDescription("set this to true if want to extract related yarn logs too. Default true")
-            .create("includeYarnLogs");
+    private static final Option OPTION_INCLUDE_YARN_LOGS = OptionBuilder.withArgName("includeYarnLogs").hasArg().isRequired(false).withDescription("set this to true if want to extract related yarn logs too. Default true").create("includeYarnLogs");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg()
-            .isRequired(false).withDescription("Specify whether to include client info to extract. Default true.")
-            .create("includeClient");
+    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg().isRequired(false).withDescription("Specify whether to include client info to extract. Default true.").create("includeClient");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg()
-            .isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.")
-            .create("includeConf");
+    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg().isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.").create("includeConf");
 
     List<String> requiredResources = Lists.newArrayList();
     List<String> yarnLogsResources = Lists.newArrayList();
@@ -99,14 +88,10 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
     @Override
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         String kylinJobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
-        boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
-        boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
-        boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
-        boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
+        boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
+        boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
+        boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
+        boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
 
         // dump job output
         logger.info("Start to dump job output");
@@ -128,8 +113,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
             if (!StringUtils.isEmpty(cubeName)) {
                 File metaDir = new File(exportDir, "cube");
                 FileUtils.forceMkdir(metaDir);
-                String[] cubeMetaArgs = { "-cube", cubeName, "-destDir", new File(metaDir, cubeName).getAbsolutePath(),
-                        "-includeJobs", "false", "-compress", "false", "-submodule", "true" };
+                String[] cubeMetaArgs = { "-cube", cubeName, "-destDir", new File(metaDir, cubeName).getAbsolutePath(), "-includeJobs", "false", "-compress", "false", "-submodule", "true" };
                 logger.info("Start to extract related cube: " + StringUtils.join(cubeMetaArgs));
                 CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
                 logger.info("CubeMetaExtractor args: " + Arrays.toString(cubeMetaArgs));
@@ -149,8 +133,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
         }
 
         if (includeClient) {
-            String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress", "false",
-                    "-submodule", "true" };
+            String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
             ClientEnvExtractor clientEnvExtractor = new ClientEnvExtractor();
             logger.info("ClientEnvExtractor args: " + Arrays.toString(clientArgs));
             clientEnvExtractor.execute(clientArgs);
@@ -167,8 +150,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
         }
 
         // export kylin logs
-        String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false",
-                "-submodule", "true" };
+        String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
         KylinLogExtractor logExtractor = new KylinLogExtractor();
         logger.info("KylinLogExtractor args: " + Arrays.toString(logsArgs));
         logExtractor.execute(logsArgs);
@@ -212,8 +194,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
         if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
             String mrJobId = jobInfo.get(ExecutableConstants.MR_JOB_ID);
             FileUtils.forceMkdir(destDir);
-            String[] mrJobArgs = { "-mrJobId", mrJobId, "-destDir", destDir.getAbsolutePath(), "-compress", "false",
-                    "-submodule", "true" };
+            String[] mrJobArgs = { "-mrJobId", mrJobId, "-destDir", destDir.getAbsolutePath(), "-compress", "false", "-submodule", "true" };
             new MrJobInfoExtractor().execute(mrJobArgs);
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java b/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
index 70aaac2..068dbda 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
@@ -55,15 +55,11 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
     private static final int DEFAULT_PERIOD = 3;
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify jobs in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify jobs in which project to extract").create("project");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("Specify jobs related to which cube to extract").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify jobs related to which cube to extract").create("cube");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false)
-            .withDescription("specify how many days of kylin jobs to extract. Default " + DEFAULT_PERIOD + ".")
-            .create("period");
+    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false).withDescription("specify how many days of kylin jobs to extract. Default " + DEFAULT_PERIOD + ".").create("period");
 
     KylinConfig config;
     ProjectManager projectManager;
@@ -85,8 +81,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         String cube = optionsHelper.hasOption(OPTION_CUBE) ? optionsHelper.getOptionValue(OPTION_CUBE) : null;
         String project = optionsHelper.hasOption(OPTION_PROJECT) ? optionsHelper.getOptionValue(OPTION_PROJECT) : null;
-        int period = optionsHelper.hasOption(OPTION_PERIOD)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
+        int period = optionsHelper.hasOption(OPTION_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
 
         long endTime = System.currentTimeMillis();
         long startTime = endTime - period * 24 * 3600 * 1000; // time in Millis
@@ -112,8 +107,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
                         shouldExtract = true;
                     } else {
                         ProjectInstance projectInstance = projectManager.getProject(project);
-                        if (projectInstance != null
-                                && projectInstance.containsRealization(RealizationType.CUBE, cubeName)) {
+                        if (projectInstance != null && projectInstance.containsRealization(RealizationType.CUBE, cubeName)) {
                             shouldExtract = true;
                         }
                     }
@@ -141,8 +135,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
         result.setMrWaiting(AbstractExecutable.getExtraInfoAsLong(output, CubingJob.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
         result.setExecStartTime(AbstractExecutable.getStartTime(output));
         result.setExecEndTime(AbstractExecutable.getEndTime(output));
-        result.setDuration(AbstractExecutable.getDuration(AbstractExecutable.getStartTime(output),
-                AbstractExecutable.getEndTime(output)) / 1000);
+        result.setDuration(AbstractExecutable.getDuration(AbstractExecutable.getStartTime(output), AbstractExecutable.getEndTime(output)) / 1000);
         for (int i = 0; i < cubeJob.getTasks().size(); ++i) {
             AbstractExecutable task = cubeJob.getTasks().get(i);
             result.addStep(parseToJobStep(task, i, outputs.get(task.getId())));
@@ -187,9 +180,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
         }
         if (task instanceof MapReduceExecutable) {
             result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
-            result.setExecWaitTime(
-                    AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L)
-                            / 1000);
+            result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
         }
         if (task instanceof HadoopShellExecutable) {
             result.setExecCmd(((HadoopShellExecutable) task).getJobParams());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
index 118eb66..6555c4d 100644
--- a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
@@ -43,9 +43,7 @@ public class KylinLogExtractor extends AbstractInfoExtractor {
     private static final int DEFAULT_LOG_PERIOD = 3;
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_LOG_PERIOD = OptionBuilder.withArgName("logPeriod").hasArg().isRequired(false)
-            .withDescription("specify how many days of kylin logs to extract. Default " + DEFAULT_LOG_PERIOD + ".")
-            .create("logPeriod");
+    private static final Option OPTION_LOG_PERIOD = OptionBuilder.withArgName("logPeriod").hasArg().isRequired(false).withDescription("specify how many days of kylin logs to extract. Default " + DEFAULT_LOG_PERIOD + ".").create("logPeriod");
 
     KylinConfig config;
 
@@ -76,8 +74,7 @@ public class KylinLogExtractor extends AbstractInfoExtractor {
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         beforeExtract();
 
-        int logPeriod = optionsHelper.hasOption(OPTION_LOG_PERIOD)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_LOG_PERIOD)) : DEFAULT_LOG_PERIOD;
+        int logPeriod = optionsHelper.hasOption(OPTION_LOG_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_LOG_PERIOD)) : DEFAULT_LOG_PERIOD;
 
         if (logPeriod < 1) {
             logger.warn("No logs to extract.");


[29/67] [abbrv] kylin git commit: minor, unify log4j conversion patterns

Posted by li...@apache.org.
minor, unify log4j conversion patterns


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/8edff35a
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/8edff35a
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/8edff35a

Branch: refs/heads/master
Commit: 8edff35a2229d2541c1d7549bc00bd08e4a59205
Parents: 1958584
Author: lidongsjtu <li...@apache.org>
Authored: Sat May 27 11:22:40 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Sat May 27 11:24:39 2017 +0800

----------------------------------------------------------------------
 build/conf/kylin-tools-log4j.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/8edff35a/build/conf/kylin-tools-log4j.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin-tools-log4j.properties b/build/conf/kylin-tools-log4j.properties
index d47f9a2..d4d32bf 100644
--- a/build/conf/kylin-tools-log4j.properties
+++ b/build/conf/kylin-tools-log4j.properties
@@ -30,7 +30,7 @@ log4j.rootLogger=INFO,stderr
 log4j.appender.stderr=org.apache.log4j.ConsoleAppender
 log4j.appender.stderr.Target=System.err
 log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} %-5p [%t %c{1}:%L]: %m%n
+log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}:%L : %m%n
 
 #log4j.logger.org.apache.hadoop=ERROR
 log4j.logger.org.apache.kylin=DEBUG

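For reference, under log4j 1.x PatternLayout semantics the two conversion patterns in the hunk above format a record roughly as follows. The timestamp, thread name, logger, line number and message are illustrative values only, not taken from the commit:

  old  %d{ISO8601} %-5p [%t %c{1}:%L]: %m%n
       2017-05-27 11:22:40,123 DEBUG [main KylinConfig:327]: Loading kylin.properties

  new  %d{ISO8601} %-5p [%t] %c{2}:%L : %m%n
       2017-05-27 11:22:40,123 DEBUG [main] common.KylinConfig:327 : Loading kylin.properties

That is, the thread name is now bracketed on its own and the logger is shown with its last two name components (%c{2}) instead of one; presumably this matches the layout used by Kylin's other log4j configurations, hence "unify" in the commit message.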

[16/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
index 2c502bb..3eba78b 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
@@ -97,12 +97,10 @@ public class TopNCounterTest {
     //@Test
     public void testSingleSpaceSaving() throws IOException {
         String dataFile = prepareTestDate();
-        TopNCounterTest.SpaceSavingConsumer spaceSavingCounter = new TopNCounterTest.SpaceSavingConsumer(
-                TOP_K * SPACE_SAVING_ROOM);
+        TopNCounterTest.SpaceSavingConsumer spaceSavingCounter = new TopNCounterTest.SpaceSavingConsumer(TOP_K * SPACE_SAVING_ROOM);
         TopNCounterTest.HashMapConsumer accurateCounter = new TopNCounterTest.HashMapConsumer();
 
-        for (TopNCounterTest.TestDataConsumer consumer : new TopNCounterTest.TestDataConsumer[] { spaceSavingCounter,
-                accurateCounter }) {
+        for (TopNCounterTest.TestDataConsumer consumer : new TopNCounterTest.TestDataConsumer[] { spaceSavingCounter, accurateCounter }) {
             feedDataToConsumer(dataFile, consumer, 0, TOTAL_RECORDS);
         }
 
@@ -111,8 +109,7 @@ public class TopNCounterTest {
         compareResult(spaceSavingCounter, accurateCounter);
     }
 
-    private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer,
-            TopNCounterTest.TestDataConsumer secondConsumer) {
+    private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer, TopNCounterTest.TestDataConsumer secondConsumer) {
         List<Pair<String, Double>> topResult1 = firstConsumer.getTopN(TOP_K);
         outputMsg("Get topN, Space saving takes " + firstConsumer.getSpentTime() / 1000 + " seconds");
         List<Pair<String, Double>> realSequence = secondConsumer.getTopN(TOP_K);
@@ -126,10 +123,8 @@ public class TopNCounterTest {
                 //            if (topResult1.get(i).getFirst().equals(realSequence.get(i).getFirst()) && topResult1.get(i).getSecond().doubleValue() == realSequence.get(i).getSecond().doubleValue()) {
                 outputMsg("Passed; key:" + topResult1.get(i).getFirst() + ", value:" + topResult1.get(i).getSecond());
             } else {
-                outputMsg("Failed; space saving key:" + topResult1.get(i).getFirst() + ", value:"
-                        + topResult1.get(i).getSecond());
-                outputMsg("Failed; correct key:" + realSequence.get(i).getFirst() + ", value:"
-                        + realSequence.get(i).getSecond());
+                outputMsg("Failed; space saving key:" + topResult1.get(i).getFirst() + ", value:" + topResult1.get(i).getSecond());
+                outputMsg("Failed; correct key:" + realSequence.get(i).getFirst() + ", value:" + realSequence.get(i).getSecond());
                 error++;
             }
         }
@@ -172,8 +167,7 @@ public class TopNCounterTest {
 
     }
 
-    private TopNCounterTest.SpaceSavingConsumer[] singleMerge(TopNCounterTest.SpaceSavingConsumer[] consumers)
-            throws IOException, ClassNotFoundException {
+    private TopNCounterTest.SpaceSavingConsumer[] singleMerge(TopNCounterTest.SpaceSavingConsumer[] consumers) throws IOException, ClassNotFoundException {
         List<TopNCounterTest.SpaceSavingConsumer> list = Lists.newArrayList();
         if (consumers.length == 1)
             return consumers;
@@ -189,8 +183,7 @@ public class TopNCounterTest {
 
     }
 
-    private TopNCounterTest.SpaceSavingConsumer[] binaryMerge(TopNCounterTest.SpaceSavingConsumer[] consumers)
-            throws IOException, ClassNotFoundException {
+    private TopNCounterTest.SpaceSavingConsumer[] binaryMerge(TopNCounterTest.SpaceSavingConsumer[] consumers) throws IOException, ClassNotFoundException {
         List<TopNCounterTest.SpaceSavingConsumer> list = Lists.newArrayList();
         if (consumers.length == 1)
             return consumers;
@@ -206,8 +199,7 @@ public class TopNCounterTest {
         return binaryMerge(list.toArray(new TopNCounterTest.SpaceSavingConsumer[list.size()]));
     }
 
-    private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsumer consumer, int startLine,
-            int endLine) throws IOException {
+    private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsumer consumer, int startLine, int endLine) throws IOException {
         long startTime = System.currentTimeMillis();
         BufferedReader bufferedReader = new BufferedReader(new FileReader(dataFile));
 
@@ -222,8 +214,7 @@ public class TopNCounterTest {
         }
 
         bufferedReader.close();
-        outputMsg("feed data to " + consumer.getClass().getCanonicalName() + " take time (seconds): "
-                + (System.currentTimeMillis() - startTime) / 1000);
+        outputMsg("feed data to " + consumer.getClass().getCanonicalName() + " take time (seconds): " + (System.currentTimeMillis() - startTime) / 1000);
     }
 
     private void outputMsg(String msg) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManagerTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManagerTest.java b/core-metadata/src/test/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManagerTest.java
index 7fbef13..690e1fe 100644
--- a/core-metadata/src/test/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManagerTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/metadata/badquery/BadQueryHistoryManagerTest.java
@@ -44,8 +44,7 @@ public class BadQueryHistoryManagerTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testBasics() throws Exception {
-        BadQueryHistory history = BadQueryHistoryManager.getInstance(getTestConfig())
-                .getBadQueriesForProject("default");
+        BadQueryHistory history = BadQueryHistoryManager.getInstance(getTestConfig()).getBadQueriesForProject("default");
         System.out.println(JsonUtil.writeValueAsIndentString(history));
 
         NavigableSet<BadQueryEntry> entries = history.getEntries();
@@ -65,8 +64,7 @@ public class BadQueryHistoryManagerTest extends LocalFileMetadataTestCase {
     public void testAddEntryToProject() throws IOException {
         KylinConfig kylinConfig = getTestConfig();
         BadQueryHistoryManager manager = BadQueryHistoryManager.getInstance(kylinConfig);
-        BadQueryHistory history = manager.addEntryToProject("sql", 1459362239992L, "adj", 100, "server", "t-0", "user",
-                "default");
+        BadQueryHistory history = manager.addEntryToProject("sql", 1459362239992L, "adj", 100, "server", "t-0", "user", "default");
         NavigableSet<BadQueryEntry> entries = history.getEntries();
         assertEquals(3, entries.size());
 
@@ -81,8 +79,7 @@ public class BadQueryHistoryManagerTest extends LocalFileMetadataTestCase {
         assertEquals("t-0", newEntry.getThread());
 
         for (int i = 0; i < kylinConfig.getBadQueryHistoryNum(); i++) {
-            history = manager.addEntryToProject("sql", 1459362239993L + i, "adj", 100 + i, "server", "t-0", "user",
-                    "default");
+            history = manager.addEntryToProject("sql", 1459362239993L + i, "adj", 100 + i, "server", "t-0", "user", "default");
         }
         assertEquals(kylinConfig.getBadQueryHistoryNum(), history.getEntries().size());
     }
@@ -93,12 +90,10 @@ public class BadQueryHistoryManagerTest extends LocalFileMetadataTestCase {
         BadQueryHistoryManager manager = BadQueryHistoryManager.getInstance(kylinConfig);
 
         manager.addEntryToProject("sql", 1459362239000L, "adj", 100, "server", "t-0", "user", "default");
-        BadQueryHistory history = manager.updateEntryToProject("sql", 1459362239000L, "adj2", 120, "server2", "t-1",
-                "user", "default");
+        BadQueryHistory history = manager.updateEntryToProject("sql", 1459362239000L, "adj2", 120, "server2", "t-1", "user", "default");
 
         NavigableSet<BadQueryEntry> entries = history.getEntries();
-        BadQueryEntry newEntry = entries
-                .floor(new BadQueryEntry("sql", "adj2", 1459362239000L, 120, "server2", "t-1", "user"));
+        BadQueryEntry newEntry = entries.floor(new BadQueryEntry("sql", "adj2", 1459362239000L, 120, "server2", "t-1", "user"));
         System.out.println(newEntry);
         assertEquals("adj2", newEntry.getAdj());
         assertEquals("server2", newEntry.getServer());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/metadata/model/DataModelDescTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/metadata/model/DataModelDescTest.java b/core-metadata/src/test/java/org/apache/kylin/metadata/model/DataModelDescTest.java
index a207975..f6d6fc3 100644
--- a/core-metadata/src/test/java/org/apache/kylin/metadata/model/DataModelDescTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/metadata/model/DataModelDescTest.java
@@ -57,8 +57,7 @@ public class DataModelDescTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testNoDupColInDimAndMeasure() {
-        DataModelDesc model = MetadataManager.getInstance(getTestConfig())
-                .getDataModelDesc("test_kylin_inner_join_model_desc");
+        DataModelDesc model = MetadataManager.getInstance(getTestConfig()).getDataModelDesc("test_kylin_inner_join_model_desc");
         String[] metrics = model.getMetrics();
         TblColRef col = model.findColumn("edw.test_cal_dt.cal_dt");
         assertTrue(metrics.length == 2);
@@ -67,8 +66,7 @@ public class DataModelDescTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testGetCopyOf() throws JsonProcessingException {
-        DataModelDesc desc = MetadataManager.getInstance(getTestConfig())
-                .getDataModelDesc("test_kylin_inner_join_model_desc");
+        DataModelDesc desc = MetadataManager.getInstance(getTestConfig()).getDataModelDesc("test_kylin_inner_join_model_desc");
         DataModelDesc copyDesc = DataModelDesc.getCopyOf(desc);
 
         // uuid is different, set to equals for json comparison
@@ -83,8 +81,7 @@ public class DataModelDescTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testPartitionDescCopyOf() throws JsonProcessingException {
-        PartitionDesc desc = MetadataManager.getInstance(getTestConfig())
-                .getDataModelDesc("test_kylin_inner_join_model_desc").partitionDesc;
+        PartitionDesc desc = MetadataManager.getInstance(getTestConfig()).getDataModelDesc("test_kylin_inner_join_model_desc").partitionDesc;
         PartitionDesc copyDesc = PartitionDesc.getCopyOf(desc);
 
         String descStr = JsonUtil.writeValueAsIndentString(desc);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/metadata/model/DefaultPartitionConditionBuilderTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/metadata/model/DefaultPartitionConditionBuilderTest.java b/core-metadata/src/test/java/org/apache/kylin/metadata/model/DefaultPartitionConditionBuilderTest.java
index ec41c6f..1b610ca 100644
--- a/core-metadata/src/test/java/org/apache/kylin/metadata/model/DefaultPartitionConditionBuilderTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/metadata/model/DefaultPartitionConditionBuilderTest.java
@@ -38,10 +38,8 @@ public class DefaultPartitionConditionBuilderTest {
         partitionDesc.setPartitionDateColumnRef(col);
         partitionDesc.setPartitionDateColumn(col.getCanonicalName());
         partitionDesc.setPartitionDateFormat("yyyy-MM-dd");
-        String condition = partitionConditionBuilder.buildDateRangeCondition(partitionDesc,
-                DateFormat.stringToMillis("2016-02-22"), DateFormat.stringToMillis("2016-02-23"));
-        Assert.assertEquals("UNKNOWN_ALIAS.DATE_COLUMN >= '2016-02-22' AND UNKNOWN_ALIAS.DATE_COLUMN < '2016-02-23'",
-                condition);
+        String condition = partitionConditionBuilder.buildDateRangeCondition(partitionDesc, DateFormat.stringToMillis("2016-02-22"), DateFormat.stringToMillis("2016-02-23"));
+        Assert.assertEquals("UNKNOWN_ALIAS.DATE_COLUMN >= '2016-02-22' AND UNKNOWN_ALIAS.DATE_COLUMN < '2016-02-23'", condition);
     }
 
     @Test
@@ -51,8 +49,7 @@ public class DefaultPartitionConditionBuilderTest {
         partitionDesc.setPartitionTimeColumnRef(col);
         partitionDesc.setPartitionTimeColumn(col.getCanonicalName());
         partitionDesc.setPartitionTimeFormat("HH");
-        String condition = partitionConditionBuilder.buildDateRangeCondition(partitionDesc,
-                DateFormat.stringToMillis("2016-02-22 00:00:00"), DateFormat.stringToMillis("2016-02-23 01:00:00"));
+        String condition = partitionConditionBuilder.buildDateRangeCondition(partitionDesc, DateFormat.stringToMillis("2016-02-22 00:00:00"), DateFormat.stringToMillis("2016-02-23 01:00:00"));
         Assert.assertEquals("UNKNOWN_ALIAS.HOUR_COLUMN >= '00' AND UNKNOWN_ALIAS.HOUR_COLUMN < '01'", condition);
     }
 
@@ -67,11 +64,8 @@ public class DefaultPartitionConditionBuilderTest {
         partitionDesc.setPartitionTimeColumnRef(col2);
         partitionDesc.setPartitionTimeColumn(col2.getCanonicalName());
         partitionDesc.setPartitionTimeFormat("H");
-        String condition = partitionConditionBuilder.buildDateRangeCondition(partitionDesc,
-                DateFormat.stringToMillis("2016-02-22 00:00:00"), DateFormat.stringToMillis("2016-02-23 01:00:00"));
-        Assert.assertEquals(
-                "((UNKNOWN_ALIAS.DATE_COLUMN = '2016-02-22' AND UNKNOWN_ALIAS.HOUR_COLUMN >= '0') OR (UNKNOWN_ALIAS.DATE_COLUMN > '2016-02-22')) AND ((UNKNOWN_ALIAS.DATE_COLUMN = '2016-02-23' AND UNKNOWN_ALIAS.HOUR_COLUMN < '1') OR (UNKNOWN_ALIAS.DATE_COLUMN < '2016-02-23'))",
-                condition);
+        String condition = partitionConditionBuilder.buildDateRangeCondition(partitionDesc, DateFormat.stringToMillis("2016-02-22 00:00:00"), DateFormat.stringToMillis("2016-02-23 01:00:00"));
+        Assert.assertEquals("((UNKNOWN_ALIAS.DATE_COLUMN = '2016-02-22' AND UNKNOWN_ALIAS.HOUR_COLUMN >= '0') OR (UNKNOWN_ALIAS.DATE_COLUMN > '2016-02-22')) AND ((UNKNOWN_ALIAS.DATE_COLUMN = '2016-02-23' AND UNKNOWN_ALIAS.HOUR_COLUMN < '1') OR (UNKNOWN_ALIAS.DATE_COLUMN < '2016-02-23'))", condition);
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/metadata/model/JoinsTreeTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/metadata/model/JoinsTreeTest.java b/core-metadata/src/test/java/org/apache/kylin/metadata/model/JoinsTreeTest.java
index a600415..d5e9de7 100644
--- a/core-metadata/src/test/java/org/apache/kylin/metadata/model/JoinsTreeTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/metadata/model/JoinsTreeTest.java
@@ -50,7 +50,7 @@ public class JoinsTreeTest extends LocalFileMetadataTestCase {
         MetadataManager mgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
         DataModelDesc model = mgr.getDataModelDesc("ci_left_join_model");
         JoinsTree joinsTree = model.getJoinsTree();
-
+        
         Chain chain = joinsTree.tableChains.get("BUYER_COUNTRY");
         assertTrue(chain.table == model.findTable("BUYER_COUNTRY"));
         assertTrue(chain.fkSide.table == model.findTable("BUYER_ACCOUNT"));
@@ -59,7 +59,7 @@ public class JoinsTreeTest extends LocalFileMetadataTestCase {
         assertTrue(chain.fkSide.fkSide.fkSide.join == null);
         assertTrue(chain.fkSide.fkSide.fkSide.fkSide == null);
     }
-
+    
     @Test
     public void testMatch() {
         MetadataManager mgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/test/java/org/apache/kylin/source/datagen/DataGenTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/source/datagen/DataGenTest.java b/core-metadata/src/test/java/org/apache/kylin/source/datagen/DataGenTest.java
index 7d25b34..5e3ca9f 100644
--- a/core-metadata/src/test/java/org/apache/kylin/source/datagen/DataGenTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/source/datagen/DataGenTest.java
@@ -45,7 +45,7 @@ public class DataGenTest extends LocalFileMetadataTestCase {
         DataModelDesc model = getModel("ci_inner_join_model");
         ModelDataGenerator gen = new ModelDataGenerator(model, 100);
         gen.outprint = true;
-
+        
         gen.generate();
     }
 
@@ -54,7 +54,7 @@ public class DataGenTest extends LocalFileMetadataTestCase {
         DataModelDesc model = getModel("ssb");
         ModelDataGenerator gen = new ModelDataGenerator(model, 100);
         gen.outprint = true;
-
+        
         gen.generate();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
index 2ffb3bb..78cf97c 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
@@ -68,8 +68,7 @@ public class StorageContext {
     //the limit here correspond to the limit concept in SQL
     //also take into consideration Statement.setMaxRows in JDBC
     private int getLimit() {
-        if (overlookOuterLimit || BackdoorToggles.getStatementMaxRows() == null
-                || BackdoorToggles.getStatementMaxRows() == 0) {
+        if (overlookOuterLimit || BackdoorToggles.getStatementMaxRows() == null || BackdoorToggles.getStatementMaxRows() == 0) {
             return limit;
         } else {
             return Math.min(limit, BackdoorToggles.getStatementMaxRows());
@@ -78,8 +77,7 @@ public class StorageContext {
 
     public void setLimit(int l) {
         if (limit != Integer.MAX_VALUE) {
-            logger.warn("Setting limit to {} but in current olap context, the limit is already {}, won't apply", l,
-                    limit);
+            logger.warn("Setting limit to {} but in current olap context, the limit is already {}, won't apply", l, limit);
         } else {
             limit = l;
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
index 73fde2b..7e811a4 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.storage.adhoc;
 
-import java.util.List;
-
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.List;
+
 public abstract class AdHocRunnerBase {
 
     private static final Logger logger = LoggerFactory.getLogger(AdHocRunnerBase.class);
@@ -44,6 +44,5 @@ public abstract class AdHocRunnerBase {
 
     public abstract void init();
 
-    public abstract void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas)
-            throws Exception;
-}
+    public abstract void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas) throws Exception;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
index 84c431d..cecea85 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
@@ -72,8 +72,7 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
 
     protected StorageContext context;
 
-    public CubeScanRangePlanner(CubeSegment cubeSegment, Cuboid cuboid, TupleFilter filter, Set<TblColRef> dimensions,
-            Set<TblColRef> groupByDims, //
+    public CubeScanRangePlanner(CubeSegment cubeSegment, Cuboid cuboid, TupleFilter filter, Set<TblColRef> dimensions, Set<TblColRef> groupByDims, //
             Collection<FunctionDesc> metrics, TupleFilter havingFilter, StorageContext context) {
         this.context = context;
 
@@ -100,13 +99,11 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
 
         //replace the constant values in filter to dictionary codes
         Set<TblColRef> groupByPushDown = Sets.newHashSet(groupByDims);
-        this.gtFilter = GTUtil.convertFilterColumnsAndConstants(filter, gtInfo, mapping.getCuboidDimensionsInGTOrder(),
-                groupByPushDown);
+        this.gtFilter = GTUtil.convertFilterColumnsAndConstants(filter, gtInfo, mapping.getCuboidDimensionsInGTOrder(), groupByPushDown);
         this.havingFilter = havingFilter;
 
         this.gtDimensions = mapping.makeGridTableColumns(dimensions);
-        this.gtAggrGroups = mapping
-                .makeGridTableColumns(replaceDerivedColumns(groupByPushDown, cubeSegment.getCubeDesc()));
+        this.gtAggrGroups = mapping.makeGridTableColumns(replaceDerivedColumns(groupByPushDown, cubeSegment.getCubeDesc()));
         this.gtAggrMetrics = mapping.makeGridTableColumns(metrics);
         this.gtAggrFuncs = mapping.makeAggrFuncs(metrics);
 
@@ -124,8 +121,7 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
     /**
      * Construct  GTScanRangePlanner with incomplete information. For UT only.
      */
-    public CubeScanRangePlanner(GTInfo info, Pair<ByteArray, ByteArray> gtStartAndEnd, TblColRef gtPartitionCol,
-            TupleFilter gtFilter) {
+    public CubeScanRangePlanner(GTInfo info, Pair<ByteArray, ByteArray> gtStartAndEnd, TblColRef gtPartitionCol, TupleFilter gtFilter) {
 
         this.maxScanRanges = KylinConfig.getInstanceFromEnv().getQueryStorageVisitScanRangeMax();
         this.maxFuzzyKeys = KylinConfig.getInstanceFromEnv().getQueryScanFuzzyKeyMax();
@@ -150,12 +146,9 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
         List<GTScanRange> scanRanges = this.planScanRanges();
         if (scanRanges != null && scanRanges.size() != 0) {
             scanRequest = new GTScanRequestBuilder().setInfo(gtInfo).setRanges(scanRanges).setDimensions(gtDimensions).//
-                    setAggrGroupBy(gtAggrGroups).setAggrMetrics(gtAggrMetrics).setAggrMetricsFuncs(gtAggrFuncs)
-                    .setFilterPushDown(gtFilter).//
-                    setAllowStorageAggregation(context.isNeedStorageAggregation())
-                    .setAggCacheMemThreshold(cubeSegment.getConfig().getQueryCoprocessorMemGB()).//
-                    setStoragePushDownLimit(context.getFinalPushDownLimit()).setHavingFilterPushDown(havingFilter)
-                    .createGTScanRequest();
+                    setAggrGroupBy(gtAggrGroups).setAggrMetrics(gtAggrMetrics).setAggrMetricsFuncs(gtAggrFuncs).setFilterPushDown(gtFilter).//
+                    setAllowStorageAggregation(context.isNeedStorageAggregation()).setAggCacheMemThreshold(cubeSegment.getConfig().getQueryCoprocessorMemGB()).//
+                    setStoragePushDownLimit(context.getFinalPushDownLimit()).setHavingFilterPushDown(havingFilter).createGTScanRequest();
         } else {
             scanRequest = null;
         }
@@ -210,15 +203,11 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
                 int beginCompare = rangeStartEndComparator.comparator.compare(range.begin, gtStartAndEnd.getSecond());
                 int endCompare = rangeStartEndComparator.comparator.compare(gtStartAndEnd.getFirst(), range.end);
 
-                if ((isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare < 0)
-                        || (!isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare <= 0)) {
+                if ((isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare < 0) || (!isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare <= 0)) {
                     //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded when using dict encoding, so use <= when has equals in condition. 
                 } else {
-                    logger.debug(
-                            "Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}", //
-                            gtPartitionCol, makeReadable(gtStartAndEnd.getFirst()),
-                            makeReadable(gtStartAndEnd.getSecond()), makeReadable(range.begin),
-                            makeReadable(range.end));
+                    logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}", //
+                            gtPartitionCol, makeReadable(gtStartAndEnd.getFirst()), makeReadable(gtStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end));
                     return null;
                 }
             }
@@ -253,8 +242,7 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
             return result;
         }
 
-        List<Map<Integer, ByteArray>> fuzzyValueCombinations = FuzzyValueCombination.calculate(fuzzyValueSet,
-                maxFuzzyKeys);
+        List<Map<Integer, ByteArray>> fuzzyValueCombinations = FuzzyValueCombination.calculate(fuzzyValueSet, maxFuzzyKeys);
 
         for (Map<Integer, ByteArray> fuzzyValue : fuzzyValueCombinations) {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
index 7ba1796..ee12743 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
@@ -51,11 +51,10 @@ public class CubeSegmentScanner implements IGTScanner {
     final GTScanRequest scanRequest;
 
     public CubeSegmentScanner(CubeSegment cubeSeg, Cuboid cuboid, Set<TblColRef> dimensions, Set<TblColRef> groups, //
-            Collection<FunctionDesc> metrics, TupleFilter originalfilter, TupleFilter havingFilter,
-            StorageContext context) {
-
+            Collection<FunctionDesc> metrics, TupleFilter originalfilter, TupleFilter havingFilter, StorageContext context) {
+        
         logger.info("Init CubeSegmentScanner for segment {}", cubeSeg.getName());
-
+        
         this.cuboid = cuboid;
         this.cubeSeg = cubeSeg;
 
@@ -71,20 +70,19 @@ public class CubeSegmentScanner implements IGTScanner {
 
         CubeScanRangePlanner scanRangePlanner;
         try {
-            scanRangePlanner = new CubeScanRangePlanner(cubeSeg, cuboid, filter, dimensions, groups, metrics,
-                    havingFilter, context);
+            scanRangePlanner = new CubeScanRangePlanner(cubeSeg, cuboid, filter, dimensions, groups, metrics, havingFilter, context);
         } catch (RuntimeException e) {
             throw e;
         } catch (Exception e) {
             throw new RuntimeException(e);
         }
-
+        
         scanRequest = scanRangePlanner.planScanRequest();
-
+        
         String gtStorage = ((GTCubeStorageQueryBase) context.getStorageQuery()).getGTStorage();
         scanner = new ScannerWorker(cubeSeg, cuboid, scanRequest, gtStorage, context);
     }
-
+    
     public boolean isSegmentSkipped() {
         return scanner.isSegmentSkipped();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeTupleConverter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeTupleConverter.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeTupleConverter.java
index ccc6d33..73fc380 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeTupleConverter.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeTupleConverter.java
@@ -71,8 +71,7 @@ public class CubeTupleConverter implements ITupleConverter {
     private final int nSelectedDims;
 
     public CubeTupleConverter(CubeSegment cubeSeg, Cuboid cuboid, //
-            Set<TblColRef> selectedDimensions, Set<FunctionDesc> selectedMetrics, int[] gtColIdx,
-            TupleInfo returnTupleInfo) {
+            Set<TblColRef> selectedDimensions, Set<FunctionDesc> selectedMetrics, int[] gtColIdx, TupleInfo returnTupleInfo) {
         this.cubeSeg = cubeSeg;
         this.cuboid = cuboid;
         this.gtColIdx = gtColIdx;
@@ -110,8 +109,7 @@ public class CubeTupleConverter implements ITupleConverter {
 
             MeasureType<?> measureType = metric.getMeasureType();
             if (measureType.needAdvancedTupleFilling()) {
-                Map<TblColRef, Dictionary<String>> dictionaryMap = buildDictionaryMap(
-                        measureType.getColumnsNeedDictionary(metric));
+                Map<TblColRef, Dictionary<String>> dictionaryMap = buildDictionaryMap(measureType.getColumnsNeedDictionary(metric));
                 advMeasureFillers.add(measureType.getAdvancedTupleFiller(metric, returnTupleInfo, dictionaryMap));
                 advMeasureIndexInGTValues.add(i);
             } else {
@@ -122,8 +120,7 @@ public class CubeTupleConverter implements ITupleConverter {
         }
 
         // prepare derived columns and filler
-        Map<Array<TblColRef>, List<DeriveInfo>> hostToDerivedInfo = cuboid.getCubeDesc()
-                .getHostToDerivedInfo(cuboid.getColumns(), null);
+        Map<Array<TblColRef>, List<DeriveInfo>> hostToDerivedInfo = cuboid.getCubeDesc().getHostToDerivedInfo(cuboid.getColumns(), null);
         for (Entry<Array<TblColRef>, List<DeriveInfo>> entry : hostToDerivedInfo.entrySet()) {
             TblColRef[] hostCols = entry.getKey().data;
             for (DeriveInfo deriveInfo : entry.getValue()) {
@@ -276,27 +273,22 @@ public class CubeTupleConverter implements ITupleConverter {
         String[] pkCols = join.getPrimaryKey();
         String snapshotResPath = cubeSegment.getSnapshotResPath(tableName);
         if (snapshotResPath == null)
-            throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment"
-                    + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
+            throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment" + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
 
         try {
             SnapshotTable snapshot = snapshotMgr.getSnapshotTable(snapshotResPath);
             TableDesc tableDesc = metaMgr.getTableDesc(tableName);
-            EnhancedStringLookupTable enhancedStringLookupTable = new EnhancedStringLookupTable(tableDesc, pkCols,
-                    snapshot);
-            logger.info("Time to get lookup up table for {} is {} ", join.getPKSide().getTableName(),
-                    (System.currentTimeMillis() - ts));
+            EnhancedStringLookupTable enhancedStringLookupTable = new EnhancedStringLookupTable(tableDesc, pkCols, snapshot);
+            logger.info("Time to get lookup up table for {} is {} ", join.getPKSide().getTableName(), (System.currentTimeMillis() - ts));
             return enhancedStringLookupTable;
         } catch (IOException e) {
-            throw new IllegalStateException(
-                    "Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
+            throw new IllegalStateException("Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
         }
     }
 
     private static class EnhancedStringLookupTable extends LookupStringTable {
 
-        public EnhancedStringLookupTable(TableDesc tableDesc, String[] keyColumns, IReadableTable table)
-                throws IOException {
+        public EnhancedStringLookupTable(TableDesc tableDesc, String[] keyColumns, IReadableTable table) throws IOException {
             super(tableDesc, keyColumns, table);
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
index 7efa76f..4ac8967 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
@@ -85,8 +85,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
                 continue;
             }
 
-            scanner = new CubeSegmentScanner(cubeSeg, request.getCuboid(), request.getDimensions(), request.getGroups(),
-                    request.getMetrics(), request.getFilter(), request.getHavingFilter(), request.getContext());
+            scanner = new CubeSegmentScanner(cubeSeg, request.getCuboid(), request.getDimensions(), request.getGroups(), request.getMetrics(), request.getFilter(), request.getHavingFilter(), request.getContext());
             if (!scanner.isSegmentSkipped())
                 scanners.add(scanner);
         }
@@ -94,12 +93,10 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         if (scanners.isEmpty())
             return ITupleIterator.EMPTY_TUPLE_ITERATOR;
 
-        return new SequentialCubeTupleIterator(scanners, request.getCuboid(), request.getDimensions(),
-                request.getMetrics(), returnTupleInfo, request.getContext());
+        return new SequentialCubeTupleIterator(scanners, request.getCuboid(), request.getDimensions(), request.getMetrics(), returnTupleInfo, request.getContext());
     }
 
-    protected GTCubeStorageQueryRequest getStorageQueryRequest(StorageContext context, SQLDigest sqlDigest,
-            TupleInfo returnTupleInfo) {
+    protected GTCubeStorageQueryRequest getStorageQueryRequest(StorageContext context, SQLDigest sqlDigest, TupleInfo returnTupleInfo) {
         context.setStorageQuery(this);
 
         //cope with queries with no aggregations
@@ -145,23 +142,18 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         TupleFilter.collectColumns(filter, filterColumnD);
 
         // set limit push down
-        enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, loosenedColumnD,
-                sqlDigest.aggregations, context);
+        enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, loosenedColumnD, sqlDigest.aggregations, context);
         // set whether to aggregate results from multiple partitions
         enableStreamAggregateIfBeneficial(cuboid, groupsD, context);
         // set query deadline
         context.setDeadline(cubeInstance);
 
         // push down having clause filter if possible
-        TupleFilter havingFilter = checkHavingCanPushDown(sqlDigest.havingFilter, groupsD, sqlDigest.aggregations,
-                metrics);
+        TupleFilter havingFilter = checkHavingCanPushDown(sqlDigest.havingFilter, groupsD, sqlDigest.aggregations, metrics);
 
-        logger.info("Cuboid identified: cube={}, cuboidId={}, groupsD={}, filterD={}, limitPushdown={}, storageAggr={}",
-                cubeInstance.getName(), cuboid.getId(), groupsD, filterColumnD, context.getFinalPushDownLimit(),
-                context.isNeedStorageAggregation());
+        logger.info("Cuboid identified: cube={}, cuboidId={}, groupsD={}, filterD={}, limitPushdown={}, storageAggr={}", cubeInstance.getName(), cuboid.getId(), groupsD, filterColumnD, context.getFinalPushDownLimit(), context.isNeedStorageAggregation());
 
-        return new GTCubeStorageQueryRequest(cuboid, dimensionsD, groupsD, filterColumnD, metrics, filterD,
-                havingFilter, context);
+        return new GTCubeStorageQueryRequest(cuboid, dimensionsD, groupsD, filterColumnD, metrics, filterD, havingFilter, context);
     }
 
     protected abstract String getGTStorage();
@@ -170,13 +162,11 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         return Cuboid.identifyCuboid(cubeDesc, dimensionsD, metrics);
     }
 
-    protected ITupleConverter newCubeTupleConverter(CubeSegment cubeSeg, Cuboid cuboid,
-            Set<TblColRef> selectedDimensions, Set<FunctionDesc> selectedMetrics, int[] gtColIdx, TupleInfo tupleInfo) {
+    protected ITupleConverter newCubeTupleConverter(CubeSegment cubeSeg, Cuboid cuboid, Set<TblColRef> selectedDimensions, Set<FunctionDesc> selectedMetrics, int[] gtColIdx, TupleInfo tupleInfo) {
         return new CubeTupleConverter(cubeSeg, cuboid, selectedDimensions, selectedMetrics, gtColIdx, tupleInfo);
     }
 
-    protected void buildDimensionsAndMetrics(SQLDigest sqlDigest, Collection<TblColRef> dimensions,
-            Collection<FunctionDesc> metrics) {
+    protected void buildDimensionsAndMetrics(SQLDigest sqlDigest, Collection<TblColRef> dimensions, Collection<FunctionDesc> metrics) {
         for (FunctionDesc func : sqlDigest.aggregations) {
             if (!func.isDimensionAsMetric()) {
                 // use the FunctionDesc from cube desc as much as possible, that has more info such as HLLC precision
@@ -186,8 +176,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
 
         for (TblColRef column : sqlDigest.allColumns) {
             // skip measure columns
-            if (sqlDigest.metricColumns.contains(column)
-                    && !(sqlDigest.groupbyColumns.contains(column) || sqlDigest.filterColumns.contains(column))) {
+            if (sqlDigest.metricColumns.contains(column) && !(sqlDigest.groupbyColumns.contains(column) || sqlDigest.filterColumns.contains(column))) {
                 continue;
             }
 
@@ -236,8 +225,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
             if (f instanceof CompareTupleFilter) {
                 CompareTupleFilter compFilter = (CompareTupleFilter) f;
                 // is COL=const ?
-                if (compFilter.getOperator() == FilterOperatorEnum.EQ && compFilter.getValues().size() == 1
-                        && compFilter.getColumn() != null) {
+                if (compFilter.getOperator() == FilterOperatorEnum.EQ && compFilter.getValues().size() == 1 && compFilter.getColumn() != null) {
                     result.add(compFilter.getColumn());
                 }
             }
@@ -264,8 +252,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         return resultD;
     }
 
-    public boolean isNeedStorageAggregation(Cuboid cuboid, Collection<TblColRef> groupD,
-            Collection<TblColRef> singleValueD) {
+    public boolean isNeedStorageAggregation(Cuboid cuboid, Collection<TblColRef> groupD, Collection<TblColRef> singleValueD) {
         HashSet<TblColRef> temp = Sets.newHashSet();
         temp.addAll(groupD);
         temp.addAll(singleValueD);
@@ -352,9 +339,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         }
     }
 
-    private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> groups,
-            Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter,
-            Set<TblColRef> loosenedColumnD, Collection<FunctionDesc> functionDescs, StorageContext context) {
+    private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter, Set<TblColRef> loosenedColumnD, Collection<FunctionDesc> functionDescs, StorageContext context) {
         boolean possible = true;
 
         if (!TupleFilter.isEvaluableRecursively(filter)) {
@@ -375,17 +360,15 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         // derived aggregation is bad, unless expanded columns are already in group by
         if (!groups.containsAll(derivedPostAggregation)) {
             possible = false;
-            logger.debug("Storage limit push down is impossible because derived column require post aggregation: "
-                    + derivedPostAggregation);
+            logger.debug("Storage limit push down is impossible because derived column require post aggregation: " + derivedPostAggregation);
         }
 
         //if groupsD is clustered at "head" of the rowkey, then limit push down is possible
         int size = groupsD.size();
         if (!groupsD.containsAll(cuboid.getColumns().subList(0, size))) {
             possible = false;
-            logger.debug(
-                    "Storage limit push down is impossible because groupD is not clustered at head, groupsD: " + groupsD //
-                            + " with cuboid columns: " + cuboid.getColumns());
+            logger.debug("Storage limit push down is impossible because groupD is not clustered at head, groupsD: " + groupsD //
+                    + " with cuboid columns: " + cuboid.getColumns());
         }
 
         //if exists measures like max(cal_dt), then it's not a perfect cuboid match, cannot apply limit
@@ -413,8 +396,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         }
         if (!shardByInGroups.isEmpty()) {
             enabled = false;
-            logger.debug("Aggregate partition results is not beneficial because shard by columns in groupD: "
-                    + shardByInGroups);
+            logger.debug("Aggregate partition results is not beneficial because shard by columns in groupD: " + shardByInGroups);
         }
 
         if (!context.isNeedStorageAggregation()) {
@@ -446,8 +428,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         }
     }
 
-    private TupleFilter checkHavingCanPushDown(TupleFilter havingFilter, Set<TblColRef> groupsD,
-            List<FunctionDesc> aggregations, Set<FunctionDesc> metrics) {
+    private TupleFilter checkHavingCanPushDown(TupleFilter havingFilter, Set<TblColRef> groupsD, List<FunctionDesc> aggregations, Set<FunctionDesc> metrics) {
         // must have only one segment
         Segments<CubeSegment> readySegs = cubeInstance.getSegments(SegmentStatusEnum.READY);
         if (readySegs.size() != 1)

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryRequest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryRequest.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryRequest.java
index 38b457d..7793515 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryRequest.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryRequest.java
@@ -39,8 +39,7 @@ public class GTCubeStorageQueryRequest implements Serializable {
     private StorageContext context;
 
     public GTCubeStorageQueryRequest(Cuboid cuboid, Set<TblColRef> dimensions, Set<TblColRef> groups, //
-            Set<TblColRef> filterCols, Set<FunctionDesc> metrics, TupleFilter filter, TupleFilter havingFilter,
-            StorageContext context) {
+            Set<TblColRef> filterCols, Set<FunctionDesc> metrics, TupleFilter filter, TupleFilter havingFilter, StorageContext context) {
         this.cuboid = cuboid;
         this.dimensions = dimensions;
         this.groups = groups;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/PartitionResultIterator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/PartitionResultIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/PartitionResultIterator.java
index 499d60c..474e1e0 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/PartitionResultIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/PartitionResultIterator.java
@@ -18,14 +18,13 @@
 
 package org.apache.kylin.storage.gtrecord;
 
-import java.nio.ByteBuffer;
-import java.util.NoSuchElementException;
-
+import com.google.common.collect.UnmodifiableIterator;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
 
-import com.google.common.collect.UnmodifiableIterator;
+import java.nio.ByteBuffer;
+import java.util.NoSuchElementException;
 
 /**
  * Support iterate over {@code GTRecord}s in storage partition result.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
index 2d86bf5..8f64bd1 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
@@ -39,8 +39,7 @@ public class ScannerWorker {
     private static final Logger logger = LoggerFactory.getLogger(ScannerWorker.class);
     private IGTScanner internal = null;
 
-    public ScannerWorker(ISegment segment, Cuboid cuboid, GTScanRequest scanRequest, String gtStorage,
-            StorageContext context) {
+    public ScannerWorker(ISegment segment, Cuboid cuboid, GTScanRequest scanRequest, String gtStorage, StorageContext context) {
         if (scanRequest == null) {
             logger.info("Segment {} will be skipped", segment);
             internal = new EmptyGTScanner();
@@ -50,16 +49,13 @@ public class ScannerWorker {
         final GTInfo info = scanRequest.getInfo();
 
         try {
-            IGTStorage rpc = (IGTStorage) Class.forName(gtStorage)
-                    .getConstructor(ISegment.class, Cuboid.class, GTInfo.class, StorageContext.class)
-                    .newInstance(segment, cuboid, info, context); // default behavior
+            IGTStorage rpc = (IGTStorage) Class.forName(gtStorage).getConstructor(ISegment.class, Cuboid.class, GTInfo.class, StorageContext.class).newInstance(segment, cuboid, info, context); // default behavior
             internal = rpc.getGTScanner(scanRequest);
-        } catch (IOException | InstantiationException | InvocationTargetException | IllegalAccessException
-                | ClassNotFoundException | NoSuchMethodException e) {
+        } catch (IOException | InstantiationException | InvocationTargetException | IllegalAccessException | ClassNotFoundException | NoSuchMethodException e) {
             throw new RuntimeException(e);
         }
     }
-
+    
     public boolean isSegmentSkipped() {
         return internal instanceof EmptyGTScanner;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
index 7bbbc63..3bac5ec 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
@@ -24,6 +24,7 @@ import java.util.List;
 import java.util.NoSuchElementException;
 import java.util.Set;
 
+import com.google.common.collect.UnmodifiableIterator;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.gridtable.CuboidToGridTableMapping;
 import org.apache.kylin.gridtable.GTInfo;
@@ -42,8 +43,6 @@ import org.apache.kylin.storage.StorageContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.UnmodifiableIterator;
-
 public class SegmentCubeTupleIterator implements ITupleIterator {
 
     private static final Logger logger = LoggerFactory.getLogger(SegmentCubeTupleIterator.class);
@@ -87,7 +86,8 @@ public class SegmentCubeTupleIterator implements ITupleIterator {
                 scanner.cubeSeg, cuboid, selectedDimensions, selectedMetrics, gtColIdx, tupleInfo);
     }
 
-    private Iterator<Object[]> getGTValuesIterator(final Iterator<GTRecord> records, final GTScanRequest scanRequest,
+    private Iterator<Object[]> getGTValuesIterator(
+            final Iterator<GTRecord> records, final GTScanRequest scanRequest,
             final int[] gtDimsIdx, final int[] gtMetricsIdx) {
 
         boolean hasMultiplePartitions = records instanceof SortMergedPartitionResultIterator;
@@ -98,8 +98,7 @@ public class SegmentCubeTupleIterator implements ITupleIterator {
                     return scanRequest.getInfo();
                 }
 
-                public void close() throws IOException {
-                }
+                public void close() throws IOException {}
 
                 public Iterator<GTRecord> iterator() {
                     return records;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
index bea2761..9d5d816 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
@@ -51,25 +51,21 @@ public class SequentialCubeTupleIterator implements ITupleIterator {
     private int scanCount;
     private int scanCountDelta;
 
-    public SequentialCubeTupleIterator(List<CubeSegmentScanner> scanners, Cuboid cuboid,
-            Set<TblColRef> selectedDimensions, //
+    public SequentialCubeTupleIterator(List<CubeSegmentScanner> scanners, Cuboid cuboid, Set<TblColRef> selectedDimensions, //
             Set<FunctionDesc> selectedMetrics, TupleInfo returnTupleInfo, StorageContext context) {
         this.context = context;
         this.scanners = scanners;
 
         segmentCubeTupleIterators = Lists.newArrayList();
         for (CubeSegmentScanner scanner : scanners) {
-            segmentCubeTupleIterators.add(new SegmentCubeTupleIterator(scanner, cuboid, selectedDimensions,
-                    selectedMetrics, returnTupleInfo, context));
+            segmentCubeTupleIterators.add(new SegmentCubeTupleIterator(scanner, cuboid, selectedDimensions, selectedMetrics, returnTupleInfo, context));
         }
 
         if (context.mergeSortPartitionResults()) {
             //query with limit
             logger.info("Using SortedIteratorMergerWithLimit to merge segment results");
-            Iterator<Iterator<ITuple>> transformed = (Iterator<Iterator<ITuple>>) (Iterator<?>) segmentCubeTupleIterators
-                    .iterator();
-            tupleIterator = new SortedIteratorMergerWithLimit<ITuple>(transformed, context.getFinalPushDownLimit(),
-                    getTupleDimensionComparator(cuboid, returnTupleInfo)).getIterator();
+            Iterator<Iterator<ITuple>> transformed = (Iterator<Iterator<ITuple>>) (Iterator<?>) segmentCubeTupleIterators.iterator();
+            tupleIterator = new SortedIteratorMergerWithLimit<ITuple>(transformed, context.getFinalPushDownLimit(), getTupleDimensionComparator(cuboid, returnTupleInfo)).getIterator();
         } else {
             //normal case
             logger.info("Using Iterators.concat to merge segment results");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortMergedPartitionResultIterator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortMergedPartitionResultIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortMergedPartitionResultIterator.java
index 14e0d86..21e61e3 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortMergedPartitionResultIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortMergedPartitionResultIterator.java
@@ -18,28 +18,28 @@
 
 package org.apache.kylin.storage.gtrecord;
 
+import com.google.common.collect.Iterators;
+import com.google.common.collect.PeekingIterator;
+import com.google.common.collect.UnmodifiableIterator;
+import org.apache.kylin.gridtable.GTInfo;
+import org.apache.kylin.gridtable.GTRecord;
+
 import java.util.Comparator;
 import java.util.List;
 import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 
-import org.apache.kylin.gridtable.GTInfo;
-import org.apache.kylin.gridtable.GTRecord;
-
-import com.google.common.collect.Iterators;
-import com.google.common.collect.PeekingIterator;
-import com.google.common.collect.UnmodifiableIterator;
-
 /**
  * Merge-sort {@code GTRecord}s in all partitions, assume each partition contains sorted elements.
  */
 public class SortMergedPartitionResultIterator extends UnmodifiableIterator<GTRecord> {
 
-    final GTRecord record; // reuse to avoid object creation
+    final GTRecord record ; // reuse to avoid object creation
     PriorityQueue<PeekingIterator<GTRecord>> heap;
 
-    SortMergedPartitionResultIterator(List<PartitionResultIterator> partitionResults, GTInfo info,
-            final Comparator<GTRecord> comparator) {
+    SortMergedPartitionResultIterator(
+            List<PartitionResultIterator> partitionResults,
+            GTInfo info, final Comparator<GTRecord> comparator) {
 
         this.record = new GTRecord(info);
         Comparator<PeekingIterator<GTRecord>> heapComparator = new Comparator<PeekingIterator<GTRecord>>() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortedIteratorMerger.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortedIteratorMerger.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortedIteratorMerger.java
index a75a580..d5aa9d0 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortedIteratorMerger.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SortedIteratorMerger.java
@@ -38,13 +38,12 @@ public class SortedIteratorMerger<E> {
     }
 
     public Iterator<E> getIterator() {
-        final PriorityQueue<PeekingImpl<E>> heap = new PriorityQueue<PeekingImpl<E>>(11,
-                new Comparator<PeekingImpl<E>>() {
-                    @Override
-                    public int compare(PeekingImpl<E> o1, PeekingImpl<E> o2) {
-                        return comparator.compare(o1.peek(), o2.peek());
-                    }
-                });
+        final PriorityQueue<PeekingImpl<E>> heap = new PriorityQueue<PeekingImpl<E>>(11, new Comparator<PeekingImpl<E>>() {
+            @Override
+            public int compare(PeekingImpl<E> o1, PeekingImpl<E> o2) {
+                return comparator.compare(o1.peek(), o2.peek());
+            }
+        });
 
         while (shardSubsets.hasNext()) {
             Iterator<E> iterator = shardSubsets.next();
@@ -83,8 +82,7 @@ public class SortedIteratorMerger<E> {
             if (poll.hasNext()) {
 
                 //TODO: remove this check when validated
-                Preconditions.checkState(comparator.compare(current, poll.peek()) < 0,
-                        "Not sorted! current: " + current + " Next: " + poll.peek());
+                Preconditions.checkState(comparator.compare(current, poll.peek()) < 0, "Not sorted! current: " + current + " Next: " + poll.peek());
 
                 heap.offer(poll);
             }
@@ -96,6 +94,7 @@ public class SortedIteratorMerger<E> {
             throw new UnsupportedOperationException();
         }
 
+     
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
index 3e96169..ef12ff0 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
@@ -18,10 +18,9 @@
 
 package org.apache.kylin.storage.gtrecord;
 
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
@@ -31,9 +30,9 @@ import org.apache.kylin.storage.StorageContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
 
 /**
  * scatter the blob returned from region server to a iterable of gtrecords
@@ -49,8 +48,7 @@ public class StorageResponseGTScatter implements IGTScanner {
     private final ImmutableBitSet groupByDims;
     private final boolean needSorted; // whether scanner should return sorted records
 
-    public StorageResponseGTScatter(GTScanRequest scanRequest, IPartitionStreamer partitionStreamer,
-            StorageContext context) {
+    public StorageResponseGTScatter(GTScanRequest scanRequest, IPartitionStreamer partitionStreamer, StorageContext context) {
         this.info = scanRequest.getInfo();
         this.partitionStreamer = partitionStreamer;
         this.blocks = partitionStreamer.asByteArrayIterator();
@@ -72,12 +70,11 @@ public class StorageResponseGTScatter implements IGTScanner {
 
     @Override
     public Iterator<GTRecord> iterator() {
-        Iterator<PartitionResultIterator> iterators = Iterators.transform(blocks,
-                new Function<byte[], PartitionResultIterator>() {
-                    public PartitionResultIterator apply(byte[] input) {
-                        return new PartitionResultIterator(input, info, columns);
-                    }
-                });
+        Iterator<PartitionResultIterator> iterators = Iterators.transform(blocks, new Function<byte[], PartitionResultIterator>() {
+            public PartitionResultIterator apply(byte[] input) {
+                return new PartitionResultIterator(input, info, columns);
+            }
+        });
 
         if (!needSorted) {
             logger.debug("Using Iterators.concat to pipeline partition results");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
index ba41173..1b113ee 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
@@ -106,16 +106,13 @@ public class HybridInstance extends RootPersistentEntity implements IRealization
             RealizationRegistry registry = RealizationRegistry.getInstance(config);
             List<IRealization> realizationList = Lists.newArrayList();
             for (int i = 0; i < realizationEntries.size(); i++) {
-                IRealization realization = registry.getRealization(realizationEntries.get(i).getType(),
-                        realizationEntries.get(i).getRealization());
+                IRealization realization = registry.getRealization(realizationEntries.get(i).getType(), realizationEntries.get(i).getRealization());
                 if (realization == null) {
-                    logger.error("Realization '" + realizationEntries.get(i) + " is not found, remove from Hybrid '"
-                            + this.getName() + "'");
+                    logger.error("Realization '" + realizationEntries.get(i) + " is not found, remove from Hybrid '" + this.getName() + "'");
                     continue;
                 }
                 if (realization.isReady() == false) {
-                    logger.error("Realization '" + realization.getName() + " is disabled, remove from Hybrid '"
-                            + this.getName() + "'");
+                    logger.error("Realization '" + realization.getName() + " is disabled, remove from Hybrid '" + this.getName() + "'");
                     continue;
                 }
                 realizationList.add(realization);
@@ -223,7 +220,7 @@ public class HybridInstance extends RootPersistentEntity implements IRealization
         init();
         return allColumnDescs;
     }
-
+    
     @Override
     public List<MeasureDesc> getMeasures() {
         init();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridManager.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridManager.java b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridManager.java
index 7bab3de..cf40416 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridManager.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridManager.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
@@ -40,13 +41,10 @@ import org.apache.kylin.metadata.realization.RealizationType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-
 /**
  */
 public class HybridManager implements IRealizationProvider {
-    public static final Serializer<HybridInstance> HYBRID_SERIALIZER = new JsonSerializer<HybridInstance>(
-            HybridInstance.class);
+    public static final Serializer<HybridInstance> HYBRID_SERIALIZER = new JsonSerializer<HybridInstance>(HybridInstance.class);
 
     private static final Logger logger = LoggerFactory.getLogger(HybridManager.class);
 
@@ -114,8 +112,7 @@ public class HybridManager implements IRealizationProvider {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             if ("hybrid".equals(entity)) {
                 String hybridName = cacheKey;
 
@@ -124,8 +121,7 @@ public class HybridManager implements IRealizationProvider {
                 else
                     reloadHybridInstance(hybridName);
 
-                for (ProjectInstance prj : ProjectManager.getInstance(config).findProjects(RealizationType.HYBRID,
-                        hybridName)) {
+                for (ProjectInstance prj : ProjectManager.getInstance(config).findProjects(RealizationType.HYBRID, hybridName)) {
                     broadcaster.notifyProjectSchemaUpdate(prj.getName());
                 }
             } else if ("cube".equals(entity)) {
@@ -155,8 +151,7 @@ public class HybridManager implements IRealizationProvider {
         List<HybridInstance> result = Lists.newArrayList();
         for (HybridInstance hybridInstance : hybridMap.values()) {
             for (RealizationEntry realizationEntry : hybridInstance.getRealizationEntries()) {
-                if (realizationEntry.getType() == type
-                        && realizationEntry.getRealization().equalsIgnoreCase(realizationName)) {
+                if (realizationEntry.getType() == type && realizationEntry.getRealization().equalsIgnoreCase(realizationName)) {
                     result.add(hybridInstance);
                 }
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridStorageQuery.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridStorageQuery.java b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridStorageQuery.java
index c16fc8a..0107434 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridStorageQuery.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridStorageQuery.java
@@ -46,8 +46,7 @@ public class HybridStorageQuery implements IStorageQuery {
     }
 
     @Override
-    public ITupleIterator search(final StorageContext context, final SQLDigest sqlDigest,
-            final TupleInfo returnTupleInfo) {
+    public ITupleIterator search(final StorageContext context, final SQLDigest sqlDigest, final TupleInfo returnTupleInfo) {
         List<ITupleIterator> tupleIterators = Lists.newArrayList();
         for (int i = 0; i < realizations.length; i++) {
             if (realizations[i].isReady() && realizations[i].isCapable(sqlDigest).capable) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/translate/ColumnValueRange.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/translate/ColumnValueRange.java b/core-storage/src/main/java/org/apache/kylin/storage/translate/ColumnValueRange.java
index 2410ed3..56b1106 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/translate/ColumnValueRange.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/translate/ColumnValueRange.java
@@ -163,8 +163,7 @@ public class ColumnValueRange {
     }
 
     private boolean between(String v, String beginValue, String endValue) {
-        return (beginValue == null || order.compare(beginValue, v) <= 0)
-                && (endValue == null || order.compare(v, endValue) <= 0);
+        return (beginValue == null || order.compare(beginValue, v) <= 0) && (endValue == null || order.compare(v, endValue) <= 0);
     }
 
     // remove invalid EQ/IN values and round start/end according to dictionary

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/translate/DerivedFilterTranslator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/translate/DerivedFilterTranslator.java b/core-storage/src/main/java/org/apache/kylin/storage/translate/DerivedFilterTranslator.java
index f168d30..f4150fe 100755
--- a/core-storage/src/main/java/org/apache/kylin/storage/translate/DerivedFilterTranslator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/translate/DerivedFilterTranslator.java
@@ -50,8 +50,7 @@ public class DerivedFilterTranslator {
 
     private static final Logger logger = LoggerFactory.getLogger(DerivedFilterTranslator.class);
 
-    public static Pair<TupleFilter, Boolean> translate(LookupStringTable lookup, DeriveInfo hostInfo,
-            CompareTupleFilter compf) {
+    public static Pair<TupleFilter, Boolean> translate(LookupStringTable lookup, DeriveInfo hostInfo, CompareTupleFilter compf) {
 
         TblColRef derivedCol = compf.getColumn();
         TblColRef[] hostCols = hostInfo.columns;
@@ -79,8 +78,7 @@ public class DerivedFilterTranslator {
         SingleColumnTuple tuple = new SingleColumnTuple(derivedCol);
         for (String[] row : lookup.getAllRows()) {
             tuple.value = row[di];
-            if (compf.evaluate(tuple,
-                    FilterCodeSystemFactory.getFilterCodeSystem(derivedCol.getColumnDesc().getType()))) {
+            if (compf.evaluate(tuple, FilterCodeSystemFactory.getFilterCodeSystem(derivedCol.getColumnDesc().getType()))) {
                 collect(row, pi, satisfyingHostRecords);
             }
         }
@@ -89,7 +87,8 @@ public class DerivedFilterTranslator {
         boolean loosened;
         if (satisfyingHostRecords.size() > KylinConfig.getInstanceFromEnv().getDerivedInThreshold()) {
             logger.info("Deciding to loosen filter on derived filter as host candidates number {} exceeds threshold {}", //
-                    satisfyingHostRecords.size(), KylinConfig.getInstanceFromEnv().getDerivedInThreshold());
+                    satisfyingHostRecords.size(), KylinConfig.getInstanceFromEnv().getDerivedInThreshold()
+            );
             translated = buildRangeFilter(hostCols, satisfyingHostRecords);
             loosened = true;
         } else {
@@ -163,8 +162,7 @@ public class DerivedFilterTranslator {
         return and;
     }
 
-    private static void findMinMax(Set<Array<String>> satisfyingHostRecords, TblColRef[] hostCols, String[] min,
-            String[] max) {
+    private static void findMinMax(Set<Array<String>> satisfyingHostRecords, TblColRef[] hostCols, String[] min, String[] max) {
 
         RowKeyColumnOrder[] orders = new RowKeyColumnOrder[hostCols.length];
         for (int i = 0; i < hostCols.length; i++) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java b/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
index 37b500e..bfddb1f 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/translate/HBaseKeyRange.java
@@ -69,9 +69,7 @@ public class HBaseKeyRange implements Comparable<HBaseKeyRange> {
     private long partitionColumnStartDate = Long.MIN_VALUE;
     private long partitionColumnEndDate = Long.MAX_VALUE;
 
-    public HBaseKeyRange(CubeSegment cubeSeg, Cuboid cuboid, byte[] startKey, byte[] stopKey,
-            List<Pair<byte[], byte[]>> fuzzyKeys, List<Collection<ColumnValueRange>> flatColumnValueFilter,
-            long partitionColumnStartDate, long partitionColumnEndDate) {
+    public HBaseKeyRange(CubeSegment cubeSeg, Cuboid cuboid, byte[] startKey, byte[] stopKey, List<Pair<byte[], byte[]>> fuzzyKeys, List<Collection<ColumnValueRange>> flatColumnValueFilter, long partitionColumnStartDate, long partitionColumnEndDate) {
         this.cubeSeg = cubeSeg;
         this.cuboid = cuboid;
         this.startKey = startKey;
@@ -83,8 +81,7 @@ public class HBaseKeyRange implements Comparable<HBaseKeyRange> {
         initDebugString();
     }
 
-    public HBaseKeyRange(Collection<TblColRef> dimensionColumns, Collection<ColumnValueRange> andDimensionRanges,
-            CubeSegment cubeSeg, CubeDesc cubeDesc) {
+    public HBaseKeyRange(Collection<TblColRef> dimensionColumns, Collection<ColumnValueRange> andDimensionRanges, CubeSegment cubeSeg, CubeDesc cubeDesc) {
         this.cubeSeg = cubeSeg;
         long cuboidId = this.calculateCuboidID(cubeDesc, dimensionColumns);
         this.cuboid = Cuboid.findById(cubeDesc, cuboidId);
@@ -114,8 +111,7 @@ public class HBaseKeyRange implements Comparable<HBaseKeyRange> {
             stopValues.put(column, dimRange.getEndValue());
             fuzzyValues.put(column, dimRange.getEqualValues());
 
-            TblColRef partitionDateColumnRef = cubeSeg.getCubeDesc().getModel().getPartitionDesc()
-                    .getPartitionDateColumnRef();
+            TblColRef partitionDateColumnRef = cubeSeg.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef();
             if (column.equals(partitionDateColumnRef)) {
                 initPartitionRange(dimRange);
             }
@@ -272,7 +268,6 @@ public class HBaseKeyRange implements Comparable<HBaseKeyRange> {
     }
 
     public boolean hitSegment() {
-        return cubeSeg.getDateRangeStart() <= getPartitionColumnEndDate()
-                && cubeSeg.getDateRangeEnd() >= getPartitionColumnStartDate();
+        return cubeSeg.getDateRangeStart() <= getPartitionColumnEndDate() && cubeSeg.getDateRangeEnd() >= getPartitionColumnStartDate();
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/test/java/org/apache/kylin/storage/StorageMockUtils.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/StorageMockUtils.java b/core-storage/src/test/java/org/apache/kylin/storage/StorageMockUtils.java
index 68a820d..8099527 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/StorageMockUtils.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/StorageMockUtils.java
@@ -36,13 +36,13 @@ import org.apache.kylin.metadata.tuple.TupleInfo;
 /**
  */
 public class StorageMockUtils {
-
+    
     final DataModelDesc model;
-
+    
     public StorageMockUtils(DataModelDesc model) {
         this.model = model;
     }
-
+    
     public TupleInfo newTupleInfo(List<TblColRef> groups, List<FunctionDesc> aggregations) {
         TupleInfo info = new TupleInfo();
         int idx = 0;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-storage/src/test/java/org/apache/kylin/storage/cache/EhcacheTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/cache/EhcacheTest.java b/core-storage/src/test/java/org/apache/kylin/storage/cache/EhcacheTest.java
index c9b36e6..e089387 100644
--- a/core-storage/src/test/java/org/apache/kylin/storage/cache/EhcacheTest.java
+++ b/core-storage/src/test/java/org/apache/kylin/storage/cache/EhcacheTest.java
@@ -36,8 +36,7 @@ public class EhcacheTest {
 
     @Test
     public void basicTest() throws InterruptedException {
-        System.out.println("runtime used memory: "
-                + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
+        System.out.println("runtime used memory: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
 
         Configuration conf = new Configuration();
         conf.setMaxBytesLocalHeap("100M");
@@ -55,8 +54,7 @@ public class EhcacheTest {
 
         cacheManager.addCache(testCache);
 
-        System.out.println("runtime used memory: "
-                + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
+        System.out.println("runtime used memory: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
         byte[] blob = new byte[(1024 * 80 * 1024)];//400M
         Random random = new Random();
         for (int i = 0; i < blob.length; i++) {
@@ -73,8 +71,7 @@ public class EhcacheTest {
         System.out.println(testCache.get("1") == null);
         System.out.println(testCache.getSize());
         System.out.println(testCache.getStatistics().getLocalHeapSizeInBytes());
-        System.out.println("runtime used memory: "
-                + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
+        System.out.println("runtime used memory: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
 
         blob = new byte[(1024 * 80 * 1024)];//400M
         for (int i = 0; i < blob.length; i++) {
@@ -86,8 +83,7 @@ public class EhcacheTest {
         System.out.println(testCache.get("2") == null);
         System.out.println(testCache.getSize());
         System.out.println(testCache.getStatistics().getLocalHeapSizeInBytes());
-        System.out.println("runtime used memory: "
-                + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
+        System.out.println("runtime used memory: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
 
         blob = new byte[(1024 * 80 * 1024)];//400M
         for (int i = 0; i < blob.length; i++) {
@@ -99,8 +95,7 @@ public class EhcacheTest {
         System.out.println(testCache.get("3") == null);
         System.out.println(testCache.getSize());
         System.out.println(testCache.getStatistics().getLocalHeapSizeInBytes());
-        System.out.println("runtime used memory: "
-                + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
+        System.out.println("runtime used memory: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024 + "M");
 
         cacheManager.shutdown();
     }


[09/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
index fef9fa3..abc8621 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
@@ -114,8 +114,7 @@ public class JobControllerV2 extends BasicController {
             @RequestParam(value = "projectName", required = false) String projectName, //
             @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, //
             @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize, //
-            @RequestParam(value = "sortby", required = false, defaultValue = "last_modify") String sortby,
-            @RequestParam(value = "reverse", required = false, defaultValue = "true") Boolean reverse) {
+            @RequestParam(value = "sortby", required = false, defaultValue = "last_modify") String sortby, @RequestParam(value = "reverse", required = false, defaultValue = "true") Boolean reverse) {
         MsgPicker.setMsg(lang);
 
         HashMap<String, Object> data = new HashMap<String, Object>();
@@ -127,8 +126,7 @@ public class JobControllerV2 extends BasicController {
             }
         }
 
-        List<JobInstance> jobInstanceList = jobService.searchJobs(cubeName, projectName, statusList,
-                JobTimeFilterEnum.getByCode(timeFilter));
+        List<JobInstance> jobInstanceList = jobService.searchJobs(cubeName, projectName, statusList, JobTimeFilterEnum.getByCode(timeFilter));
 
         if (sortby.equals("last_modify")) {
             if (reverse) {
@@ -173,8 +171,7 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
         MsgPicker.setMsg(lang);
@@ -190,11 +187,9 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/steps/{stepId}/output", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/steps/{stepId}/output", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getStepOutputV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId,
-            @PathVariable String stepId) {
+    public EnvelopeResponse getStepOutputV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId, @PathVariable String stepId) {
         MsgPicker.setMsg(lang);
 
         Map<String, String> result = new HashMap<String, String>();
@@ -211,8 +206,7 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/resume", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/resume", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse resumeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
         MsgPicker.setMsg(lang);
@@ -229,11 +223,9 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/cancel", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/cancel", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cancelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId)
-            throws IOException {
+    public EnvelopeResponse cancelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) throws IOException {
         MsgPicker.setMsg(lang);
 
         final JobInstance jobInstance = jobService.getJobInstance(jobId);
@@ -247,8 +239,7 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/pause", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/pause", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse pauseV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
         MsgPicker.setMsg(lang);
@@ -264,11 +255,9 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/steps/{stepId}/rollback", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/steps/{stepId}/rollback", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse rollbackV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId,
-            @PathVariable String stepId) {
+    public EnvelopeResponse rollbackV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId, @PathVariable String stepId) {
         MsgPicker.setMsg(lang);
 
         final JobInstance jobInstance = jobService.getJobInstance(jobId);
@@ -283,11 +272,9 @@ public class JobControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{jobId}/drop", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{jobId}/drop", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse dropJobV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId)
-            throws IOException {
+    public EnvelopeResponse dropJobV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) throws IOException {
         MsgPicker.setMsg(lang);
 
         JobInstance jobInstance = jobService.getJobInstance(jobId);

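For readers skimming the JobControllerV2 hunks above: every change in this file reverts the same line wrapping, putting each @RequestMapping annotation and handler signature back on one line. A minimal, self-contained sketch of that Spring MVC pattern follows; the class DemoControllerV2, the /demo_jobs path and the plain String return value are illustrative placeholders, not code from this commit (the real V2 controllers extend BasicController and return EnvelopeResponse).

    import org.springframework.stereotype.Controller;
    import org.springframework.web.bind.annotation.PathVariable;
    import org.springframework.web.bind.annotation.RequestHeader;
    import org.springframework.web.bind.annotation.RequestMapping;
    import org.springframework.web.bind.annotation.RequestMethod;
    import org.springframework.web.bind.annotation.ResponseBody;

    // Illustrative sketch only; not part of this patch.
    @Controller
    @RequestMapping(value = "/demo_jobs")
    public class DemoControllerV2 {

        // The reverted style keeps the whole mapping on one line, including the versioned
        // media type that every Kylin V2 endpoint produces.
        @RequestMapping(value = "/{jobId}/resume", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
        @ResponseBody
        public String resumeV2(@RequestHeader("Accept-Language") String lang, @PathVariable String jobId) {
            return "resumed " + jobId + " (" + lang + ")"; // real handlers call MsgPicker.setMsg(lang) and delegate to jobService
        }
    }
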
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
index 0199b77..aa907a6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
@@ -76,8 +76,7 @@ import com.google.common.collect.Sets;
 public class ModelControllerV2 extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(ModelControllerV2.class);
 
-    public static final char[] VALID_MODELNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
-            .toCharArray();
+    public static final char[] VALID_MODELNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
 
     @Autowired
     @Qualifier("modelMgmtService")
@@ -93,12 +92,7 @@ public class ModelControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getModelsPaging(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "modelName", required = false) String modelName,
-            @RequestParam(value = "projectName", required = false) String projectName,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
-            throws IOException {
+    public EnvelopeResponse getModelsPaging(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "modelName", required = false) String modelName, @RequestParam(value = "projectName", required = false) String projectName, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
         MsgPicker.setMsg(lang);
 
         HashMap<String, Object> data = new HashMap<String, Object>();
@@ -137,15 +131,13 @@ public class ModelControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateModelDescV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody ModelRequest modelRequest) throws IOException {
+    public EnvelopeResponse updateModelDescV2(@RequestHeader("Accept-Language") String lang, @RequestBody ModelRequest modelRequest) throws IOException {
         MsgPicker.setMsg(lang);
 
         DataModelDesc modelDesc = deserializeDataModelDescV2(modelRequest);
         modelService.validateModelDesc(modelDesc);
 
-        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
-                : modelRequest.getProject();
+        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : modelRequest.getProject();
 
         ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
         Checkpoint cp = store.checkpoint();
@@ -168,18 +160,15 @@ public class ModelControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "/draft", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/draft", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateModelDescDraftV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody ModelRequest modelRequest) throws IOException {
+    public EnvelopeResponse updateModelDescDraftV2(@RequestHeader("Accept-Language") String lang, @RequestBody ModelRequest modelRequest) throws IOException {
         MsgPicker.setMsg(lang);
 
         DataModelDesc modelDesc = deserializeDataModelDescV2(modelRequest);
         modelService.validateModelDesc(modelDesc);
 
-        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
-                : modelRequest.getProject();
+        String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : modelRequest.getProject();
 
         ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv());
         Checkpoint cp = store.checkpoint();
@@ -202,11 +191,9 @@ public class ModelControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "/{modelName}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName)
-            throws IOException {
+    public void deleteModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -217,11 +204,9 @@ public class ModelControllerV2 extends BasicController {
         modelService.dropModel(desc);
     }
 
-    @RequestMapping(value = "/{modelName}/clone", method = { RequestMethod.PUT }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}/clone", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse cloneModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName,
-            @RequestBody ModelRequest modelRequest) throws IOException {
+    public EnvelopeResponse cloneModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName, @RequestBody ModelRequest modelRequest) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -281,22 +266,18 @@ public class ModelControllerV2 extends BasicController {
         return desc;
     }
 
-    @RequestMapping(value = "/checkNameAvailability/{modelName}", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/checkNameAvailability/{modelName}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse checkNameAvailabilityV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String modelName) throws IOException {
+    public EnvelopeResponse checkNameAvailabilityV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) throws IOException {
         MsgPicker.setMsg(lang);
 
         boolean ret = modelService.checkNameAvailability(modelName);
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, ret, "");
     }
 
-    @RequestMapping(value = "/{modelName}/usedCols", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}/usedCols", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getUsedColsV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String modelName) {
+    public EnvelopeResponse getUsedColsV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) {
         MsgPicker.setMsg(lang);
 
         Map<String, Set<String>> data = new HashMap<>();
@@ -312,8 +293,7 @@ public class ModelControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    private void populateUsedColResponse(TblColRef tblColRef, Set<CubeInstance> cubeInstances,
-            Map<String, Set<String>> ret) {
+    private void populateUsedColResponse(TblColRef tblColRef, Set<CubeInstance> cubeInstances, Map<String, Set<String>> ret) {
         String columnIdentity = tblColRef.getIdentity();
         if (!ret.containsKey(columnIdentity)) {
             ret.put(columnIdentity, Sets.<String> newHashSet());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
index db3e901..47aa902 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelDescControllerV2.java
@@ -61,8 +61,7 @@ public class ModelDescControllerV2 extends BasicController {
      * @return
      * @throws IOException
      */
-    @RequestMapping(value = "/{modelName}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{modelName}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getModelV2(@RequestHeader("Accept-Language") String lang, @PathVariable String modelName) {
         MsgPicker.setMsg(lang);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
index c316fa5..4c43b61 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ProjectControllerV2.java
@@ -54,8 +54,7 @@ import org.springframework.web.bind.annotation.ResponseBody;
 public class ProjectControllerV2 extends BasicController {
     private static final Logger logger = LoggerFactory.getLogger(ProjectControllerV2.class);
 
-    private static final char[] VALID_PROJECTNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_"
-            .toCharArray();
+    private static final char[] VALID_PROJECTNAME = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_".toCharArray();
 
     @Autowired
     @Qualifier("projectService")
@@ -63,9 +62,7 @@ public class ProjectControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getProjectsV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
+    public EnvelopeResponse getProjectsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
         MsgPicker.setMsg(lang);
 
         int offset = pageOffset * pageSize;
@@ -74,12 +71,9 @@ public class ProjectControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, projectService.listProjects(limit, offset), "");
     }
 
-    @RequestMapping(value = "/readable", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/readable", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getReadableProjectsV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
+    public EnvelopeResponse getReadableProjectsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) {
         MsgPicker.setMsg(lang);
 
         HashMap<String, Object> data = new HashMap<String, Object>();
@@ -104,8 +98,7 @@ public class ProjectControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse saveProjectV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody ProjectRequest projectRequest) throws IOException {
+    public EnvelopeResponse saveProjectV2(@RequestHeader("Accept-Language") String lang, @RequestBody ProjectRequest projectRequest) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -116,8 +109,7 @@ public class ProjectControllerV2 extends BasicController {
         }
 
         if (!StringUtils.containsOnly(projectDesc.getName(), VALID_PROJECTNAME)) {
-            logger.info("Invalid Project name {}, only letters, numbers and underline supported.",
-                    projectDesc.getName());
+            logger.info("Invalid Project name {}, only letters, numbers and underline supported.", projectDesc.getName());
             throw new BadRequestException(String.format(msg.getINVALID_PROJECT_NAME(), projectDesc.getName()));
         }
 
@@ -129,8 +121,7 @@ public class ProjectControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse updateProjectV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody ProjectRequest projectRequest) throws IOException {
+    public EnvelopeResponse updateProjectV2(@RequestHeader("Accept-Language") String lang, @RequestBody ProjectRequest projectRequest) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -159,11 +150,9 @@ public class ProjectControllerV2 extends BasicController {
         return projectDesc;
     }
 
-    @RequestMapping(value = "/{projectName}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{projectName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteProjectV2(@RequestHeader("Accept-Language") String lang, @PathVariable String projectName)
-            throws IOException {
+    public void deleteProjectV2(@RequestHeader("Accept-Language") String lang, @PathVariable String projectName) throws IOException {
         MsgPicker.setMsg(lang);
 
         ProjectInstance project = projectService.getProjectManager().getProject(projectName);

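The paging endpoints in ProjectControllerV2 above (and in the other V2 controllers) all translate pageOffset/pageSize into the offset/limit pair the service layer expects: offset = pageOffset * pageSize, limit = pageSize. A small stand-alone sketch of that arithmetic, using a hypothetical in-memory list instead of projectService:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    // Illustrative sketch of the pageOffset/pageSize convention; the data is made up.
    public class PagingDemo {

        static <T> List<T> page(List<T> all, int pageOffset, int pageSize) {
            int offset = pageOffset * pageSize; // same conversion as getProjectsV2 above
            int limit = pageSize;
            if (offset >= all.size()) {
                return Collections.emptyList(); // past the end: empty page
            }
            return all.subList(offset, Math.min(offset + limit, all.size()));
        }

        public static void main(String[] args) {
            List<String> projects = Arrays.asList("p0", "p1", "p2", "p3", "p4");
            System.out.println(page(projects, 1, 2)); // prints [p2, p3], the second page of size 2
        }
    }
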
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
index d268c4e..7f71801 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/QueryControllerV2.java
@@ -72,8 +72,7 @@ public class QueryControllerV2 extends BasicController {
     @Qualifier("queryService")
     private QueryService queryService;
 
-    @RequestMapping(value = "/query", method = RequestMethod.POST, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/query", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse queryV2(@RequestHeader("Accept-Language") String lang, @RequestBody SQLRequest sqlRequest) {
         MsgPicker.setMsg(lang);
@@ -83,48 +82,37 @@ public class QueryControllerV2 extends BasicController {
 
     // TODO should be just "prepare" a statement, get back expected ResultSetMetaData
 
-    @RequestMapping(value = "/query/prestate", method = RequestMethod.POST, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/query/prestate", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse prepareQueryV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody PrepareSqlRequest sqlRequest) {
+    public EnvelopeResponse prepareQueryV2(@RequestHeader("Accept-Language") String lang, @RequestBody PrepareSqlRequest sqlRequest) {
         MsgPicker.setMsg(lang);
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.doQueryWithCache(sqlRequest), "");
     }
 
-    @RequestMapping(value = "/saved_queries", method = RequestMethod.POST, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saved_queries", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void saveQueryV2(@RequestHeader("Accept-Language") String lang, @RequestBody SaveSqlRequest sqlRequest)
-            throws IOException {
+    public void saveQueryV2(@RequestHeader("Accept-Language") String lang, @RequestBody SaveSqlRequest sqlRequest) throws IOException {
         MsgPicker.setMsg(lang);
 
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
-        Query newQuery = new Query(sqlRequest.getName(), sqlRequest.getProject(), sqlRequest.getSql(),
-                sqlRequest.getDescription());
+        Query newQuery = new Query(sqlRequest.getName(), sqlRequest.getProject(), sqlRequest.getSql(), sqlRequest.getDescription());
 
         queryService.saveQuery(creator, newQuery);
     }
 
-    @RequestMapping(value = "/saved_queries/{id}", method = RequestMethod.DELETE, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saved_queries/{id}", method = RequestMethod.DELETE, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void removeQueryV2(@RequestHeader("Accept-Language") String lang, @PathVariable String id)
-            throws IOException {
+    public void removeQueryV2(@RequestHeader("Accept-Language") String lang, @PathVariable String id) throws IOException {
         MsgPicker.setMsg(lang);
 
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
         queryService.removeQuery(creator, id);
     }
 
-    @RequestMapping(value = "/saved_queries/{project}", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/saved_queries/{project}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getQueriesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String project,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
-            throws IOException {
+    public EnvelopeResponse getQueriesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String project, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
         MsgPicker.setMsg(lang);
 
         HashMap<String, Object> data = new HashMap<String, Object>();
@@ -153,11 +141,9 @@ public class QueryControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
 
-    @RequestMapping(value = "/query/format/{format}", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/query/format/{format}", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void downloadQueryResultV2(@RequestHeader("Accept-Language") String lang, @PathVariable String format,
-            SQLRequest sqlRequest, HttpServletResponse response) {
+    public void downloadQueryResultV2(@RequestHeader("Accept-Language") String lang, @PathVariable String format, SQLRequest sqlRequest, HttpServletResponse response) {
         MsgPicker.setMsg(lang);
 
         SQLResponse result = queryService.doQueryWithCache(sqlRequest);
@@ -187,15 +173,12 @@ public class QueryControllerV2 extends BasicController {
         }
     }
 
-    @RequestMapping(value = "/tables_and_columns", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/tables_and_columns", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getMetadataV2(@RequestHeader("Accept-Language") String lang, MetaRequest metaRequest)
-            throws SQLException, IOException {
+    public EnvelopeResponse getMetadataV2(@RequestHeader("Accept-Language") String lang, MetaRequest metaRequest) throws SQLException, IOException {
         MsgPicker.setMsg(lang);
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.getMetadataV2(metaRequest.getProject()),
-                "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, queryService.getMetadataV2(metaRequest.getProject()), "");
     }
 
     public void setQueryService(QueryService queryService) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
index 3289ca8..c5bebf5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/StreamingControllerV2.java
@@ -78,38 +78,26 @@ public class StreamingControllerV2 extends BasicController {
     @Qualifier("tableService")
     private TableService tableService;
 
-    @RequestMapping(value = "/getConfig", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/getConfig", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getStreamingsV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "table", required = false) String table,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
-            throws IOException {
+    public EnvelopeResponse getStreamingsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "table", required = false) String table, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
         MsgPicker.setMsg(lang);
 
         int offset = pageOffset * pageSize;
         int limit = pageSize;
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                streamingService.getStreamingConfigs(table, limit, offset), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, streamingService.getStreamingConfigs(table, limit, offset), "");
     }
 
-    @RequestMapping(value = "/getKfkConfig", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/getKfkConfig", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getKafkaConfigsV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "kafkaConfigName", required = false) String kafkaConfigName,
-            @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset,
-            @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize)
-            throws IOException {
+    public EnvelopeResponse getKafkaConfigsV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "kafkaConfigName", required = false) String kafkaConfigName, @RequestParam(value = "pageOffset", required = false, defaultValue = "0") Integer pageOffset, @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize) throws IOException {
         MsgPicker.setMsg(lang);
 
         int offset = pageOffset * pageSize;
         int limit = pageSize;
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                kafkaConfigService.getKafkaConfigs(kafkaConfigName, limit, offset), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, kafkaConfigService.getKafkaConfigs(kafkaConfigName, limit, offset), "");
     }
 
     /**
@@ -120,8 +108,7 @@ public class StreamingControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void saveStreamingConfigV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody StreamingRequest streamingRequest) throws IOException {
+    public void saveStreamingConfigV2(@RequestHeader("Accept-Language") String lang, @RequestBody StreamingRequest streamingRequest) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -174,8 +161,7 @@ public class StreamingControllerV2 extends BasicController {
             if (saveKafkaSuccess == false || saveStreamingSuccess == false) {
 
                 if (saveStreamingSuccess == true) {
-                    StreamingConfig sConfig = streamingService.getStreamingManager()
-                            .getStreamingConfig(streamingConfig.getName());
+                    StreamingConfig sConfig = streamingService.getStreamingManager().getStreamingConfig(streamingConfig.getName());
                     try {
                         streamingService.dropStreamingConfig(sConfig);
                     } catch (IOException e) {
@@ -197,8 +183,7 @@ public class StreamingControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void updateStreamingConfigV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody StreamingRequest streamingRequest) throws IOException {
+    public void updateStreamingConfigV2(@RequestHeader("Accept-Language") String lang, @RequestBody StreamingRequest streamingRequest) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -221,11 +206,9 @@ public class StreamingControllerV2 extends BasicController {
         }
     }
 
-    @RequestMapping(value = "/{configName}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{configName}", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void deleteConfigV2(@RequestHeader("Accept-Language") String lang, @PathVariable String configName)
-            throws IOException {
+    public void deleteConfigV2(@RequestHeader("Accept-Language") String lang, @PathVariable String configName) throws IOException {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 

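The saveStreamingConfigV2 hunk above also goes back to longer lines in its compensation logic: two related writes (the streaming config and the Kafka config) are attempted, and if only one succeeds it is rolled back before the request fails. A toy sketch of that shape; saveStreaming, saveKafka and dropStreaming are stand-ins, not the Kylin service calls:

    // Illustrative compensation pattern, mirroring saveStreamingConfigV2 above.
    public class CompensatingSaveDemo {

        static boolean saveStreaming() { return true; }   // pretend the streaming config save succeeded
        static boolean saveKafka() { return false; }       // pretend the Kafka config save failed
        static void dropStreaming() { System.out.println("rolling back streaming config"); }

        public static void main(String[] args) {
            boolean saveStreamingSuccess = saveStreaming();
            boolean saveKafkaSuccess = saveKafka();

            if (!saveKafkaSuccess || !saveStreamingSuccess) {
                if (saveStreamingSuccess) {
                    dropStreaming(); // undo the half-finished write before reporting the error
                }
                throw new IllegalStateException("saving streaming and kafka config failed");
            }
        }
    }
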
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
index 86c317b..4e2506c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/TableControllerV2.java
@@ -65,13 +65,10 @@ public class TableControllerV2 extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getTableDescV2(@RequestHeader("Accept-Language") String lang,
-            @RequestParam(value = "ext", required = false) boolean withExt,
-            @RequestParam(value = "project", required = true) String project) throws IOException {
+    public EnvelopeResponse getTableDescV2(@RequestHeader("Accept-Language") String lang, @RequestParam(value = "ext", required = false) boolean withExt, @RequestParam(value = "project", required = true) String project) throws IOException {
         MsgPicker.setMsg(lang);
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getTableDescByProject(project, withExt),
-                "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getTableDescByProject(project, withExt), "");
     }
 
     /**
@@ -81,11 +78,9 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/{tableName:.+}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/{tableName:.+}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse getTableDescV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String tableName) {
+    public EnvelopeResponse getTableDescV2(@RequestHeader("Accept-Language") String lang, @PathVariable String tableName) {
         MsgPicker.setMsg(lang);
         Message msg = MsgPicker.getMsg();
 
@@ -95,27 +90,20 @@ public class TableControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, table, "");
     }
 
-    @RequestMapping(value = "/load", method = { RequestMethod.POST }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/load", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse loadHiveTablesV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody HiveTableRequestV2 requestV2) throws Exception {
+    public EnvelopeResponse loadHiveTablesV2(@RequestHeader("Accept-Language") String lang, @RequestBody HiveTableRequestV2 requestV2) throws Exception {
         MsgPicker.setMsg(lang);
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                tableService.loadHiveTables(requestV2.getTables(), requestV2.getProject(), requestV2.isNeedProfile()),
-                "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.loadHiveTables(requestV2.getTables(), requestV2.getProject(), requestV2.isNeedProfile()), "");
     }
 
-    @RequestMapping(value = "/load", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/load", method = { RequestMethod.DELETE }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public EnvelopeResponse unLoadHiveTablesV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody HiveTableRequestV2 requestV2) throws IOException {
+    public EnvelopeResponse unLoadHiveTablesV2(@RequestHeader("Accept-Language") String lang, @RequestBody HiveTableRequestV2 requestV2) throws IOException {
         MsgPicker.setMsg(lang);
 
-        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS,
-                tableService.unloadHiveTables(requestV2.getTables(), requestV2.getProject()), "");
+        return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.unloadHiveTables(requestV2.getTables(), requestV2.getProject()), "");
     }
 
     /**
@@ -125,11 +113,9 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/cardinality", method = { RequestMethod.POST }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/cardinality", method = { RequestMethod.POST }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    public void generateCardinalityV2(@RequestHeader("Accept-Language") String lang,
-            @RequestBody HiveTableRequestV2 requestV2) throws Exception {
+    public void generateCardinalityV2(@RequestHeader("Accept-Language") String lang, @RequestBody HiveTableRequestV2 requestV2) throws Exception {
         MsgPicker.setMsg(lang);
 
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -147,8 +133,7 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/hive", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/hive", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     private EnvelopeResponse showHiveDatabasesV2(@RequestHeader("Accept-Language") String lang) throws Exception {
         MsgPicker.setMsg(lang);
@@ -163,11 +148,9 @@ public class TableControllerV2 extends BasicController {
      * @throws IOException
      */
 
-    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET }, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
-    private EnvelopeResponse showHiveTablesV2(@RequestHeader("Accept-Language") String lang,
-            @PathVariable String database) throws Exception {
+    private EnvelopeResponse showHiveTablesV2(@RequestHeader("Accept-Language") String lang, @PathVariable String database) throws Exception {
         MsgPicker.setMsg(lang);
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, tableService.getHiveTableNames(database), "");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
index 1c8cf7d..ebf8b36 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/UserControllerV2.java
@@ -57,8 +57,7 @@ public class UserControllerV2 extends BasicController {
     @Qualifier("userService")
     UserService userService;
 
-    @RequestMapping(value = "/authentication", method = RequestMethod.POST, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/authentication", method = RequestMethod.POST, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse authenticateV2(@RequestHeader("Accept-Language") String lang) {
         EnvelopeResponse response = authenticatedUserV2(lang);
@@ -66,8 +65,7 @@ public class UserControllerV2 extends BasicController {
         return response;
     }
 
-    @RequestMapping(value = "/authentication", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/authentication", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse authenticatedUserV2(@RequestHeader("Accept-Language") String lang) {
         MsgPicker.setMsg(lang);
@@ -94,8 +92,7 @@ public class UserControllerV2 extends BasicController {
         throw new BadRequestException(msg.getAUTH_INFO_NOT_FOUND());
     }
 
-    @RequestMapping(value = "/authentication/authorities", method = RequestMethod.GET, produces = {
-            "application/vnd.apache.kylin-v2+json" })
+    @RequestMapping(value = "/authentication/authorities", method = RequestMethod.GET, produces = { "application/vnd.apache.kylin-v2+json" })
     @ResponseBody
     public EnvelopeResponse getAuthoritiesV2(@RequestHeader("Accept-Language") String lang) throws IOException {
         MsgPicker.setMsg(lang);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/exception/BadRequestException.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/exception/BadRequestException.java b/server-base/src/main/java/org/apache/kylin/rest/exception/BadRequestException.java
index ec6006a..53d619a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/exception/BadRequestException.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/exception/BadRequestException.java
@@ -28,7 +28,7 @@ import org.springframework.web.bind.annotation.ResponseStatus;
  */
 @ResponseStatus(value = HttpStatus.BAD_REQUEST)
 public class BadRequestException extends RuntimeException {
-
+    
     private static final long serialVersionUID = -6798154278095441848L;
 
     private String code;
@@ -51,6 +51,7 @@ public class BadRequestException extends RuntimeException {
         this.code = code;
     }
 
+
     public String getCode() {
         return code;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/init/InitialTaskManager.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/init/InitialTaskManager.java b/server-base/src/main/java/org/apache/kylin/rest/init/InitialTaskManager.java
index 77a16a9..8bba674 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/init/InitialTaskManager.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/init/InitialTaskManager.java
@@ -42,7 +42,7 @@ public class InitialTaskManager implements InitializingBean {
     private void runInitialTasks() {
         // init metrics system for kylin
         QueryMetricsFacade.init();
-
+        
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String initTasks = kylinConfig.getInitTasks();
         if (!StringUtils.isEmpty(initTasks)) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/job/HybridCubeCLI.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/job/HybridCubeCLI.java b/server-base/src/main/java/org/apache/kylin/rest/job/HybridCubeCLI.java
index 8670c25..64fcbc8 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/job/HybridCubeCLI.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/job/HybridCubeCLI.java
@@ -18,10 +18,6 @@
 
 package org.apache.kylin.rest.job;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -43,6 +39,10 @@ import org.apache.kylin.storage.hybrid.HybridManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * 1. Create new HybridCube
  * bin/kylin.sh org.apache.kylin.tool.HybridCubeCLI -action create -name hybrid_name -project project_name -model model_name -cubes cube1,cube2
@@ -55,21 +55,15 @@ public class HybridCubeCLI extends AbstractApplication {
 
     private static final Logger logger = LoggerFactory.getLogger(HybridCubeCLI.class);
 
-    private static final Option OPTION_ACTION = OptionBuilder.withArgName("action").hasArg().isRequired(true)
-            .withDescription("create/update/delete").create("action");
+    private static final Option OPTION_ACTION = OptionBuilder.withArgName("action").hasArg().isRequired(true).withDescription("create/update/delete").create("action");
 
-    private static final Option OPTION_HYBRID_NAME = OptionBuilder.withArgName("name").hasArg().isRequired(true)
-            .withDescription("HybridCube name").create("name");
+    private static final Option OPTION_HYBRID_NAME = OptionBuilder.withArgName("name").hasArg().isRequired(true).withDescription("HybridCube name").create("name");
 
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(true)
-            .withDescription("the target project for the hybrid cube").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(true).withDescription("the target project for the hybrid cube").create("project");
 
-    private static final Option OPTION_MODEL = OptionBuilder.withArgName("model").hasArg().isRequired(true)
-            .withDescription("the target model for the hybrid cube").create("model");
+    private static final Option OPTION_MODEL = OptionBuilder.withArgName("model").hasArg().isRequired(true).withDescription("the target model for the hybrid cube").create("model");
 
-    private static final Option OPTION_CUBES = OptionBuilder.withArgName("cubes").hasArg().isRequired(false)
-            .withDescription("the cubes used in HybridCube, seperated by comma, empty if to delete HybridCube")
-            .create("cubes");
+    private static final Option OPTION_CUBES = OptionBuilder.withArgName("cubes").hasArg().isRequired(false).withDescription("the cubes used in HybridCube, seperated by comma, empty if to delete HybridCube").create("cubes");
 
     private final Options options;
 
@@ -158,32 +152,27 @@ public class HybridCubeCLI extends AbstractApplication {
 
     }
 
-    private HybridInstance create(String hybridName, List<RealizationEntry> realizationEntries, String projectName,
-            String owner) throws IOException {
+    private HybridInstance create(String hybridName, List<RealizationEntry> realizationEntries, String projectName, String owner) throws IOException {
         checkSegmentOffset(realizationEntries);
         HybridInstance hybridInstance = HybridInstance.create(kylinConfig, hybridName, realizationEntries);
         store.putResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
-        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID,
-                hybridInstance.getName(), projectName, owner);
+        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
         hybridManager.reloadHybridInstance(hybridName);
         logger.info("HybridInstance was created at: " + hybridInstance.getResourcePath());
         return hybridInstance;
     }
 
-    private void update(HybridInstance hybridInstance, List<RealizationEntry> realizationEntries, String projectName,
-            String owner) throws IOException {
+    private void update(HybridInstance hybridInstance, List<RealizationEntry> realizationEntries, String projectName, String owner) throws IOException {
         checkSegmentOffset(realizationEntries);
         hybridInstance.setRealizationEntries(realizationEntries);
         store.putResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
-        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID,
-                hybridInstance.getName(), projectName, owner);
+        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
         hybridManager.reloadHybridInstance(hybridInstance.getName());
         logger.info("HybridInstance was updated at: " + hybridInstance.getResourcePath());
     }
 
     private void delete(HybridInstance hybridInstance) throws IOException {
-        ProjectManager.getInstance(kylinConfig).removeRealizationsFromProjects(RealizationType.HYBRID,
-                hybridInstance.getName());
+        ProjectManager.getInstance(kylinConfig).removeRealizationsFromProjects(RealizationType.HYBRID, hybridInstance.getName());
         store.deleteResource(hybridInstance.getResourcePath());
         hybridManager.reloadAllHybridInstance();
         logger.info("HybridInstance was deleted at: " + hybridInstance.getResourcePath());
@@ -208,8 +197,7 @@ public class HybridCubeCLI extends AbstractApplication {
                 lastOffset = segment.getSourceOffsetEnd();
             } else {
                 if (lastOffset > segment.getSourceOffsetStart()) {
-                    throw new RuntimeException("Segments has overlap, could not hybrid. Last Segment End: " + lastOffset
-                            + ", Next Segment Start: " + segment.getSourceOffsetStart());
+                    throw new RuntimeException("Segments has overlap, could not hybrid. Last Segment End: " + lastOffset + ", Next Segment Start: " + segment.getSourceOffsetStart());
                 }
                 lastOffset = segment.getSourceOffsetEnd();
             }

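HybridCubeCLI above (like MetadataCleanupJob and StorageCleanupJob) defines its flags with the commons-cli OptionBuilder chain that this revert collapses back onto single lines. A minimal runnable sketch of that builder pattern; the OptionDemo class and the example argument values are placeholders:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;

    // Illustrative sketch of the commons-cli option style used by the reverted tool classes.
    public class OptionDemo {

        @SuppressWarnings("static-access") // OptionBuilder's fluent API is all static, hence the same suppression seen above
        public static void main(String[] args) throws Exception {
            Option action = OptionBuilder.withArgName("action").hasArg().isRequired(true).withDescription("create/update/delete").create("action");
            Option name = OptionBuilder.withArgName("name").hasArg().isRequired(true).withDescription("HybridCube name").create("name");

            Options options = new Options();
            options.addOption(action);
            options.addOption(name);

            CommandLine cmd = new GnuParser().parse(options, args); // e.g. -action create -name hybrid_demo
            System.out.println(cmd.getOptionValue("action") + " / " + cmd.getOptionValue("name"));
        }
    }
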
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java b/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
index f85789b..d92107b 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/job/StorageCleanupJob.java
@@ -57,8 +57,8 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -69,18 +69,15 @@ import com.google.common.collect.Maps;
 public class StorageCleanupJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
-    protected static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false)
-            .withDescription("Delete the unused storage").create("delete");
-    protected static final Option OPTION_FORCE = OptionBuilder.withArgName("force").hasArg().isRequired(false)
-            .withDescription("Warning: will delete all kylin intermediate hive tables").create("force");
+    protected static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused storage").create("delete");
+    protected static final Option OPTION_FORCE = OptionBuilder.withArgName("force").hasArg().isRequired(false).withDescription("Warning: will delete all kylin intermediate hive tables").create("force");
 
     protected static final Logger logger = LoggerFactory.getLogger(StorageCleanupJob.class);
     public static final int deleteTimeout = 10; // Unit minute
 
     protected boolean delete = false;
     protected boolean force = false;
-    protected static ExecutableManager executableManager = ExecutableManager
-            .getInstance(KylinConfig.getInstanceFromEnv());
+    protected static ExecutableManager executableManager = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
@@ -103,8 +100,7 @@ public class StorageCleanupJob extends AbstractApplication {
                     String tablename = seg.getStorageLocationIdentifier();
                     if (allTablesNeedToBeDropped.contains(tablename)) {
                         allTablesNeedToBeDropped.remove(tablename);
-                        logger.info("Exclude table " + tablename + " from drop list, as the table belongs to cube "
-                                + cube.getName() + " with status " + cube.getStatus());
+                        logger.info("Exclude table " + tablename + " from drop list, as the table belongs to cube " + cube.getName() + " with status " + cube.getStatus());
                     }
                 }
             }
@@ -118,8 +114,7 @@ public class StorageCleanupJob extends AbstractApplication {
                     try {
                         futureTask.get(deleteTimeout, TimeUnit.MINUTES);
                     } catch (TimeoutException e) {
-                        logger.warn("It fails to delete htable " + htableName + ", for it cost more than "
-                                + deleteTimeout + " minutes!");
+                        logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + deleteTimeout + " minutes!");
                         futureTask.cancel(true);
                     } catch (Exception e) {
                         e.printStackTrace();
@@ -213,8 +208,7 @@ public class StorageCleanupJob extends AbstractApplication {
             if (!state.isFinalState()) {
                 String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(), jobId);
                 allHdfsPathsNeedToBeDeleted.remove(path);
-                logger.info("Skip " + path + " from deletion list, as the path belongs to job " + jobId
-                        + " with status " + state);
+                logger.info("Skip " + path + " from deletion list, as the path belongs to job " + jobId + " with status " + state);
             }
         }
 
@@ -225,8 +219,7 @@ public class StorageCleanupJob extends AbstractApplication {
                 if (jobUuid != null && jobUuid.equals("") == false) {
                     String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(), jobUuid);
                     allHdfsPathsNeedToBeDeleted.remove(path);
-                    logger.info("Skip " + path + " from deletion list, as the path belongs to segment " + seg
-                            + " of cube " + cube.getName());
+                    logger.info("Skip " + path + " from deletion list, as the path belongs to segment " + seg + " of cube " + cube.getName());
                 }
             }
         }
@@ -352,21 +345,17 @@ public class StorageCleanupJob extends AbstractApplication {
                     String segmentId = uuid.replace("_", "-");
 
                     if (segmentId2JobId.containsKey(segmentId)) {
-                        String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(),
-                                segmentId2JobId.get(segmentId)) + "/" + tableToDelete;
+                        String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(), segmentId2JobId.get(segmentId)) + "/" + tableToDelete;
                         Path externalDataPath = new Path(path);
                         FileSystem fs = HadoopUtil.getWorkingFileSystem();
                         if (fs.exists(externalDataPath)) {
                             fs.delete(externalDataPath, true);
                             logger.info("Hive table {}'s external path {} deleted", tableToDelete, path);
                         } else {
-                            logger.info(
-                                    "Hive table {}'s external path {} not exist. It's normal if kylin.source.hive.keep-flat-table set false (By default)",
-                                    tableToDelete, path);
+                            logger.info("Hive table {}'s external path {} not exist. It's normal if kylin.source.hive.keep-flat-table set false (By default)", tableToDelete, path);
                         }
                     } else {
-                        logger.warn("Hive table {}'s job ID not found, segmentId2JobId: {}", tableToDelete,
-                                segmentId2JobId.toString());
+                        logger.warn("Hive table {}'s job ID not found, segmentId2JobId: {}", tableToDelete, segmentId2JobId.toString());
                     }
                 }
             } catch (IOException e) {

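StorageCleanupJob's cleanUnusedHBaseTables above waits on each table drop with a bounded futureTask.get(deleteTimeout, TimeUnit.MINUTES) and cancels the task on timeout. A self-contained sketch of that bounded-wait pattern, with a placeholder Runnable standing in for the HBase drop:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    // Illustrative sketch; the table name and the no-op drop are made up.
    public class TimedDeleteDemo {

        public static void main(String[] args) throws Exception {
            ExecutorService pool = Executors.newSingleThreadExecutor();
            int deleteTimeout = 10; // minutes, same constant as StorageCleanupJob

            Future<?> futureTask = pool.submit(new Runnable() {
                public void run() {
                    System.out.println("dropping KYLIN_DEMO_TABLE ..."); // stand-in for the HBase table drop
                }
            });

            try {
                futureTask.get(deleteTimeout, TimeUnit.MINUTES);
            } catch (TimeoutException e) {
                futureTask.cancel(true); // give up rather than block the cleanup job
            } finally {
                pool.shutdown();
            }
        }
    }
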
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetrics.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetrics.java b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetrics.java
index 74857a3..eb1bed6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetrics.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetrics.java
@@ -18,15 +18,15 @@
 
 package org.apache.kylin.rest.metrics;
 
-import javax.annotation.concurrent.ThreadSafe;
-
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
-import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
 import org.apache.hadoop.metrics2.lib.MutableQuantiles;
 import org.apache.hadoop.metrics2.lib.MutableRate;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+
+import javax.annotation.concurrent.ThreadSafe;
 
 /**
  * properties and methods about query.
@@ -69,14 +69,10 @@ public class QueryMetrics {
         for (int i = 0; i < intervals.length; i++) {
             int interval = intervals[i];
 
-            queryLatencyTimeMillisQuantiles[i] = registry.newQuantiles("QueryLatency" + interval + "s",
-                    "Query queue time in milli second", "ops", "", interval);
-            scanRowCountQuantiles[i] = registry.newQuantiles("ScanRowCount" + interval + "s",
-                    "Scan row count in milli second", "ops", "", interval);
-            resultRowCountQuantiles[i] = registry.newQuantiles("ResultRowCount" + interval + "s",
-                    "Result row count in milli second", "ops", "", interval);
-            cacheHitCountQuantiles[i] = registry.newQuantiles("CacheHitCount" + interval + "s",
-                    "Cache Hit Count in milli second", "ops", "", interval);
+            queryLatencyTimeMillisQuantiles[i] = registry.newQuantiles("QueryLatency" + interval + "s", "Query queue time in milli second", "ops", "", interval);
+            scanRowCountQuantiles[i] = registry.newQuantiles("ScanRowCount" + interval + "s", "Scan row count in milli second", "ops", "", interval);
+            resultRowCountQuantiles[i] = registry.newQuantiles("ResultRowCount" + interval + "s", "Result row count in milli second", "ops", "", interval);
+            cacheHitCountQuantiles[i] = registry.newQuantiles("CacheHitCount" + interval + "s", "Cache Hit Count in milli second", "ops", "", interval);
         }
 
         queryLatency = registry.newRate("QueryLatency", "", true);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
index 37003f8..48a8e58 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
@@ -18,10 +18,6 @@
 
 package org.apache.kylin.rest.metrics;
 
-import java.util.concurrent.ConcurrentHashMap;
-
-import javax.annotation.concurrent.ThreadSafe;
-
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.kylin.common.KylinConfig;
@@ -30,6 +26,9 @@ import org.apache.kylin.rest.response.SQLResponse;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.annotation.concurrent.ThreadSafe;
+import java.util.concurrent.ConcurrentHashMap;
+
 /**
  * The entrance of metrics features.
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java b/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
index a9f7807..53cbaba 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/msg/CnMessage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.msg;
 /**
  * Created by luwei on 17-4-12.
  */
-public class CnMessage extends Message {
+public class CnMessage extends Message{
 
     private static CnMessage instance = null;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/request/HiveTableRequestV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/request/HiveTableRequestV2.java b/server-base/src/main/java/org/apache/kylin/rest/request/HiveTableRequestV2.java
index 8f46816..f043d15 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/request/HiveTableRequestV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/request/HiveTableRequestV2.java
@@ -32,6 +32,7 @@ public class HiveTableRequestV2 {
 
     private boolean needProfile = true;
 
+
     public String[] getTables() {
         return tables;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/request/JobBuildRequest2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/request/JobBuildRequest2.java b/server-base/src/main/java/org/apache/kylin/rest/request/JobBuildRequest2.java
index 16316c8..6e9117d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/request/JobBuildRequest2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/request/JobBuildRequest2.java
@@ -18,10 +18,10 @@
 
 package org.apache.kylin.rest.request;
 
-import java.util.Map;
-
 import com.google.common.collect.Maps;
 
+import java.util.Map;
+
 public class JobBuildRequest2 {
 
     private long sourceOffsetStart;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/request/PrepareSqlRequest.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/request/PrepareSqlRequest.java b/server-base/src/main/java/org/apache/kylin/rest/request/PrepareSqlRequest.java
index 50db98a..e296248 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/request/PrepareSqlRequest.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/request/PrepareSqlRequest.java
@@ -41,7 +41,7 @@ public class PrepareSqlRequest extends SQLRequest {
         this.params = params;
     }
 
-    public static class StateParam implements Serializable {
+    public static class StateParam implements Serializable{
         private String className;
         private String value;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/response/ErrorResponse.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/response/ErrorResponse.java b/server-base/src/main/java/org/apache/kylin/rest/response/ErrorResponse.java
index 64e5f82..508a35f 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/response/ErrorResponse.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/response/ErrorResponse.java
@@ -38,7 +38,7 @@ public class ErrorResponse extends EnvelopeResponse {
 
     public ErrorResponse(String url, Exception exception) {
         super();
-
+        
         this.url = url;
         this.exception = exception.getLocalizedMessage();
         this.msg = exception.getLocalizedMessage();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java b/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
index 10972ad..d841dee 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
@@ -66,8 +66,7 @@ public class SQLResponse implements Serializable {
     public SQLResponse() {
     }
 
-    public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, int affectedRowCount,
-            boolean isException, String exceptionMessage) {
+    public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, int affectedRowCount, boolean isException, String exceptionMessage) {
         this.columnMetas = columnMetas;
         this.results = results;
         this.affectedRowCount = affectedRowCount;
@@ -75,8 +74,7 @@ public class SQLResponse implements Serializable {
         this.exceptionMessage = exceptionMessage;
     }
 
-    public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube,
-            int affectedRowCount, boolean isException, String exceptionMessage) {
+    public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube, int affectedRowCount, boolean isException, String exceptionMessage) {
         this.columnMetas = columnMetas;
         this.results = results;
         this.cube = cube;
@@ -85,8 +83,7 @@ public class SQLResponse implements Serializable {
         this.exceptionMessage = exceptionMessage;
     }
 
-    public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube,
-            int affectedRowCount, boolean isException, String exceptionMessage, boolean isPartial, boolean isAdhoc) {
+    public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube, int affectedRowCount, boolean isException, String exceptionMessage, boolean isPartial, boolean isAdhoc) {
         this.columnMetas = columnMetas;
         this.results = results;
         this.cube = cube;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 7100bd6..b595c72 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.client.Table;
 
 /**
  */
-@Deprecated //use ResourceStore interface instead.
+@Deprecated  //use ResourceStore interface instead.
 public interface AclHBaseStorage {
 
     String ACL_INFO_FAMILY = "i";

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
index 73cd7ed..592791c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
@@ -47,8 +47,7 @@ public class AuthoritiesPopulator extends DefaultLdapAuthoritiesPopulator {
      * @param contextSource
      * @param groupSearchBase
      */
-    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole,
-            String defaultRole) {
+    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole, String defaultRole) {
         super(contextSource, groupSearchBase);
         this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/CrossDomainFilter.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/CrossDomainFilter.java b/server-base/src/main/java/org/apache/kylin/rest/security/CrossDomainFilter.java
index 539836e..7d9d9ac 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/CrossDomainFilter.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/CrossDomainFilter.java
@@ -52,14 +52,11 @@ public class CrossDomainFilter implements Filter {
      * javax.servlet.ServletResponse, javax.servlet.FilterChain)
      */
     @Override
-    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
-            throws IOException, ServletException {
+    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
         if (KylinConfig.getInstanceFromEnv().isWebCrossDomainEnabled()) {
             ((HttpServletResponse) response).addHeader("Access-Control-Allow-Origin", "*");
-            ((HttpServletResponse) response).addHeader("Access-Control-Allow-Methods",
-                    "GET, POST, PUT, DELETE, OPTIONS");
-            ((HttpServletResponse) response).addHeader("Access-Control-Allow-Headers",
-                    "Origin, No-Cache, X-Requested-With, If-Modified-Since, Pragma, Last-Modified, Cache-Control, Expires, Content-Type, X-E4M-With, Accept, Authorization");
+            ((HttpServletResponse) response).addHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS");
+            ((HttpServletResponse) response).addHeader("Access-Control-Allow-Headers", "Origin, No-Cache, X-Requested-With, If-Modified-Since, Pragma, Last-Modified, Cache-Control, Expires, Content-Type, X-E4M-With, Accept, Authorization");
         }
 
         chain.doFilter(request, response);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/server-base/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java b/server-base/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
index efc5196..dc475c9 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.rest.security;
 
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.rest.service.UserService;
 import org.slf4j.Logger;
@@ -33,9 +35,6 @@ import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.security.core.userdetails.UsernameNotFoundException;
 import org.springframework.util.Assert;
 
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
-
 import net.sf.ehcache.Cache;
 import net.sf.ehcache.CacheManager;
 import net.sf.ehcache.Element;
@@ -92,8 +91,7 @@ public class KylinAuthenticationProvider implements AuthenticationProvider {
 
             if (authed.getDetails() == null) {
                 //authed.setAuthenticated(false);
-                throw new UsernameNotFoundException(
-                        "User not found in LDAP, check whether he/she has been added to the groups.");
+                throw new UsernameNotFoundException("User not found in LDAP, check whether he/she has been added to the groups.");
             }
 
             if (authed.getDetails() instanceof UserDetails) {


[30/67] [abbrv] kylin git commit: KYLIN-2634 report clearer error on HDFS resource failure

Posted by li...@apache.org.
KYLIN-2634 report clearer error on HDFS resource failure


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/eafbe732
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/eafbe732
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/eafbe732

Branch: refs/heads/master
Commit: eafbe7325e3ce96e7bcedb4f14f6266d16c6cf3c
Parents: 8edff35
Author: Li Yang <li...@apache.org>
Authored: Sat May 27 15:21:04 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Sat May 27 15:24:35 2017 +0800

----------------------------------------------------------------------
 .../kylin/storage/hbase/HBaseResourceStore.java | 36 ++++++++++++++------
 1 file changed, 25 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/eafbe732/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index a2e0229..81349ef 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -149,9 +149,11 @@ public class HBaseResourceStore extends ResourceStore {
     @Override
     public String getMetaStoreUUID() throws IOException {
         if (!exists(ResourceStore.METASTORE_UUID_TAG)) {
-            putResource(ResourceStore.METASTORE_UUID_TAG, new StringEntity(createMetaStoreUUID()), 0, StringEntity.serializer);
+            putResource(ResourceStore.METASTORE_UUID_TAG, new StringEntity(createMetaStoreUUID()), 0,
+                    StringEntity.serializer);
         }
-        StringEntity entity = getResource(ResourceStore.METASTORE_UUID_TAG, StringEntity.class, StringEntity.serializer);
+        StringEntity entity = getResource(ResourceStore.METASTORE_UUID_TAG, StringEntity.class,
+                StringEntity.serializer);
         return entity.toString();
     }
 
@@ -202,7 +204,8 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     @Override
-    protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive) throws IOException {
+    protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive)
+            throws IOException {
         FilterList filter = generateTimeFilterList(timeStart, timeEndExclusive);
         final List<RawResource> result = Lists.newArrayList();
         try {
@@ -226,11 +229,13 @@ public class HBaseResourceStore extends ResourceStore {
     private FilterList generateTimeFilterList(long timeStart, long timeEndExclusive) {
         FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
         if (timeStart != Long.MIN_VALUE) {
-            SingleColumnValueFilter timeStartFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS, CompareFilter.CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(timeStart));
+            SingleColumnValueFilter timeStartFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS,
+                    CompareFilter.CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(timeStart));
             filterList.addFilter(timeStartFilter);
         }
         if (timeEndExclusive != Long.MAX_VALUE) {
-            SingleColumnValueFilter timeEndFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS, CompareFilter.CompareOp.LESS, Bytes.toBytes(timeEndExclusive));
+            SingleColumnValueFilter timeEndFilter = new SingleColumnValueFilter(B_FAMILY, B_COLUMN_TS,
+                    CompareFilter.CompareOp.LESS, Bytes.toBytes(timeEndExclusive));
             filterList.addFilter(timeEndFilter);
         }
         return filterList.getFilters().size() == 0 ? null : filterList;
@@ -245,7 +250,11 @@ public class HBaseResourceStore extends ResourceStore {
             Path redirectPath = bigCellHDFSPath(resPath);
             FileSystem fileSystem = HadoopUtil.getWorkingFileSystem();
 
-            return fileSystem.open(redirectPath);
+            try {
+                return fileSystem.open(redirectPath);
+            } catch (IOException ex) {
+                throw new IOException("Failed to read resource at " + resPath, ex);
+            }
         } else {
             return new ByteArrayInputStream(value);
         }
@@ -291,7 +300,8 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     @Override
-    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
+    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
+            throws IOException, IllegalStateException {
         Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         try {
             byte[] row = Bytes.toBytes(resPath);
@@ -299,10 +309,12 @@ public class HBaseResourceStore extends ResourceStore {
             Put put = buildPut(resPath, newTS, row, content, table);
 
             boolean ok = table.checkAndPut(row, B_FAMILY, B_COLUMN_TS, bOldTS, put);
-            logger.trace("Update row " + resPath + " from oldTs: " + oldTS + ", to newTs: " + newTS + ", operation result: " + ok);
+            logger.trace("Update row " + resPath + " from oldTs: " + oldTS + ", to newTs: " + newTS
+                    + ", operation result: " + ok);
             if (!ok) {
                 long real = getResourceTimestampImpl(resPath);
-                throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
+                throw new IllegalStateException(
+                        "Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
             }
 
             return newTS;
@@ -355,7 +367,8 @@ public class HBaseResourceStore extends ResourceStore {
 
     }
 
-    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp)
+            throws IOException {
         byte[] rowkey = Bytes.toBytes(path);
 
         Get get = new Get(rowkey);
@@ -400,7 +413,8 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
-        int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
+        int kvSizeLimit = Integer
+                .parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
         if (content.length > kvSizeLimit) {
             writeLargeCellToHdfs(resPath, content, table);
             content = BytesUtil.EMPTY_BYTE_ARRAY;
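
For readers skimming the diff above: the fix wraps the raw HDFS read in a try/catch and re-throws the IOException with the logical resource path attached, so a failure surfaces as "Failed to read resource at <path>" rather than a bare HDFS stack trace. A minimal sketch of the same pattern in isolation (the helper class and method names below are illustrative, not part of the commit):

    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    final class BigCellReader {

        // Re-throw the low-level HDFS failure with the metadata resource path
        // attached, so the caller can tell which entry could not be read.
        static InputStream openBigCell(FileSystem fs, Path redirectPath, String resPath) throws IOException {
            try {
                return fs.open(redirectPath);
            } catch (IOException ex) {
                throw new IOException("Failed to read resource at " + resPath, ex);
            }
        }
    }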


[40/67] [abbrv] kylin git commit: KYLIN-2515 add IAdhocConverter to allow customized adhoc query converter

Posted by li...@apache.org.
KYLIN-2515 add IAdhocConverter to allow customized adhoc query converter


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6ce9983f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6ce9983f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6ce9983f

Branch: refs/heads/master
Commit: 6ce9983f70fb497a50b660289cd11a8714cc731e
Parents: 9fafd27
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri May 26 16:50:49 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Sat May 27 21:11:03 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    |   4 +
 .../kylin/storage/adhoc/AdHocRunnerBase.java    |   6 +-
 .../kylin/storage/adhoc/HiveAdhocConverter.java | 180 +++++++++++++++++++
 .../kylin/storage/adhoc/IAdhocConverter.java    |  25 +++
 .../storage/adhoc/HiveAdhocConverterTest.java   |  62 +++++++
 .../org/apache/kylin/rest/util/AdHocUtil.java   |  35 ++--
 6 files changed, 297 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6ce9983f/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 05be701..7f366d8 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -968,6 +968,10 @@ abstract public class KylinConfigBase implements Serializable {
     public String getAdHocRunnerClassName() {
         return getOptional("kylin.query.ad-hoc.runner.class-name", "");
     }
+    
+    public String getAdHocConverterClassName() {
+        return getOptional("kylin.query.ad-hoc.converter.class-name", "org.apache.kylin.storage.adhoc.HiveAdhocConverter");
+    }
 
     public String getJdbcUrl() {
         return getOptional("kylin.query.ad-hoc.jdbc.url", "");

http://git-wip-us.apache.org/repos/asf/kylin/blob/6ce9983f/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
index 7e811a4..7b870c6 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/AdHocRunnerBase.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.storage.adhoc;
 
+import java.util.List;
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.List;
-
 public abstract class AdHocRunnerBase {
 
     private static final Logger logger = LoggerFactory.getLogger(AdHocRunnerBase.class);
@@ -45,4 +45,4 @@ public abstract class AdHocRunnerBase {
     public abstract void init();
 
     public abstract void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas) throws Exception;
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6ce9983f/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java
new file mode 100644
index 0000000..1a43557
--- /dev/null
+++ b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/HiveAdhocConverter.java
@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.storage.adhoc;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+//TODO: Some workaround ways to make sql readable by hive parser, should replaced it with a more well-designed way
+public class HiveAdhocConverter implements IAdhocConverter {
+
+    private static final Logger logger = LoggerFactory.getLogger(HiveAdhocConverter.class);
+
+    private static final Pattern EXTRACT_PATTERN = Pattern.compile("\\s+extract\\s*(\\()\\s*(.*?)\\s*from(\\s+)", Pattern.CASE_INSENSITIVE);
+    private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\()\\s*select\\s", Pattern.CASE_INSENSITIVE);
+    private static final Pattern CAST_PATTERN = Pattern.compile("CAST\\((.*?) (?i)AS\\s*(.*?)\\s*\\)", Pattern.CASE_INSENSITIVE);
+    private static final Pattern CONCAT_PATTERN = Pattern.compile("(['_a-z0-9A-Z]+)\\|\\|(['_a-z0-9A-Z]+)", Pattern.CASE_INSENSITIVE);
+
+    public static String replaceString(String originString, String fromString, String toString) {
+        return originString.replace(fromString, toString);
+    }
+
+    public static String extractReplace(String originString) {
+        Matcher extractMatcher = EXTRACT_PATTERN.matcher(originString);
+        String replacedString = originString;
+        Map<Integer, Integer> parenthesesPairs = null;
+
+        while (extractMatcher.find()) {
+            if (parenthesesPairs == null) {
+                parenthesesPairs = findParenthesesPairs(originString);
+            }
+
+            String functionStr = extractMatcher.group(2);
+            int startIdx = extractMatcher.end(3);
+            int endIdx = parenthesesPairs.get(extractMatcher.start(1));
+            String extractInner = originString.substring(startIdx, endIdx);
+            int originStart = extractMatcher.start(0) + 1;
+            int originEnd = endIdx + 1;
+
+            replacedString = replaceString(replacedString, originString.substring(originStart, originEnd), functionStr + "(" + extractInner + ")");
+        }
+
+        return replacedString;
+    }
+
+    public static String castRepalce(String originString) {
+        Matcher castMatcher = CAST_PATTERN.matcher(originString);
+        String replacedString = originString;
+
+        while (castMatcher.find()) {
+            String castStr = castMatcher.group();
+            String type = castMatcher.group(2);
+            String supportedType = "";
+            switch (type.toUpperCase()) {
+            case "INTEGER":
+                supportedType = "int";
+                break;
+            case "SHORT":
+                supportedType = "smallint";
+                break;
+            case "LONG":
+                supportedType = "bigint";
+                break;
+            default:
+                supportedType = type;
+            }
+
+            if (!supportedType.equals(type)) {
+                String replacedCastStr = castStr.replace(type, supportedType);
+                replacedString = replaceString(replacedString, castStr, replacedCastStr);
+            }
+        }
+
+        return replacedString;
+    }
+
+    public static String subqueryRepalce(String originString) {
+        Matcher subqueryMatcher = FROM_PATTERN.matcher(originString);
+        String replacedString = originString;
+        Map<Integer, Integer> parenthesesPairs = null;
+
+        while (subqueryMatcher.find()) {
+            if (parenthesesPairs == null) {
+                parenthesesPairs = findParenthesesPairs(originString);
+            }
+
+            int startIdx = subqueryMatcher.start(1);
+            int endIdx = parenthesesPairs.get(startIdx) + 1;
+
+            replacedString = replaceString(replacedString, originString.substring(startIdx, endIdx), originString.substring(startIdx, endIdx) + " as alias");
+        }
+
+        return replacedString;
+    }
+
+    public static String concatReplace(String originString) {
+        Matcher concatMatcher = CONCAT_PATTERN.matcher(originString);
+        String replacedString = originString;
+
+        while (concatMatcher.find()) {
+            String leftString = concatMatcher.group(1);
+            String rightString = concatMatcher.group(2);
+            replacedString = replaceString(replacedString, leftString + "||" + rightString, "concat(" + leftString + "," + rightString + ")");
+        }
+
+        return replacedString;
+    }
+
+    public static String doConvert(String originStr) {
+        // Step1.Replace " with `
+        String convertedSql = replaceString(originStr, "\"", "`");
+
+        // Step2.Replace extract functions
+        convertedSql = extractReplace(convertedSql);
+
+        // Step3.Replace cast type string
+        convertedSql = castRepalce(convertedSql);
+
+        // Step4.Replace sub query
+        convertedSql = subqueryRepalce(convertedSql);
+
+        // Step5.Replace char_length with length
+        convertedSql = replaceString(convertedSql, "char_length", "length");
+
+        // Step6.Replace "||" with concat
+        convertedSql = concatReplace(convertedSql);
+
+        return convertedSql;
+    }
+
+    private static Map<Integer, Integer> findParenthesesPairs(String sql) {
+        Map<Integer, Integer> result = new HashMap<>();
+        if (sql.length() > 1) {
+            Stack<Integer> lStack = new Stack<>();
+            boolean inStrVal = false;
+            for (int i = 0; i < sql.length(); i++) {
+                switch (sql.charAt(i)) {
+                case '(':
+                    if (!inStrVal) {
+                        lStack.push(i);
+                    }
+                    break;
+                case ')':
+                    if (!inStrVal && !lStack.empty()) {
+                        result.put(lStack.pop(), i);
+                    }
+                    break;
+                default:
+                    break;
+                }
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public String convert(String originSql) {
+        return doConvert(originSql);
+    }
+}
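
The doConvert() pipeline above chains six textual rewrites (quote characters, EXTRACT, CAST types, subquery aliasing, char_length, and || concatenation). A small sketch, not part of the commit, that feeds a made-up query through the whole pipeline; the output shown in the comment follows from the rewrite rules above and is approximate:

    import org.apache.kylin.storage.adhoc.HiveAdhocConverter;

    public class DoConvertDemo {
        public static void main(String[] args) {
            // Made-up query that triggers several rewrite steps at once:
            // double quotes, EXTRACT, CAST(... AS INTEGER) and || concatenation.
            String sql = "select \"SELLER_ID\", lstg_format_name||'x'"
                    + " from test_kylin_fact"
                    + " where EXTRACT(YEAR FROM CAST(cal_dt AS INTEGER)) > 2012";

            // Roughly: select `SELLER_ID`, concat(lstg_format_name,'x')
            //          from test_kylin_fact where YEAR(CAST(cal_dt AS int)) > 2012
            System.out.println(HiveAdhocConverter.doConvert(sql));
        }
    }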

http://git-wip-us.apache.org/repos/asf/kylin/blob/6ce9983f/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java
new file mode 100644
index 0000000..d5815bb
--- /dev/null
+++ b/core-storage/src/main/java/org/apache/kylin/storage/adhoc/IAdhocConverter.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.storage.adhoc;
+
+/**
+ * convert the query to satisfy the parser of adhoc query engine
+ */
+public interface IAdhocConverter {
+    String convert(String originSql);
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6ce9983f/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java b/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java
new file mode 100644
index 0000000..62f6792
--- /dev/null
+++ b/core-storage/src/test/java/org/apache/kylin/storage/adhoc/HiveAdhocConverterTest.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.adhoc;
+
+import junit.framework.TestCase;
+import org.junit.Test;
+
+
+public class HiveAdhocConverterTest extends TestCase {
+    @Test
+    public void testSringReplace() {
+        String originString = "select count(*) as cnt from test_kylin_fact where char_length(lstg_format_name) < 10";
+        String replacedString = HiveAdhocConverter
+            .replaceString(originString, "char_length", "length");
+        assertEquals(replacedString, "select count(*) as cnt from test_kylin_fact where length(lstg_format_name) < 10");
+    }
+
+    @Test
+    public void testExtractReplace() {
+        String originString = "ignore EXTRACT(YEAR FROM KYLIN_CAL_DT.CAL_DT) ignore";
+        String replacedString = HiveAdhocConverter.extractReplace(originString);
+        assertEquals(replacedString, "ignore YEAR(KYLIN_CAL_DT.CAL_DT) ignore");
+    }
+
+    @Test
+    public void testCastReplace() {
+        String originString = "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS INTEGER)) ignore";
+        String replacedString = HiveAdhocConverter.castRepalce(originString);
+        assertEquals(replacedString, "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS int)) ignore");
+    }
+
+    @Test
+    public void testSubqueryReplace() {
+        String originString = "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) group by seller_id,lstg_format_name";
+        String replacedString = HiveAdhocConverter.subqueryRepalce(originString);
+        assertEquals(replacedString, "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) as alias group by seller_id,lstg_format_name");
+    }
+
+    @Test
+    public void testConcatReplace() {
+        String originString = "select count(*) as cnt from test_kylin_fact where lstg_format_name||'a'='ABINa'";
+        String replacedString = HiveAdhocConverter.concatReplace(originString);
+        assertEquals(replacedString, "select count(*) as cnt from test_kylin_fact where concat(lstg_format_name,'a')='ABINa'");
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/6ce9983f/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
index f6b3496..648ef91 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/AdHocUtil.java
@@ -18,18 +18,19 @@
 
 package org.apache.kylin.rest.util;
 
+import java.sql.SQLException;
+import java.util.List;
+
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
 import org.apache.kylin.query.routing.NoRealizationFoundException;
-import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
 import org.apache.kylin.rest.exception.InternalErrorException;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+import org.apache.kylin.storage.adhoc.AdHocRunnerBase;
+import org.apache.kylin.storage.adhoc.IAdhocConverter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.sql.SQLException;
-import java.util.List;
-
 public class AdHocUtil {
     private static final Logger logger = LoggerFactory.getLogger(AdHocUtil.class);
 
@@ -40,22 +41,33 @@ public class AdHocUtil {
 
         if (isExpectedCause && kylinConfig.isAdhocEnabled()) {
             Class runnerClass = Class.forName(kylinConfig.getAdHocRunnerClassName());
-            Object instance = runnerClass.newInstance();
+            Class converterClass = Class.forName(kylinConfig.getAdHocConverterClassName());
+            Object runnerObj = runnerClass.newInstance();
+            Object converterObj = converterClass.newInstance();
 
-            if (!(instance instanceof AdHocRunnerBase)) {
-                throw new InternalErrorException("Ad-hoc runner class should be sub-class of AdHocRunnerBase.");
+            if (!(runnerObj instanceof AdHocRunnerBase)) {
+                throw new InternalErrorException("Ad-hoc runner class should be sub-class of AdHocRunnerBase");
             }
 
-            AdHocRunnerBase runner = (AdHocRunnerBase) instance;
+            if (!(converterObj instanceof IAdhocConverter)) {
+                throw new InternalErrorException("Ad-hoc converter class should implement of IAdhocConverter");
+            }
+
+            AdHocRunnerBase runner = (AdHocRunnerBase) runnerObj;
+            IAdhocConverter converter = (IAdhocConverter) converterObj;
             runner.setConfig(kylinConfig);
 
             logger.debug("Ad-hoc query enabled for Kylin");
-            // running query to ad-hoc jdbc
 
             runner.init();
 
             try {
-                runner.executeQuery(sql, results, columnMetas);
+                String adhocSql = converter.convert(sql);
+                if (!sql.equals(adhocSql)) {
+                    logger.info("the original query is converted to {} before delegating to ", adhocSql);
+                }
+
+                runner.executeQuery(adhocSql, results, columnMetas);
                 isAdHoc = true;
             } catch (Exception exception) {
                 throw exception;
@@ -67,4 +79,3 @@ public class AdHocUtil {
         return isAdHoc;
     }
 }
-
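
As the AdHocUtil change above shows, both the ad-hoc runner and the converter are loaded by reflection from class names held in KylinConfig, so a deployment can plug in its own converter. A minimal sketch, assuming a hypothetical class org.example.kylin.MyAdhocConverter and an extra deployment-specific rewrite rule (neither exists in the commit):

    package org.example.kylin;

    import org.apache.kylin.storage.adhoc.HiveAdhocConverter;
    import org.apache.kylin.storage.adhoc.IAdhocConverter;

    public class MyAdhocConverter implements IAdhocConverter {

        @Override
        public String convert(String originSql) {
            // Start from the default Hive-oriented rewrites shipped with this commit ...
            String converted = HiveAdhocConverter.doConvert(originSql);
            // ... then add an illustrative, deployment-specific rule of our own.
            return converted.replaceAll("(?i)\\bdouble precision\\b", "double");
        }
    }

It would then be selected in kylin.properties via kylin.query.ad-hoc.converter.class-name=org.example.kylin.MyAdhocConverter; per the KylinConfigBase change above, the default remains org.apache.kylin.storage.adhoc.HiveAdhocConverter.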


[46/67] [abbrv] kylin git commit: fix release test cont..

Posted by li...@apache.org.
fix release test cont..


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/cfff185c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/cfff185c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/cfff185c

Branch: refs/heads/master
Commit: cfff185c4b7809449d816f137dcb042f9e30067c
Parents: 96e8c7f
Author: Hongbin Ma <ma...@apache.org>
Authored: Mon May 29 00:22:14 2017 +0800
Committer: Dong Li <li...@apache.org>
Committed: Mon May 29 00:24:38 2017 +0800

----------------------------------------------------------------------
 build/smoke-test/sql/sql1.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/cfff185c/build/smoke-test/sql/sql1.json
----------------------------------------------------------------------
diff --git a/build/smoke-test/sql/sql1.json b/build/smoke-test/sql/sql1.json
index abbd529..7b90d0f 100644
--- a/build/smoke-test/sql/sql1.json
+++ b/build/smoke-test/sql/sql1.json
@@ -3,7 +3,7 @@
   "partial": false,
   "affectedRowCount": 0,
   "isException": false,
-  "queryAdHoc": false,
+  "adHoc": false,
   "results": [
     [
       "10000"


[25/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/MemoryBudgetController.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/MemoryBudgetController.java b/core-common/src/main/java/org/apache/kylin/common/util/MemoryBudgetController.java
index 8fa32a7..7a0b919 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/MemoryBudgetController.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/MemoryBudgetController.java
@@ -107,8 +107,7 @@ public class MemoryBudgetController {
             try {
                 reserve(consumer, requestMB);
                 if (debug && waitStart > 0)
-                    logger.debug(consumer + " waited " + (System.currentTimeMillis() - waitStart) + " ms on the "
-                            + requestMB + " MB request");
+                    logger.debug(consumer + " waited " + (System.currentTimeMillis() - waitStart) + " ms on the " + requestMB + " MB request");
                 return;
             } catch (NotEnoughBudgetException ex) {
                 // retry
@@ -177,8 +176,7 @@ public class MemoryBudgetController {
 
         if (debug) {
             if (getSystemAvailMB() < getRemainingBudgetMB()) {
-                logger.debug("Remaining budget is " + getRemainingBudgetMB() + " MB free, but system only has "
-                        + getSystemAvailMB() + " MB free. If this persists, some memory calculation must be wrong.");
+                logger.debug("Remaining budget is " + getRemainingBudgetMB() + " MB free, but system only has " + getSystemAvailMB() + " MB free. If this persists, some memory calculation must be wrong.");
             }
         }
     }
@@ -229,8 +227,7 @@ public class MemoryBudgetController {
             booking.remove(entry.consumer);
         }
         if (debug) {
-            logger.debug(entry.consumer + " reserved " + entry.reservedMB + " MB, total reserved " + totalReservedMB
-                    + " MB, remaining budget " + getRemainingBudgetMB() + " MB");
+            logger.debug(entry.consumer + " reserved " + entry.reservedMB + " MB, total reserved " + totalReservedMB + " MB, remaining budget " + getRemainingBudgetMB() + " MB");
         }
 
         if (delta < 0) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java b/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
index 39ddc24..ffeced1 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
@@ -345,8 +345,7 @@ public final class OrderedProperties implements Serializable {
          * @return the new instance
          */
         public OrderedProperties build() {
-            Map<String, String> properties = (this.comparator != null) ? new TreeMap<String, String>(comparator)
-                    : new LinkedHashMap<String, String>();
+            Map<String, String> properties = (this.comparator != null) ? new TreeMap<String, String>(comparator) : new LinkedHashMap<String, String>();
             return new OrderedProperties(properties);
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/Primes.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/Primes.java b/core-common/src/main/java/org/apache/kylin/common/util/Primes.java
index cb60331..b83950e 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/Primes.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/Primes.java
@@ -31,34 +31,9 @@ public class Primes {
      * As a result, <code>int</code> numbers which are not reduced by those primes are guaranteed
      * to be either prime or semi prime.
      */
-    public static final int[] PRIMES = { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73,
-            79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191,
-            193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311,
-            313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439,
-            443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577,
-            587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709,
-            719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857,
-            859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997, 1009,
-            1013, 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117,
-            1123, 1129, 1151, 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259,
-            1277, 1279, 1283, 1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, 1381, 1399, 1409,
-            1423, 1427, 1429, 1433, 1439, 1447, 1451, 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, 1523,
-            1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637,
-            1657, 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, 1741, 1747, 1753, 1759, 1777, 1783, 1787,
-            1789, 1801, 1811, 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, 1901, 1907, 1913, 1931, 1933,
-            1949, 1951, 1973, 1979, 1987, 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, 2063, 2069, 2081,
-            2083, 2087, 2089, 2099, 2111, 2113, 2129, 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, 2221,
-            2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351,
-            2357, 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, 2437, 2441, 2447, 2459, 2467, 2473, 2477,
-            2503, 2521, 2531, 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, 2621, 2633, 2647, 2657, 2659,
-            2663, 2671, 2677, 2683, 2687, 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, 2749, 2753, 2767,
-            2777, 2789, 2791, 2797, 2801, 2803, 2819, 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, 2909,
-            2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067,
-            3079, 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, 3187, 3191, 3203, 3209, 3217, 3221, 3229,
-            3251, 3253, 3257, 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, 3343, 3347, 3359, 3361, 3371,
-            3373, 3389, 3391, 3407, 3413, 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, 3517, 3527, 3529,
-            3533, 3539, 3541, 3547, 3557, 3559, 3571, 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, 3659,
-            3671 };
+    public static final int[] PRIMES = { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
+            1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, 2371, 2377, 2381, 2383, 2389, 2393,
+            2399, 2411, 2417, 2423, 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, 3659, 3671 };
 
     /**
      * Hide utility class.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/RangeUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/RangeUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/RangeUtil.java
index ea08ec8..6ef8c089 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/RangeUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/RangeUtil.java
@@ -37,8 +37,7 @@ public class RangeUtil {
     /**
      * for NavigableMap sorted by C, given a range of C, return the sub map whose key falls in the range
      */
-    public static <C extends Comparable<?>, V> NavigableMap<C, V> filter(NavigableMap<C, V> values,
-            Range<C> filterRange) {
+    public static <C extends Comparable<?>, V> NavigableMap<C, V> filter(NavigableMap<C, V> values, Range<C> filterRange) {
         if (filterRange == null || filterRange.isEmpty()) {
             return Maps.newTreeMap();
         } else if (filterRange.equals(Range.all())) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/SortUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/SortUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/SortUtil.java
index 834d2d4..1ea9d97 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/SortUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/SortUtil.java
@@ -26,8 +26,7 @@ import com.google.common.collect.TreeMultimap;
 /**
  */
 public class SortUtil {
-    public static <T extends Comparable, E extends Comparable> Iterator<T> extractAndSort(Iterator<T> input,
-            Function<T, E> extractor) {
+    public static <T extends Comparable, E extends Comparable> Iterator<T> extractAndSort(Iterator<T> input, Function<T, E> extractor) {
         TreeMultimap<E, T> reorgnized = TreeMultimap.create();
         while (input.hasNext()) {
             T t = input.next();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
index 14253a0..a2326ee 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
@@ -54,8 +54,7 @@ public class ZipFileUtils {
             zipInputStream = new ZipInputStream(new FileInputStream(zipFileName));
             ZipEntry zipEntry = null;
             while ((zipEntry = zipInputStream.getNextEntry()) != null) {
-                logger.info("decompressing " + zipEntry.getName() + " is directory:" + zipEntry.isDirectory()
-                        + " available: " + zipInputStream.available());
+                logger.info("decompressing " + zipEntry.getName() + " is directory:" + zipEntry.isDirectory() + " available: " + zipInputStream.available());
 
                 File temp = new File(outputFolder, zipEntry.getName());
                 if (zipEntry.isDirectory()) {
@@ -77,8 +76,7 @@ public class ZipFileUtils {
         }
     }
 
-    private static void compressDirectoryToZipfile(String rootDir, String sourceDir, ZipOutputStream out)
-            throws IOException {
+    private static void compressDirectoryToZipfile(String rootDir, String sourceDir, ZipOutputStream out) throws IOException {
         File[] files = new File(sourceDir).listFiles();
         if (files == null)
             return;
@@ -86,9 +84,7 @@ public class ZipFileUtils {
             if (sourceFile.isDirectory()) {
                 compressDirectoryToZipfile(rootDir, sourceDir + normDir(sourceFile.getName()), out);
             } else {
-                ZipEntry entry = new ZipEntry(
-                        normDir(StringUtils.isEmpty(rootDir) ? sourceDir : sourceDir.replace(rootDir, ""))
-                                + sourceFile.getName());
+                ZipEntry entry = new ZipEntry(normDir(StringUtils.isEmpty(rootDir) ? sourceDir : sourceDir.replace(rootDir, "")) + sourceFile.getName());
                 entry.setTime(sourceFile.lastModified());
                 out.putNextEntry(entry);
                 FileInputStream in = new FileInputStream(sourceDir + sourceFile.getName());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/StorageURLTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/StorageURLTest.java b/core-common/src/test/java/org/apache/kylin/common/StorageURLTest.java
index ab47958..eaa7548 100644
--- a/core-common/src/test/java/org/apache/kylin/common/StorageURLTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/StorageURLTest.java
@@ -87,7 +87,7 @@ public class StorageURLTest {
             assertEquals("hello@hbase,a", id.toString());
         }
     }
-
+    
     @Test
     public void testValueOfCache() {
         StorageURL id1 = StorageURL.valueOf("hello@hbase");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/persistence/LocalFileResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/persistence/LocalFileResourceStoreTest.java b/core-common/src/test/java/org/apache/kylin/common/persistence/LocalFileResourceStoreTest.java
index 0252b9c..63eb04b 100644
--- a/core-common/src/test/java/org/apache/kylin/common/persistence/LocalFileResourceStoreTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/persistence/LocalFileResourceStoreTest.java
@@ -68,16 +68,16 @@ public class LocalFileResourceStoreTest extends LocalFileMetadataTestCase {
             ByteArrayInputStream is = new ByteArrayInputStream(bytes);
             store.putResource("/res2", is, 2000);
             is.close();
-
+            
             store.putResource("/res1", str, 2000, StringEntity.serializer);
             store.deleteResource("/res1");
 
             assertEquals(null, store.getResource("/res1"));
             assertEquals(2000, (raw = store.getResource("/res2")).timestamp);
             raw.inputStream.close();
-
+            
             cp.rollback();
-
+            
             assertEquals(null, store.getResource("/res2"));
             assertEquals(1000, (raw = store.getResource("/res1")).timestamp);
             raw.inputStream.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/restclient/RestClientTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/restclient/RestClientTest.java b/core-common/src/test/java/org/apache/kylin/common/restclient/RestClientTest.java
index c7a4dd6..af05e5e 100644
--- a/core-common/src/test/java/org/apache/kylin/common/restclient/RestClientTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/restclient/RestClientTest.java
@@ -19,13 +19,15 @@
 package org.apache.kylin.common.restclient;
 
 import java.io.IOException;
-
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+
 public class RestClientTest {
 
+
+
     private static final Logger logger = LoggerFactory.getLogger(RestClientTest.class);
 
     @SuppressWarnings("unused")

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java b/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
index d3787d1..5512147b 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
@@ -190,10 +190,8 @@ public class BasicTest {
         System.out.println(time(c.getTimeInMillis()));
 
         a.setTimeInMillis(current);
-        b.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH), a.get(Calendar.HOUR_OF_DAY),
-                a.get(Calendar.MINUTE));
-        c.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH), a.get(Calendar.HOUR_OF_DAY),
-                0);
+        b.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH), a.get(Calendar.HOUR_OF_DAY), a.get(Calendar.MINUTE));
+        c.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH), a.get(Calendar.HOUR_OF_DAY), 0);
 
         System.out.println(time(b.getTimeInMillis()));
         System.out.println(time(c.getTimeInMillis()));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/CacheBuilderTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/CacheBuilderTest.java b/core-common/src/test/java/org/apache/kylin/common/util/CacheBuilderTest.java
index 742c3c0..e30f0b3 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/CacheBuilderTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/CacheBuilderTest.java
@@ -28,13 +28,12 @@ import com.google.common.cache.RemovalNotification;
 public class CacheBuilderTest {
     @Test
     public void foo() {
-        Cache<Object, Object> build = CacheBuilder.newBuilder().maximumSize(1).weakValues()
-                .removalListener(new RemovalListener<Object, Object>() {
-                    @Override
-                    public void onRemoval(RemovalNotification<Object, Object> notification) {
-                        System.out.println(notification.getCause());
-                    }
-                }).build();
+        Cache<Object, Object> build = CacheBuilder.newBuilder().maximumSize(1).weakValues().removalListener(new RemovalListener<Object, Object>() {
+            @Override
+            public void onRemoval(RemovalNotification<Object, Object> notification) {
+                System.out.println(notification.getCause());
+            }
+        }).build();
 
         build.put(1, 1);
         build.put(1, 2);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/ClassUtilTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/ClassUtilTest.java b/core-common/src/test/java/org/apache/kylin/common/util/ClassUtilTest.java
index 0b7607d..75fa574 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/ClassUtilTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/ClassUtilTest.java
@@ -25,10 +25,8 @@ public class ClassUtilTest {
 
     @Test
     public void testFindContainingJar() throws ClassNotFoundException {
-        Assert.assertTrue(ClassUtil.findContainingJar(Class.forName("org.apache.commons.beanutils.BeanUtils"))
-                .contains("commons-beanutils"));
-        Assert.assertTrue(ClassUtil.findContainingJar(Class.forName("org.apache.commons.beanutils.BeanUtils"), "core")
-                .contains("commons-beanutils-core"));
+        Assert.assertTrue(ClassUtil.findContainingJar(Class.forName("org.apache.commons.beanutils.BeanUtils")).contains("commons-beanutils"));
+        Assert.assertTrue(ClassUtil.findContainingJar(Class.forName("org.apache.commons.beanutils.BeanUtils"), "core").contains("commons-beanutils-core"));
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/HiveCmdBuilderTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/HiveCmdBuilderTest.java b/core-common/src/test/java/org/apache/kylin/common/util/HiveCmdBuilderTest.java
index 0eccfe0..d69d4d2 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/HiveCmdBuilderTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/HiveCmdBuilderTest.java
@@ -60,15 +60,12 @@ public class HiveCmdBuilderTest {
         hiveCmdBuilder.addStatement("SHOW\n TABLES;");
         hiveCmdBuilder.setHiveConfProps(hiveProps);
         hiveCmdBuilder.overwriteHiveProps(hivePropsOverwrite);
-        assertEquals(
-                "hive -e \"USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n\" --hiveconf hive.execution.engine=tez",
-                hiveCmdBuilder.build());
+        assertEquals("hive -e \"USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n\" --hiveconf hive.execution.engine=tez", hiveCmdBuilder.build());
     }
 
     @Test
     public void testBeeline() throws IOException {
-        String lineSeparator = java.security.AccessController
-                .doPrivileged(new sun.security.action.GetPropertyAction("line.separator"));
+        String lineSeparator = java.security.AccessController.doPrivileged(new sun.security.action.GetPropertyAction("line.separator"));
         System.setProperty("kylin.source.hive.client", "beeline");
         System.setProperty("kylin.source.hive.beeline-params", "-u jdbc_url");
 
@@ -84,10 +81,8 @@ public class HiveCmdBuilderTest {
         hqlFile = hqlFile.substring(0, hqlFile.length() - ";exit $ret_code".length());
 
         String hqlStatement = FileUtils.readFileToString(new File(hqlFile), Charset.defaultCharset());
-        assertEquals(
-                "USE default;" + lineSeparator + "DROP TABLE test;" + lineSeparator + "SHOW\n TABLES;" + lineSeparator,
-                hqlStatement);
+        assertEquals("USE default;" + lineSeparator + "DROP TABLE test;" + lineSeparator + "SHOW\n TABLES;" + lineSeparator, hqlStatement);
 
         FileUtils.forceDelete(new File(hqlFile));
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java b/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
index aa180a9..9f5b278 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/HotLoadKylinPropertiesTestCase.java
@@ -18,15 +18,15 @@
 
 package org.apache.kylin.common.util;
 
+import org.apache.kylin.common.KylinConfig;
+import org.junit.After;
+import org.junit.Before;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.util.Properties;
 
-import org.apache.kylin.common.KylinConfig;
-import org.junit.After;
-import org.junit.Before;
-
 /**
  * @author kangkaisen
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java b/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
index bc4dc63..6dcdaf5 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
@@ -53,9 +53,7 @@ public class InstallJarIntoMavenTest {
             String artifactId = name.substring(0, match.start());
             String version = name.substring(match.start() + 1, lastDot);
 
-            fw.write(String.format(
-                    "mvn install:install-file -Dfile=%s -DgroupId=%s -DartifactId=%s -Dversion=%s -Dpackaging=jar",
-                    name, "org.apache." + groupId, artifactId, version));
+            fw.write(String.format("mvn install:install-file -Dfile=%s -DgroupId=%s -DartifactId=%s -Dversion=%s -Dpackaging=jar", name, "org.apache." + groupId, artifactId, version));
             fw.write("\n");
         }
         fw.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/JacksonTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/JacksonTest.java b/core-common/src/test/java/org/apache/kylin/common/util/JacksonTest.java
index f9062f1..286cdd3 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/JacksonTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/JacksonTest.java
@@ -31,6 +31,7 @@ public class JacksonTest {
         a.put("3", "3");
         a.put("2", "2");
 
+
         JacksonBean bean = new JacksonBean();
         bean.setA("valuea");
         bean.setConfiguration(a);
@@ -38,13 +39,11 @@ public class JacksonTest {
         String s = JsonUtil.writeValueAsString(bean);
         System.out.println(s);
 
-        JacksonBean desBean = (JacksonBean) JsonUtil.readValue(
-                "{\"a\":\"valuea\",\"b\":0,\"configuration\":{\"2\":\"2\",\"3\":\"3\",\"1\":\"1\"}}",
-                JacksonBean.class);
-
+        JacksonBean desBean = (JacksonBean) JsonUtil.readValue("{\"a\":\"valuea\",\"b\":0,\"configuration\":{\"2\":\"2\",\"3\":\"3\",\"1\":\"1\"}}", JacksonBean.class);
+        
         String x2 = JsonUtil.writeValueAsString(desBean);
         System.out.println(x2);
-
+        
         System.out.println(desBean);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/LocalFileMetadataTestCase.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/LocalFileMetadataTestCase.java b/core-common/src/test/java/org/apache/kylin/common/util/LocalFileMetadataTestCase.java
index fcde1a7..67f39b4 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/LocalFileMetadataTestCase.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/LocalFileMetadataTestCase.java
@@ -77,7 +77,7 @@ public class LocalFileMetadataTestCase extends AbstractKylinTestCase {
     public void cleanupTestMetadata() {
         cleanAfterClass();
     }
-
+    
     protected String getLocalWorkingDirectory() {
         String dir = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory();
         if (dir.startsWith("file://"))

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/RangeTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/RangeTest.java b/core-common/src/test/java/org/apache/kylin/common/util/RangeTest.java
index 22e1863..d4e4cbf 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/RangeTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/RangeTest.java
@@ -101,8 +101,7 @@ public class RangeTest {
         Assert.assertTrue(RangeUtil.remove(r1, c3).equals(Lists.newArrayList(Range.closed(2, 4), Range.closed(5, 5))));
 
         Assert.assertTrue(RangeUtil.remove(r1, d1).equals(Lists.newArrayList(Range.openClosed(3, 5))));
-        Assert.assertTrue(
-                RangeUtil.remove(r1, d2).equals(Lists.newArrayList(Range.closedOpen(2, 3), Range.openClosed(4, 5))));
+        Assert.assertTrue(RangeUtil.remove(r1, d2).equals(Lists.newArrayList(Range.closedOpen(2, 3), Range.openClosed(4, 5))));
         Assert.assertTrue(RangeUtil.remove(r1, d3).equals(Lists.newArrayList(Range.closedOpen(2, 4))));
 
     }
@@ -165,8 +164,7 @@ public class RangeTest {
         Assert.assertTrue(RangeUtil.remove(r1, b9).equals(Lists.newArrayList()));
 
         Assert.assertTrue(RangeUtil.remove(r1, c1).equals(Lists.newArrayList(Range.closedOpen(3, 5))));
-        Assert.assertTrue(
-                RangeUtil.remove(r1, c2).equals(Lists.newArrayList(Range.openClosed(2, 3), Range.closedOpen(4, 5))));
+        Assert.assertTrue(RangeUtil.remove(r1, c2).equals(Lists.newArrayList(Range.openClosed(2, 3), Range.closedOpen(4, 5))));
         Assert.assertTrue(RangeUtil.remove(r1, c3).equals(Lists.newArrayList(Range.openClosed(2, 4))));
 
         Assert.assertTrue(RangeUtil.remove(r1, d1).equals(Lists.newArrayList(Range.open(3, 5))));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java b/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
index 1a67bb5..15f54f9 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
@@ -41,11 +41,9 @@ public class TimeUtilTest {
 
         a.setTimeInMillis(timeMillis);
         if (unit == NormalizedTimeUnit.MINUTE) {
-            b.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH),
-                    a.get(Calendar.HOUR_OF_DAY), a.get(Calendar.MINUTE));
+            b.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH), a.get(Calendar.HOUR_OF_DAY), a.get(Calendar.MINUTE));
         } else if (unit == NormalizedTimeUnit.HOUR) {
-            b.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH),
-                    a.get(Calendar.HOUR_OF_DAY), 0);
+            b.set(a.get(Calendar.YEAR), a.get(Calendar.MONTH), a.get(Calendar.DAY_OF_MONTH), a.get(Calendar.HOUR_OF_DAY), 0);
         }
         return b.getTimeInMillis();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/GTForwardingScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/GTForwardingScanner.java b/core-cube/src/main/java/org/apache/kylin/GTForwardingScanner.java
index 8e82427..de8c88d 100644
--- a/core-cube/src/main/java/org/apache/kylin/GTForwardingScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/GTForwardingScanner.java
@@ -18,14 +18,14 @@
 
 package org.apache.kylin;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import org.apache.kylin.gridtable.GTInfo;
+import org.apache.kylin.gridtable.GTRecord;
+import org.apache.kylin.gridtable.IGTScanner;
 
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.kylin.gridtable.GTInfo;
-import org.apache.kylin.gridtable.GTRecord;
-import org.apache.kylin.gridtable.IGTScanner;
+import static com.google.common.base.Preconditions.checkNotNull;
 
 /**
  * A {@link IGTScanner} which forwards all its method calls to another scanner.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
index 45898b8..20cb0a9 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
@@ -70,8 +70,7 @@ public class CubeCapabilityChecker {
             //1. dimension as measure
 
             if (!unmatchedAggregations.isEmpty()) {
-                tryDimensionAsMeasures(unmatchedAggregations, result,
-                        cube.getDescriptor().listDimensionColumnsIncludingDerived());
+                tryDimensionAsMeasures(unmatchedAggregations, result, cube.getDescriptor().listDimensionColumnsIncludingDerived());
             }
         } else {
             //for non query-on-facttable 
@@ -111,10 +110,8 @@ public class CubeCapabilityChecker {
             return result;
         }
 
-        if (cube.getStorageType() == IStorageAware.ID_HBASE
-                && MassInTupleFilter.containsMassInTupleFilter(digest.filter)) {
-            logger.info(
-                    "Exclude cube " + cube.getName() + " because only v2 storage + v2 query engine supports massin");
+        if (cube.getStorageType() == IStorageAware.ID_HBASE && MassInTupleFilter.containsMassInTupleFilter(digest.filter)) {
+            logger.info("Exclude cube " + cube.getName() + " because only v2 storage + v2 query engine supports massin");
             return result;
         }
 
@@ -161,8 +158,7 @@ public class CubeCapabilityChecker {
         return result;
     }
 
-    private static void tryDimensionAsMeasures(Collection<FunctionDesc> unmatchedAggregations, CapabilityResult result,
-            Set<TblColRef> dimCols) {
+    private static void tryDimensionAsMeasures(Collection<FunctionDesc> unmatchedAggregations, CapabilityResult result, Set<TblColRef> dimCols) {
 
         Iterator<FunctionDesc> it = unmatchedAggregations.iterator();
         while (it.hasNext()) {
@@ -180,8 +176,7 @@ public class CubeCapabilityChecker {
                 continue;
             }
             List<TblColRef> neededCols = parameterDesc.getColRefs();
-            if (neededCols.size() > 0 && dimCols.containsAll(neededCols)
-                    && FunctionDesc.BUILT_IN_AGGREGATIONS.contains(functionDesc.getExpression())) {
+            if (neededCols.size() > 0 && dimCols.containsAll(neededCols) && FunctionDesc.BUILT_IN_AGGREGATIONS.contains(functionDesc.getExpression())) {
                 result.influences.add(new CapabilityResult.DimensionAsMeasure(functionDesc));
                 it.remove();
                 continue;
@@ -190,9 +185,7 @@ public class CubeCapabilityChecker {
     }
 
     // custom measure types can cover unmatched dimensions or measures
-    private static void tryCustomMeasureTypes(Collection<TblColRef> unmatchedDimensions,
-            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, CubeInstance cube,
-            CapabilityResult result) {
+    private static void tryCustomMeasureTypes(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, CubeInstance cube, CapabilityResult result) {
         CubeDesc cubeDesc = cube.getDescriptor();
         List<String> influencingMeasures = Lists.newArrayList();
         for (MeasureDesc measure : cubeDesc.getMeasures()) {
@@ -203,16 +196,14 @@ public class CubeCapabilityChecker {
             if (measureType instanceof BasicMeasureType)
                 continue;
 
-            CapabilityInfluence inf = measureType.influenceCapabilityCheck(unmatchedDimensions, unmatchedAggregations,
-                    digest, measure);
+            CapabilityInfluence inf = measureType.influenceCapabilityCheck(unmatchedDimensions, unmatchedAggregations, digest, measure);
             if (inf != null) {
                 result.influences.add(inf);
                 influencingMeasures.add(measure.getName() + "@" + measureType.getClass());
             }
         }
         if (influencingMeasures.size() != 0)
-            logger.info("Cube {} CapabilityInfluences: {}", cube.getCanonicalName(),
-                    StringUtils.join(influencingMeasures, ","));
+            logger.info("Cube {} CapabilityInfluences: {}", cube.getCanonicalName(), StringUtils.join(influencingMeasures, ","));
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
index e268efd..85ca929 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
@@ -129,8 +129,7 @@ public class CubeDescManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             String cubeDescName = cacheKey;
             CubeDesc cubeDesc = getCubeDesc(cubeDescName);
             String modelName = cubeDesc == null ? null : cubeDesc.getModel().getName();
@@ -266,8 +265,7 @@ public class CubeDescManager {
                 int keyLength = 0;
                 while (parameter != null) {
                     String encoding = configuration.get(TopNMeasureType.CONFIG_ENCODING_PREFIX + parameter.getValue());
-                    String encodingVersionStr = configuration
-                            .get(TopNMeasureType.CONFIG_ENCODING_VERSION_PREFIX + parameter.getValue());
+                    String encodingVersionStr = configuration.get(TopNMeasureType.CONFIG_ENCODING_VERSION_PREFIX + parameter.getValue());
                     if (StringUtils.isEmpty(encoding) || DictionaryDimEnc.ENCODING_NAME.equals(encoding)) {
                         keyLength += DictionaryDimEnc.MAX_ENCODING_LENGTH; // estimation for dict encoding
                     } else {
@@ -281,8 +279,7 @@ public class CubeDescManager {
                             }
                         }
                         Object[] encodingConf = DimensionEncoding.parseEncodingConf(encoding);
-                        DimensionEncoding dimensionEncoding = DimensionEncodingFactory.create((String) encodingConf[0],
-                                (String[]) encodingConf[1], encodingVersion);
+                        DimensionEncoding dimensionEncoding = DimensionEncodingFactory.create((String) encodingConf[0], (String[]) encodingConf[1], encodingVersion);
                         keyLength += dimensionEncoding.getLengthOfEncoding();
                     }
 
@@ -312,19 +309,16 @@ public class CubeDescManager {
 
     private void reloadAllCubeDesc() throws IOException {
         ResourceStore store = getStore();
-        logger.info("Reloading Cube Metadata from folder "
-                + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
+        logger.info("Reloading Cube Metadata from folder " + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
 
         cubeDescMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.CUBE_DESC_RESOURCE_ROOT,
-                MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.CUBE_DESC_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             CubeDesc desc = loadCubeDesc(path, true);
 
             if (!path.equals(desc.getResourcePath())) {
-                logger.error(
-                        "Skip suspicious desc at " + path + ", " + desc + " should be at " + desc.getResourcePath());
+                logger.error("Skip suspicious desc at " + path + ", " + desc + " should be at " + desc.getResourcePath());
                 continue;
             }
             if (cubeDescMap.containsKey(desc.getName())) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index e9645b9..fb9a7a7 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -127,8 +127,7 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
     // in a temporary broken state, so that user can edit and fix it. Broken state is often due to
     // schema changes at source.
     public boolean allowBrokenDescriptor() {
-        return (getStatus() == RealizationStatusEnum.DISABLED || getStatus() == RealizationStatusEnum.DESCBROKEN)
-                && segments.isEmpty();
+        return (getStatus() == RealizationStatusEnum.DISABLED || getStatus() == RealizationStatusEnum.DESCBROKEN) && segments.isEmpty();
     }
 
     public String getResourcePath() {
@@ -362,8 +361,7 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
         if (!this.getDescriptor().getModel().getPartitionDesc().isPartitioned())
             return false;
 
-        return this.getDescriptor().getAutoMergeTimeRanges() != null
-                && this.getDescriptor().getAutoMergeTimeRanges().length > 0;
+        return this.getDescriptor().getAutoMergeTimeRanges() != null && this.getDescriptor().getAutoMergeTimeRanges().length > 0;
     }
 
     public Pair<Long, Long> autoMergeCubeSegments() throws IOException {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 32e2316..e6cd761 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -109,8 +109,7 @@ public class CubeManager implements IRealizationProvider {
                 if (CACHE.size() > 1) {
                     logger.warn("More than one singleton exist");
                     for (KylinConfig kylinConfig : CACHE.keySet()) {
-                        logger.warn("type: " + kylinConfig.getClass() + " reference: "
-                                + System.identityHashCode(kylinConfig.base()));
+                        logger.warn("type: " + kylinConfig.getClass() + " reference: " + System.identityHashCode(kylinConfig.base()));
                     }
                 }
                 return r;
@@ -161,8 +160,7 @@ public class CubeManager implements IRealizationProvider {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
-                throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
             String cubeName = cacheKey;
 
             if (event == Event.DROP)
@@ -170,8 +168,7 @@ public class CubeManager implements IRealizationProvider {
             else
                 reloadCubeLocal(cubeName);
 
-            for (ProjectInstance prj : ProjectManager.getInstance(config).findProjects(RealizationType.CUBE,
-                    cubeName)) {
+            for (ProjectInstance prj : ProjectManager.getInstance(config).findProjects(RealizationType.CUBE, cubeName)) {
                 broadcaster.notifyProjectDataUpdate(prj.getName());
             }
         }
@@ -217,22 +214,19 @@ public class CubeManager implements IRealizationProvider {
         return result;
     }
 
-    public DictionaryInfo buildDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable)
-            throws IOException {
+    public DictionaryInfo buildDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable) throws IOException {
         CubeDesc cubeDesc = cubeSeg.getCubeDesc();
         if (!cubeDesc.getAllColumnsNeedDictionaryBuilt().contains(col))
             return null;
 
         String builderClass = cubeDesc.getDictionaryBuilderClass(col);
-        DictionaryInfo dictInfo = getDictionaryManager().buildDictionary(cubeDesc.getModel(), col, inpTable,
-                builderClass);
+        DictionaryInfo dictInfo = getDictionaryManager().buildDictionary(cubeDesc.getModel(), col, inpTable, builderClass);
 
         saveDictionaryInfo(cubeSeg, col, dictInfo);
         return dictInfo;
     }
 
-    public DictionaryInfo saveDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable,
-            Dictionary<String> dict) throws IOException {
+    public DictionaryInfo saveDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable, Dictionary<String> dict) throws IOException {
         CubeDesc cubeDesc = cubeSeg.getCubeDesc();
         if (!cubeDesc.getAllColumnsNeedDictionaryBuilt().contains(col))
             return null;
@@ -269,8 +263,7 @@ public class CubeManager implements IRealizationProvider {
 
             info = dictMgr.getDictionaryInfo(dictResPath);
             if (info == null)
-                throw new IllegalStateException("No dictionary found by " + dictResPath
-                        + ", invalid cube state; cube segment" + cubeSeg + ", col " + col);
+                throw new IllegalStateException("No dictionary found by " + dictResPath + ", invalid cube state; cube segment" + cubeSeg + ", col " + col);
         } catch (IOException e) {
             throw new IllegalStateException("Failed to get dictionary for cube segment" + cubeSeg + ", col" + col, e);
         }
@@ -325,8 +318,7 @@ public class CubeManager implements IRealizationProvider {
     }
 
     // sync on update
-    public CubeInstance createCube(String cubeName, String projectName, CubeDesc desc, String owner)
-            throws IOException {
+    public CubeInstance createCube(String cubeName, String projectName, CubeDesc desc, String owner) throws IOException {
         logger.info("Creating cube '" + projectName + "-->" + cubeName + "' from desc '" + desc.getName() + "'");
 
         // save cube resource
@@ -349,8 +341,7 @@ public class CubeManager implements IRealizationProvider {
         cube.setOwner(owner);
 
         updateCubeWithRetry(new CubeUpdate(cube), 0);
-        ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cube.getName(), projectName,
-                owner);
+        ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cube.getName(), projectName, owner);
 
         if (listener != null)
             listener.afterCubeCreate(cube);
@@ -465,16 +456,12 @@ public class CubeManager implements IRealizationProvider {
     }
 
     public CubeSegment appendSegment(CubeInstance cube, SourcePartition sourcePartition) throws IOException {
-        return appendSegment(cube, sourcePartition.getStartDate(), sourcePartition.getEndDate(),
-                sourcePartition.getStartOffset(), sourcePartition.getEndOffset(),
-                sourcePartition.getSourcePartitionOffsetStart(), sourcePartition.getSourcePartitionOffsetEnd());
+        return appendSegment(cube, sourcePartition.getStartDate(), sourcePartition.getEndDate(), sourcePartition.getStartOffset(), sourcePartition.getEndOffset(), sourcePartition.getSourcePartitionOffsetStart(), sourcePartition.getSourcePartitionOffsetEnd());
     }
 
-    CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset,
-            Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd)
-            throws IOException {
+    CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) throws IOException {
         checkBuildingSegment(cube);
-
+        
         // fix start/end a bit
         if (cube.getModel().getPartitionDesc().isPartitioned()) {
             // if missing start, set it to where last time ends
@@ -500,22 +487,19 @@ public class CubeManager implements IRealizationProvider {
         return newSegment;
     }
 
-    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset)
-            throws IOException {
+    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset) throws IOException {
         checkBuildingSegment(cube);
 
         CubeSegment newSegment = newSegment(cube, startDate, endDate, startOffset, endOffset);
 
         Pair<Boolean, Boolean> pair = CubeValidator.fitInSegments(cube.getSegments(), newSegment);
         if (pair.getFirst() == false || pair.getSecond() == false)
-            throw new IllegalArgumentException("The new refreshing segment " + newSegment
-                    + " does not match any existing segment in cube " + cube);
+            throw new IllegalArgumentException("The new refreshing segment " + newSegment + " does not match any existing segment in cube " + cube);
 
         if (startOffset > 0 || endOffset > 0) {
             CubeSegment toRefreshSeg = null;
             for (CubeSegment cubeSegment : cube.getSegments()) {
-                if (cubeSegment.getSourceOffsetStart() == startOffset
-                        && cubeSegment.getSourceOffsetEnd() == endOffset) {
+                if (cubeSegment.getSourceOffsetStart() == startOffset && cubeSegment.getSourceOffsetEnd() == endOffset) {
                     toRefreshSeg = cubeSegment;
                     break;
                 }
@@ -536,8 +520,7 @@ public class CubeManager implements IRealizationProvider {
         return newSegment;
     }
 
-    public CubeSegment mergeSegments(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset,
-            boolean force) throws IOException {
+    public CubeSegment mergeSegments(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, boolean force) throws IOException {
         if (cube.getSegments().isEmpty())
             throw new IllegalArgumentException("Cube " + cube + " has no segments");
         if (startDate >= endDate && startOffset >= endOffset)
@@ -551,11 +534,9 @@ public class CubeManager implements IRealizationProvider {
         if (isOffsetsOn) {
             // offset cube, merge by date range?
             if (startOffset == endOffset) {
-                Pair<CubeSegment, CubeSegment> pair = cube.getSegments(SegmentStatusEnum.READY)
-                        .findMergeOffsetsByDateRange(startDate, endDate, Long.MAX_VALUE);
+                Pair<CubeSegment, CubeSegment> pair = cube.getSegments(SegmentStatusEnum.READY).findMergeOffsetsByDateRange(startDate, endDate, Long.MAX_VALUE);
                 if (pair == null)
-                    throw new IllegalArgumentException("Find no segments to merge by date range " + startDate + "-"
-                            + endDate + " for cube " + cube);
+                    throw new IllegalArgumentException("Find no segments to merge by date range " + startDate + "-" + endDate + " for cube " + cube);
                 startOffset = pair.getFirst().getSourceOffsetStart();
                 endOffset = pair.getSecond().getSourceOffsetEnd();
             }
@@ -575,9 +556,7 @@ public class CubeManager implements IRealizationProvider {
 
         List<CubeSegment> mergingSegments = cube.getMergingSegments(newSegment);
         if (mergingSegments.size() <= 1)
-            throw new IllegalArgumentException(
-                    "Range " + newSegment.getSourceOffsetStart() + "-" + newSegment.getSourceOffsetEnd()
-                            + " must contain at least 2 segments, but there is " + mergingSegments.size());
+            throw new IllegalArgumentException("Range " + newSegment.getSourceOffsetStart() + "-" + newSegment.getSourceOffsetEnd() + " must contain at least 2 segments, but there is " + mergingSegments.size());
 
         CubeSegment first = mergingSegments.get(0);
         CubeSegment last = mergingSegments.get(mergingSegments.size() - 1);
@@ -602,9 +581,7 @@ public class CubeManager implements IRealizationProvider {
             }
 
             if (emptySegment.size() > 0) {
-                throw new IllegalArgumentException(
-                        "Empty cube segment found, couldn't merge unless 'forceMergeEmptySegment' set to true: "
-                                + emptySegment);
+                throw new IllegalArgumentException("Empty cube segment found, couldn't merge unless 'forceMergeEmptySegment' set to true: " + emptySegment);
             }
         }
 
@@ -643,15 +620,13 @@ public class CubeManager implements IRealizationProvider {
     private void checkBuildingSegment(CubeInstance cube) {
         int maxBuldingSeg = cube.getConfig().getMaxBuildingSegments();
         if (cube.getBuildingSegments().size() >= maxBuldingSeg) {
-            throw new IllegalStateException(
-                    "There is already " + cube.getBuildingSegments().size() + " building segment; ");
+            throw new IllegalStateException("There is already " + cube.getBuildingSegments().size() + " building segment; ");
         }
     }
 
     private void checkCubeIsPartitioned(CubeInstance cube) {
         if (cube.getDescriptor().getModel().getPartitionDesc().isPartitioned() == false) {
-            throw new IllegalStateException(
-                    "there is no partition date column specified, only full build is supported");
+            throw new IllegalStateException("there is no partition date column specified, only full build is supported");
         }
     }
 
@@ -675,16 +650,14 @@ public class CubeManager implements IRealizationProvider {
         String[] pkCols = join.getPrimaryKey();
         String snapshotResPath = cubeSegment.getSnapshotResPath(tableName);
         if (snapshotResPath == null)
-            throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment"
-                    + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
+            throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment" + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
 
         try {
             SnapshotTable snapshot = getSnapshotManager().getSnapshotTable(snapshotResPath);
             TableDesc tableDesc = getMetadataManager().getTableDesc(tableName);
             return new LookupStringTable(tableDesc, pkCols, snapshot);
         } catch (IOException e) {
-            throw new IllegalStateException(
-                    "Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
+            throw new IllegalStateException("Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
         }
     }
 
@@ -728,8 +701,7 @@ public class CubeManager implements IRealizationProvider {
 
     public void promoteNewlyBuiltSegments(CubeInstance cube, CubeSegment newSegment) throws IOException {
         if (StringUtils.isBlank(newSegment.getStorageLocationIdentifier()))
-            throw new IllegalStateException(
-                    "For cube " + cube + ", segment " + newSegment + " missing StorageLocationIdentifier");
+            throw new IllegalStateException("For cube " + cube + ", segment " + newSegment + " missing StorageLocationIdentifier");
 
         if (StringUtils.isBlank(newSegment.getLastBuildJobID()))
             throw new IllegalStateException("For cube " + cube + ", segment " + newSegment + " missing LastBuildJobID");
@@ -741,8 +713,7 @@ public class CubeManager implements IRealizationProvider {
         List<CubeSegment> tobe = cube.calculateToBeSegments(newSegment);
 
         if (tobe.contains(newSegment) == false)
-            throw new IllegalStateException(
-                    "For cube " + cube + ", segment " + newSegment + " is expected but not in the tobe " + tobe);
+            throw new IllegalStateException("For cube " + cube + ", segment " + newSegment + " is expected but not in the tobe " + tobe);
 
         newSegment.setStatus(SegmentStatusEnum.READY);
 
@@ -755,8 +726,7 @@ public class CubeManager implements IRealizationProvider {
         logger.info("Promoting cube " + cube + ", new segment " + newSegment + ", to remove segments " + toRemoveSegs);
 
         CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToRemoveSegs(toRemoveSegs.toArray(new CubeSegment[toRemoveSegs.size()]))
-                .setToUpdateSegs(newSegment).setStatus(RealizationStatusEnum.READY);
+        cubeBuilder.setToRemoveSegs(toRemoveSegs.toArray(new CubeSegment[toRemoveSegs.size()])).setToUpdateSegs(newSegment).setStatus(RealizationStatusEnum.READY);
         updateCube(cubeBuilder);
     }
 
@@ -764,8 +734,7 @@ public class CubeManager implements IRealizationProvider {
         List<CubeSegment> tobe = cube.calculateToBeSegments(newSegments);
         List<CubeSegment> newList = Arrays.asList(newSegments);
         if (tobe.containsAll(newList) == false) {
-            throw new IllegalStateException("For cube " + cube + ", the new segments " + newList
-                    + " do not fit in its current " + cube.getSegments() + "; the resulted tobe is " + tobe);
+            throw new IllegalStateException("For cube " + cube + ", the new segments " + newList + " do not fit in its current " + cube.getSegments() + "; the resulted tobe is " + tobe);
         }
     }
 
@@ -807,9 +776,7 @@ public class CubeManager implements IRealizationProvider {
             CubeDesc cubeDesc = CubeDescManager.getInstance(config).getCubeDesc(cube.getDescName());
             checkNotNull(cubeDesc, "cube descriptor '%s' (for cube '%s') not found", cube.getDescName(), cubeName);
             if (!isSpecialTestCube(cubeName))
-                checkState(cubeDesc.getName().equals(cubeName),
-                        "cube name '%s' must be same as descriptor name '%s', but it is not", cubeName,
-                        cubeDesc.getName());
+                checkState(cubeDesc.getName().equals(cubeName), "cube name '%s' must be same as descriptor name '%s', but it is not", cubeName, cubeDesc.getName());
 
             if (!cubeDesc.getError().isEmpty()) {
                 cube.setStatus(RealizationStatusEnum.DESCBROKEN);
@@ -840,8 +807,7 @@ public class CubeManager implements IRealizationProvider {
 
     private boolean isSpecialTestCube(String cubeName) {
         return cubeName.equals("kylin_sales_cube") //
-                || config.isDevEnv()
-                        && (cubeName.startsWith("test_kylin_cube") || cubeName.startsWith("test_streaming"));
+                || config.isDevEnv() && (cubeName.startsWith("test_kylin_cube") || cubeName.startsWith("test_streaming"));
     }
 
     private MetadataManager getMetadataManager() {
@@ -938,8 +904,7 @@ public class CubeManager implements IRealizationProvider {
                     hole.setDateRangeStart(first.getDateRangeEnd());
                     hole.setDateRangeEnd(second.getDateRangeStart());
                 }
-                hole.setName(CubeSegment.makeSegmentName(hole.getDateRangeStart(), hole.getDateRangeEnd(),
-                        hole.getSourceOffsetStart(), hole.getSourceOffsetEnd()));
+                hole.setName(CubeSegment.makeSegmentName(hole.getDateRangeStart(), hole.getDateRangeEnd(), hole.getSourceOffsetStart(), hole.getSourceOffsetEnd()));
                 holes.add(hole);
             }
         }
@@ -957,8 +922,7 @@ public class CubeManager implements IRealizationProvider {
         List<DictionaryDesc> dictionaryDescList = cubeDesc.getDictionaries();
         if (dictionaryDescList != null) {
             for (DictionaryDesc dictionaryDesc : dictionaryDescList) {
-                if (dictionaryDesc.getBuilderClass() != null
-                        && dictionaryDesc.getBuilderClass().equalsIgnoreCase(GLOBAL_DICTIONNARY_CLASS)) {
+                if (dictionaryDesc.getBuilderClass() != null && dictionaryDesc.getBuilderClass().equalsIgnoreCase(GLOBAL_DICTIONNARY_CLASS)) {
                     for (int i = 0; i < factDictCols.size(); i++) {
                         if (factDictCols.get(i).equals(dictionaryDesc.getColumnRef())) {
                             uhcIndex[i] = 1;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
index 24f3cdc..1b28bd8 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
@@ -371,11 +371,9 @@ public class CubeSegment implements Comparable<CubeSegment>, IBuildable, ISegmen
     public void validate() throws IllegalStateException {
         if (cubeInstance.getDescriptor().getModel().getPartitionDesc().isPartitioned()) {
             if (!isSourceOffsetsOn() && dateRangeStart >= dateRangeEnd)
-                throw new IllegalStateException("Invalid segment, dateRangeStart(" + dateRangeStart
-                        + ") must be smaller than dateRangeEnd(" + dateRangeEnd + ") in segment " + this);
+                throw new IllegalStateException("Invalid segment, dateRangeStart(" + dateRangeStart + ") must be smaller than dateRangeEnd(" + dateRangeEnd + ") in segment " + this);
             if (isSourceOffsetsOn() && sourceOffsetStart >= sourceOffsetEnd)
-                throw new IllegalStateException("Invalid segment, sourceOffsetStart(" + sourceOffsetStart
-                        + ") must be smaller than sourceOffsetEnd(" + sourceOffsetEnd + ") in segment " + this);
+                throw new IllegalStateException("Invalid segment, sourceOffsetStart(" + sourceOffsetStart + ") must be smaller than sourceOffsetEnd(" + sourceOffsetEnd + ") in segment " + this);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java b/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
index e2bce26..d2e3a83 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
@@ -39,8 +39,7 @@ public class RawQueryLastHacker {
 
         // If no group by and metric found, then it's simple query like select ... from ... where ...,
         // But we have no raw data stored, in order to return better results, we hack to output sum of metric column
-        logger.info(
-                "No group by and aggregation found in this query, will hack some result for better look of output...");
+        logger.info("No group by and aggregation found in this query, will hack some result for better look of output...");
 
         // If it's select * from ...,
         // We need to retrieve cube to manually add columns into sqlDigest, so that we have full-columns results as output.

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/cli/CubeSignatureRefresher.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cli/CubeSignatureRefresher.java b/core-cube/src/main/java/org/apache/kylin/cube/cli/CubeSignatureRefresher.java
index 0d04780..1db804f 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cli/CubeSignatureRefresher.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cli/CubeSignatureRefresher.java
@@ -53,8 +53,7 @@ public class CubeSignatureRefresher {
     }
 
     public void update() {
-        logger.info("Reloading Cube Metadata from store: "
-                + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
+        logger.info("Reloading Cube Metadata from store: " + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
         CubeDescManager cubeDescManager = CubeDescManager.getInstance(config);
         List<CubeDesc> cubeDescs;
         if (ArrayUtils.isEmpty(cubeNames)) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/cli/DictionaryGeneratorCLI.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cli/DictionaryGeneratorCLI.java b/core-cube/src/main/java/org/apache/kylin/cube/cli/DictionaryGeneratorCLI.java
index 6a4bef4..e7368e8 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cli/DictionaryGeneratorCLI.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cli/DictionaryGeneratorCLI.java
@@ -47,16 +47,14 @@ public class DictionaryGeneratorCLI {
 
     private static final Logger logger = LoggerFactory.getLogger(DictionaryGeneratorCLI.class);
 
-    public static void processSegment(KylinConfig config, String cubeName, String segmentID,
-            DistinctColumnValuesProvider factTableValueProvider, DictionaryProvider dictProvider) throws IOException {
+    public static void processSegment(KylinConfig config, String cubeName, String segmentID, DistinctColumnValuesProvider factTableValueProvider, DictionaryProvider dictProvider) throws IOException {
         CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
         CubeSegment segment = cube.getSegmentById(segmentID);
 
         processSegment(config, segment, factTableValueProvider, dictProvider);
     }
 
-    private static void processSegment(KylinConfig config, CubeSegment cubeSeg,
-            DistinctColumnValuesProvider factTableValueProvider, DictionaryProvider dictProvider) throws IOException {
+    private static void processSegment(KylinConfig config, CubeSegment cubeSeg, DistinctColumnValuesProvider factTableValueProvider, DictionaryProvider dictProvider) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(config);
 
         // dictionary
@@ -69,8 +67,7 @@ public class DictionaryGeneratorCLI {
                     logger.debug("Dict for '" + col.getName() + "' has already been built, save it");
                     cubeMgr.saveDictionary(cubeSeg, col, inpTable, dict);
                 } else {
-                    logger.debug(
-                            "Dict for '" + col.getName() + "' not pre-built, build it from " + inpTable.toString());
+                    logger.debug("Dict for '" + col.getName() + "' not pre-built, build it from " + inpTable.toString());
                     cubeMgr.buildDictionary(cubeSeg, col, inpTable);
                 }
             } else {
@@ -94,7 +91,7 @@ public class DictionaryGeneratorCLI {
             logger.info("Building snapshot of " + tableIdentity);
             cubeMgr.buildSnapshotTable(cubeSeg, tableIdentity);
         }
-
+        
         for (TableRef lookup : toCheckLookup) {
             logger.info("Checking snapshot of " + lookup);
             JoinDesc join = cubeSeg.getModel().getJoinsTree().getJoinByPKSide(lookup);
@@ -102,8 +99,7 @@ public class DictionaryGeneratorCLI {
         }
     }
 
-    private static IReadableTable decideInputTable(DataModelDesc model, TblColRef col,
-            DistinctColumnValuesProvider factTableValueProvider) {
+    private static IReadableTable decideInputTable(DataModelDesc model, TblColRef col, DistinctColumnValuesProvider factTableValueProvider) {
         KylinConfig config = model.getConfig();
         DictionaryManager dictMgr = DictionaryManager.getInstance(config);
         TblColRef srcCol = dictMgr.decideSourceData(model, col);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/cuboid/AggregationGroupScheduler.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/AggregationGroupScheduler.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/AggregationGroupScheduler.java
index c69dc5e..cdcbcfc 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/AggregationGroupScheduler.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/AggregationGroupScheduler.java
@@ -122,5 +122,5 @@ public class AggregationGroupScheduler {
         }
         return false;
     }
-
+    
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
index f80e8c4..76cb511 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/Cuboid.java
@@ -58,14 +58,12 @@ public class Cuboid implements Comparable<Cuboid>, Serializable {
     };
 
     // this is the only entry point for query to find the right cuboid
-    public static Cuboid identifyCuboid(CubeDesc cubeDesc, Set<TblColRef> dimensions,
-            Collection<FunctionDesc> metrics) {
+    public static Cuboid identifyCuboid(CubeDesc cubeDesc, Set<TblColRef> dimensions, Collection<FunctionDesc> metrics) {
         long cuboidID = identifyCuboidId(cubeDesc, dimensions, metrics);
         return Cuboid.findById(cubeDesc, cuboidID);
     }
 
-    public static long identifyCuboidId(CubeDesc cubeDesc, Set<TblColRef> dimensions,
-            Collection<FunctionDesc> metrics) {
+    public static long identifyCuboidId(CubeDesc cubeDesc, Set<TblColRef> dimensions, Collection<FunctionDesc> metrics) {
         for (FunctionDesc metric : metrics) {
             if (metric.getMeasureType().onlyAggrInBaseCuboid())
                 return Cuboid.getBaseCuboidId(cubeDesc);
@@ -178,16 +176,14 @@ public class Cuboid implements Comparable<Cuboid>, Serializable {
 
         if (!agg.isOnTree(cuboidID)) {
             // no column, add one column
-            long nonJointDims = removeBits((agg.getPartialCubeFullMask() ^ agg.getMandatoryColumnMask()),
-                    agg.getJoints());
+            long nonJointDims = removeBits((agg.getPartialCubeFullMask() ^ agg.getMandatoryColumnMask()), agg.getJoints());
             if (nonJointDims != 0) {
-                long nonJointNonHierarchy = removeBits(nonJointDims,
-                        Collections2.transform(agg.getHierarchyMasks(), new Function<HierarchyMask, Long>() {
-                            @Override
-                            public Long apply(HierarchyMask input) {
-                                return input.fullMask;
-                            }
-                        }));
+                long nonJointNonHierarchy = removeBits(nonJointDims, Collections2.transform(agg.getHierarchyMasks(), new Function<HierarchyMask, Long>() {
+                    @Override
+                    public Long apply(HierarchyMask input) {
+                        return input.fullMask;
+                    }
+                }));
                 if (nonJointNonHierarchy != 0) {
                     //there exists dim that does not belong to any joint or any hierarchy, that's perfect
                     return cuboidID | Long.lowestOneBit(nonJointNonHierarchy);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidCLI.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidCLI.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidCLI.java
index 530e149..e2ff97e 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidCLI.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidCLI.java
@@ -75,16 +75,14 @@ public class CuboidCLI {
         if (validate) {
             if (enableDimCap) {
                 if (cubeDesc.getAllCuboids().size() != cuboidSet.size()) {
-                    throw new IllegalStateException(
-                            "Expected cuboid set " + cubeDesc.getAllCuboids() + "; but actual cuboid set " + cuboidSet);
+                    throw new IllegalStateException("Expected cuboid set " + cubeDesc.getAllCuboids() + "; but actual cuboid set " + cuboidSet);
                 }
             } else {
                 //only run this for test purpose, performance is bad when # of dims is large
                 TreeSet<Long> enumCuboids = enumCalcCuboidCount(cubeDesc);
                 System.out.println(Arrays.toString(enumCuboids.toArray(new Long[enumCuboids.size()])));
                 if (enumCuboids.equals(cuboidSet) == false) {
-                    throw new IllegalStateException(
-                            "Expected cuboid set " + enumCuboids + "; but actual cuboid set " + cuboidSet);
+                    throw new IllegalStateException("Expected cuboid set " + enumCuboids + "; but actual cuboid set " + cuboidSet);
                 }
 
                 //check all valid and invalid

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidScheduler.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidScheduler.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidScheduler.java
index f1f5067..def3f03 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidScheduler.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidScheduler.java
@@ -19,6 +19,10 @@
 package org.apache.kylin.cube.cuboid;
 
 import java.io.Serializable;
+
+/**
+ */
+
 import java.util.ArrayDeque;
 import java.util.Collection;
 import java.util.Collections;
@@ -42,9 +46,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-/**
- */
-
 @SuppressWarnings("serial")
 public class CuboidScheduler implements Serializable {
     private final CubeDesc cubeDesc;
@@ -142,8 +143,7 @@ public class CuboidScheduler implements Serializable {
         maxCombination = maxCombination < 0 ? Long.MAX_VALUE : maxCombination;
         while (!children.isEmpty()) {
             if (cuboidHolder.size() > maxCombination) {
-                throw new IllegalStateException("Too many cuboids for the cube. Cuboid combination reached "
-                        + cuboidHolder.size() + " and limit is " + maxCombination + ". Abort calculation.");
+                throw new IllegalStateException("Too many cuboids for the cube. Cuboid combination reached " + cuboidHolder.size() + " and limit is " + maxCombination + ". Abort calculation.");
             }
             cuboidHolder.addAll(children);
             children = getOnTreeParentsByLayer(children);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/gridtable/AsymmetricRecordComparator.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/AsymmetricRecordComparator.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/AsymmetricRecordComparator.java
index 32a9c7e..74dc855 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/AsymmetricRecordComparator.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/AsymmetricRecordComparator.java
@@ -18,11 +18,11 @@
 
 package org.apache.kylin.cube.gridtable;
 
-import java.util.Collection;
-
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.gridtable.GTRecord;
 
+import java.util.Collection;
+
 /**
  * asymmetric means compare(a,b) > 0 does not cause compare(b,a) < 0 
  * so min max functions will not be supported

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeCodeSystem.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeCodeSystem.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeCodeSystem.java
index cd05fb6..aaa12a7 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeCodeSystem.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeCodeSystem.java
@@ -123,8 +123,7 @@ public class CubeCodeSystem implements IGTCodeSystem {
             try {
                 serializer.serialize(value, buf);
             } catch (IllegalArgumentException ex) {
-                IllegalArgumentException rewordEx = new IllegalArgumentException("Column " + col + " value '"
-                        + toStringBinary(value) + "' met dictionary error: " + ex.getMessage());
+                IllegalArgumentException rewordEx = new IllegalArgumentException("Column " + col + " value '" + toStringBinary(value) + "' met dictionary error: " + ex.getMessage());
                 rewordEx.setStackTrace(ex.getStackTrace());
                 throw rewordEx;
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
index 6dae1ef..5cee9df 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeGridTable.java
@@ -28,8 +28,7 @@ public class CubeGridTable {
 
         GTInfo.Builder builder = GTInfo.builder();
         builder.setTableName("Cuboid " + cuboid.getId());
-        builder.setCodeSystem(
-                new CubeCodeSystem(mapping.getDimensionEncodings(dimEncMap), mapping.getDependentMetricsMap()));
+        builder.setCodeSystem(new CubeCodeSystem(mapping.getDimensionEncodings(dimEncMap), mapping.getDependentMetricsMap()));
         builder.setColumns(mapping.getDataTypes());
         builder.setPrimaryKey(mapping.getPrimaryKey());
         builder.enableColumnBlock(mapping.getColumnBlocks());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/gridtable/ScanRangePlannerBase.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/ScanRangePlannerBase.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/ScanRangePlannerBase.java
index 05fbef5..ed0a77a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/ScanRangePlannerBase.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/ScanRangePlannerBase.java
@@ -118,8 +118,7 @@ public abstract class ScanRangePlannerBase {
             }
 
             @SuppressWarnings("unchecked")
-            ColumnRange newRange = new ColumnRange(comp.getColumn(), (Set<ByteArray>) comp.getValues(),
-                    comp.getOperator());
+            ColumnRange newRange = new ColumnRange(comp.getColumn(), (Set<ByteArray>) comp.getValues(), comp.getOperator());
             ColumnRange existing = rangeMap.get(newRange.column);
             if (existing == null) {
                 rangeMap.put(newRange.column, newRange);
@@ -171,8 +170,7 @@ public abstract class ScanRangePlannerBase {
             this.column = column;
 
             //TODO: the treatment is un-precise
-            if (op == TupleFilter.FilterOperatorEnum.EQ || op == TupleFilter.FilterOperatorEnum.IN
-                    || op == TupleFilter.FilterOperatorEnum.LTE || op == TupleFilter.FilterOperatorEnum.GTE) {
+            if (op == TupleFilter.FilterOperatorEnum.EQ || op == TupleFilter.FilterOperatorEnum.IN || op == TupleFilter.FilterOperatorEnum.LTE || op == TupleFilter.FilterOperatorEnum.GTE) {
                 isBoundryInclusive = true;
             }
 
@@ -270,8 +268,7 @@ public abstract class ScanRangePlannerBase {
         private Set<ByteArray> filter(Set<ByteArray> equalValues, ByteArray beginValue, ByteArray endValue) {
             Set<ByteArray> result = Sets.newHashSetWithExpectedSize(equalValues.size());
             for (ByteArray v : equalValues) {
-                if (rangeStartEndComparator.comparator.compare(beginValue, v) <= 0
-                        && rangeStartEndComparator.comparator.compare(v, endValue) <= 0) {
+                if (rangeStartEndComparator.comparator.compare(beginValue, v) <= 0 && rangeStartEndComparator.comparator.compare(v, endValue) <= 0) {
                     result.add(v);
                 }
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/AbstractInMemCubeBuilder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/AbstractInMemCubeBuilder.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/AbstractInMemCubeBuilder.java
index 97dcc70..c7a4a05 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/AbstractInMemCubeBuilder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/AbstractInMemCubeBuilder.java
@@ -49,8 +49,7 @@ abstract public class AbstractInMemCubeBuilder {
     protected int taskThreadCount = 1;
     protected int reserveMemoryMB = 100;
 
-    public AbstractInMemCubeBuilder(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public AbstractInMemCubeBuilder(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         if (flatDesc == null)
             throw new NullPointerException();
         if (cubeDesc == null)
@@ -92,8 +91,7 @@ abstract public class AbstractInMemCubeBuilder {
 
     protected void outputCuboid(long cuboidId, GridTable gridTable, ICuboidWriter output) throws IOException {
         long startTime = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(gridTable.getInfo()).setRanges(null).setDimensions(null)
-                .setFilterPushDown(null).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(gridTable.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest();
         IGTScanner scanner = gridTable.scan(req);
         for (GTRecord record : scanner) {
             output.write(cuboidId, record);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
index e6f5f08..41d2dfb 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
@@ -323,8 +323,7 @@ public class ConcurrentDiskStore implements IGTStore, Closeable {
 
     private void openWriteChannel(long startOffset) throws IOException {
         if (startOffset > 0) { // TODO does not support append yet
-            writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.APPEND,
-                    StandardOpenOption.WRITE);
+            writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
         } else {
             diskFile.delete();
             writeChannel = FileChannel.open(diskFile.toPath(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
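The ConcurrentDiskStore hunk above is a pure re-wrap of the FileChannel.open call; the underlying behaviour it preserves is a choice between appending to an existing spill file and creating a fresh one. A minimal sketch of that choice (class and method names here are illustrative, not part of the original source):

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class WriteChannelSketch {
    // Hypothetical illustration of the open-mode decision seen above.
    static FileChannel open(Path diskFile, long startOffset) throws IOException {
        if (startOffset > 0) {
            // resume writing: keep existing bytes and append to the end
            return FileChannel.open(diskFile, StandardOpenOption.CREATE,
                    StandardOpenOption.APPEND, StandardOpenOption.WRITE);
        }
        // start over: drop any previous file and create a new one
        Files.deleteIfExists(diskFile);
        return FileChannel.open(diskFile, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
    }
}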


[60/67] [abbrv] kylin git commit: KYLIN-216 update draft design

Posted by li...@apache.org.
KYLIN-216 update draft design


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/58a63073
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/58a63073
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/58a63073

Branch: refs/heads/master
Commit: 58a63073620d16c4bc1aeb0a44db6caadccd3d17
Parents: 944d3aa
Author: Luwei-Chen <ch...@apache.org>
Authored: Fri Jun 2 21:10:45 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Fri Jun 2 21:25:23 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/CubeDescManager.java  | 20 ++++++---
 .../java/org/apache/kylin/cube/CubeManager.java |  4 +-
 .../kylin/metadata/project/ProjectManager.java  | 36 ++++++++++-----
 .../kylin/rest/controller/CubeController.java   |  3 --
 .../rest/controller2/CubeControllerV2.java      | 47 ++++++++++++--------
 .../rest/controller2/ModelControllerV2.java     | 46 ++++++++++---------
 .../rest/response/CubeInstanceResponse.java     | 18 --------
 .../rest/response/DataModelDescResponse.java    | 18 --------
 .../apache/kylin/rest/service/CubeService.java  | 21 +++++----
 9 files changed, 107 insertions(+), 106 deletions(-)
----------------------------------------------------------------------
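The controller hunks below replace up-front pagination with filtering draft entries first and paginating the filtered responses. A minimal sketch of that filter-then-paginate pattern, with hypothetical names and simplified draft handling (the real controllers also rename a draft to its parent when no official copy exists):

import java.util.ArrayList;
import java.util.List;

public class FilterThenPaginateSketch {
    // Hypothetical stand-in for CubeInstanceResponse / DataModelDescResponse.
    static class Item {
        final String name;
        final boolean draft;
        Item(String name, boolean draft) { this.name = name; this.draft = draft; }
    }

    // Filter first, then page over the filtered list, as the V2 controllers below do.
    static List<Item> page(List<Item> all, int pageOffset, int pageSize) {
        List<Item> kept = new ArrayList<>();
        for (Item item : all) {
            if (!item.draft) {
                kept.add(item);
            }
        }
        int offset = Math.min(pageOffset * pageSize, kept.size());
        int limit = Math.min(pageSize, kept.size() - offset);
        return kept.subList(offset, offset + limit);
    }
}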


http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
index 85ca929..4c3c85d 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
@@ -129,10 +129,11 @@ public class CubeDescManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
+                throws IOException {
             String cubeDescName = cacheKey;
             CubeDesc cubeDesc = getCubeDesc(cubeDescName);
-            String modelName = cubeDesc == null ? null : cubeDesc.getModel().getName();
+            String modelName = cubeDesc == null ? null : cubeDesc.getModelName();
 
             if (event == Event.DROP)
                 removeLocalCubeDesc(cubeDescName);
@@ -265,7 +266,8 @@ public class CubeDescManager {
                 int keyLength = 0;
                 while (parameter != null) {
                     String encoding = configuration.get(TopNMeasureType.CONFIG_ENCODING_PREFIX + parameter.getValue());
-                    String encodingVersionStr = configuration.get(TopNMeasureType.CONFIG_ENCODING_VERSION_PREFIX + parameter.getValue());
+                    String encodingVersionStr = configuration
+                            .get(TopNMeasureType.CONFIG_ENCODING_VERSION_PREFIX + parameter.getValue());
                     if (StringUtils.isEmpty(encoding) || DictionaryDimEnc.ENCODING_NAME.equals(encoding)) {
                         keyLength += DictionaryDimEnc.MAX_ENCODING_LENGTH; // estimation for dict encoding
                     } else {
@@ -279,7 +281,8 @@ public class CubeDescManager {
                             }
                         }
                         Object[] encodingConf = DimensionEncoding.parseEncodingConf(encoding);
-                        DimensionEncoding dimensionEncoding = DimensionEncodingFactory.create((String) encodingConf[0], (String[]) encodingConf[1], encodingVersion);
+                        DimensionEncoding dimensionEncoding = DimensionEncodingFactory.create((String) encodingConf[0],
+                                (String[]) encodingConf[1], encodingVersion);
                         keyLength += dimensionEncoding.getLengthOfEncoding();
                     }
 
@@ -309,16 +312,19 @@ public class CubeDescManager {
 
     private void reloadAllCubeDesc() throws IOException {
         ResourceStore store = getStore();
-        logger.info("Reloading Cube Metadata from folder " + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
+        logger.info("Reloading Cube Metadata from folder "
+                + store.getReadableResourcePath(ResourceStore.CUBE_DESC_RESOURCE_ROOT));
 
         cubeDescMap.clear();
 
-        List<String> paths = store.collectResourceRecursively(ResourceStore.CUBE_DESC_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
+        List<String> paths = store.collectResourceRecursively(ResourceStore.CUBE_DESC_RESOURCE_ROOT,
+                MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
             CubeDesc desc = loadCubeDesc(path, true);
 
             if (!path.equals(desc.getResourcePath())) {
-                logger.error("Skip suspicious desc at " + path + ", " + desc + " should be at " + desc.getResourcePath());
+                logger.error(
+                        "Skip suspicious desc at " + path + ", " + desc + " should be at " + desc.getResourcePath());
                 continue;
             }
             if (cubeDescMap.containsKey(desc.getName())) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index e6cd761..8546f76 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -326,7 +326,9 @@ public class CubeManager implements IRealizationProvider {
         cube.setOwner(owner);
 
         updateCubeWithRetry(new CubeUpdate(cube), 0);
-        ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cubeName, projectName, owner);
+        if (!desc.isDraft()) {
+            ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cubeName, projectName, owner);
+        }
 
         if (listener != null)
             listener.afterCubeCreate(cube);

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index a172db8..71d0e6c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -28,6 +28,7 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -52,7 +53,8 @@ import com.google.common.collect.Sets;
 public class ProjectManager {
     private static final Logger logger = LoggerFactory.getLogger(ProjectManager.class);
     private static final ConcurrentMap<KylinConfig, ProjectManager> CACHE = new ConcurrentHashMap<KylinConfig, ProjectManager>();
-    public static final Serializer<ProjectInstance> PROJECT_SERIALIZER = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
+    public static final Serializer<ProjectInstance> PROJECT_SERIALIZER = new JsonSerializer<ProjectInstance>(
+            ProjectInstance.class);
 
     public static ProjectManager getInstance(KylinConfig config) {
         ProjectManager r = CACHE.get(config);
@@ -108,7 +110,8 @@ public class ProjectManager {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
+                throws IOException {
             String project = cacheKey;
 
             if (event == Event.DROP)
@@ -129,7 +132,8 @@ public class ProjectManager {
         ResourceStore store = getStore();
         List<String> paths = store.collectResourceRecursively(ResourceStore.PROJECT_RESOURCE_ROOT, ".json");
 
-        logger.debug("Loading Project from folder " + store.getReadableResourcePath(ResourceStore.PROJECT_RESOURCE_ROOT));
+        logger.debug(
+                "Loading Project from folder " + store.getReadableResourcePath(ResourceStore.PROJECT_RESOURCE_ROOT));
 
         for (String path : paths) {
             reloadProjectLocalAt(path);
@@ -165,7 +169,8 @@ public class ProjectManager {
         return projectMap.get(projectName);
     }
 
-    public ProjectInstance createProject(String projectName, String owner, String description, LinkedHashMap<String, String> overrideProps) throws IOException {
+    public ProjectInstance createProject(String projectName, String owner, String description,
+            LinkedHashMap<String, String> overrideProps) throws IOException {
         logger.info("Creating project " + projectName);
 
         ProjectInstance currentProject = getProject(projectName);
@@ -191,7 +196,8 @@ public class ProjectManager {
         }
 
         if (projectInstance.getRealizationCount(null) != 0) {
-            throw new IllegalStateException("The project named " + projectName + " can not be deleted because there's still realizations in it. Delete them first.");
+            throw new IllegalStateException("The project named " + projectName
+                    + " can not be deleted because there's still realizations in it. Delete them first.");
         }
 
         logger.info("Dropping project '" + projectInstance.getName() + "'");
@@ -210,7 +216,8 @@ public class ProjectManager {
     }
 
     //update project itself
-    public ProjectInstance updateProject(ProjectInstance project, String newName, String newDesc, LinkedHashMap<String, String> overrideProps) throws IOException {
+    public ProjectInstance updateProject(ProjectInstance project, String newName, String newDesc,
+            LinkedHashMap<String, String> overrideProps) throws IOException {
         if (!project.getName().equals(newName)) {
             ProjectInstance newProject = this.createProject(newName, project.getOwner(), newDesc, overrideProps);
 
@@ -286,16 +293,24 @@ public class ProjectManager {
         return newProject;
     }
 
-    public ProjectInstance moveRealizationToProject(RealizationType type, String realizationName, String newProjectName, String owner) throws IOException {
+    public ProjectInstance moveRealizationToProject(RealizationType type, String realizationName, String newProjectName,
+            String owner) throws IOException {
         removeRealizationsFromProjects(type, realizationName);
         return addRealizationToProject(type, realizationName, newProjectName, owner);
     }
 
-    private ProjectInstance addRealizationToProject(RealizationType type, String realizationName, String project, String user) throws IOException {
+    private ProjectInstance addRealizationToProject(RealizationType type, String realizationName, String project,
+            String user) throws IOException {
         String newProjectName = norm(project);
+        if (StringUtils.isEmpty(newProjectName)) {
+            throw new IllegalArgumentException("Project name should not be empty.");
+        }
         ProjectInstance newProject = getProject(newProjectName);
         if (newProject == null) {
-            newProject = this.createProject(newProjectName, user, "This is a project automatically added when adding realization " + realizationName + "(" + type + ")", null);
+            newProject = this.createProject(newProjectName, user,
+                    "This is a project automatically added when adding realization " + realizationName + "(" + type
+                            + ")",
+                    null);
         }
         newProject.addRealizationEntry(type, realizationName);
         updateProject(newProject);
@@ -436,7 +451,8 @@ public class ProjectManager {
 
     public boolean isExposedColumn(String project, String table, String col) {
         return config.isAdhocEnabled() ? //
-                l2Cache.isDefinedColumn(norm(project), table, col) || l2Cache.isExposedColumn(norm(project), table, col) : //
+                l2Cache.isDefinedColumn(norm(project), table, col) || l2Cache.isExposedColumn(norm(project), table, col)
+                : //
                 l2Cache.isExposedColumn(norm(project), table, col);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index f932509..bfa5603 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -299,9 +299,6 @@ public class CubeController extends BasicController {
             if (cube == null) {
                 throw new InternalErrorException("Cannot find cube " + cubeName);
             }
-            if (cube.getStatus() != null && cube.getStatus().equals("DRAFT")) {
-                throw new BadRequestException("Cannot build draft cube");
-            }
             return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //
                     sourcePartitionOffsetStart, sourcePartitionOffsetEnd, CubeBuildTypeEnum.valueOf(buildType), force, submitter);
         } catch (Throwable e) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
index 8179bc8..2edfe7d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
@@ -21,7 +21,6 @@ package org.apache.kylin.rest.controller2;
 import java.io.IOException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -51,7 +50,6 @@ import org.apache.kylin.rest.request.CubeRequest;
 import org.apache.kylin.rest.request.JobBuildRequest;
 import org.apache.kylin.rest.request.JobBuildRequest2;
 import org.apache.kylin.rest.response.CubeInstanceResponse;
-import org.apache.kylin.rest.response.CubeInstanceResponse.CubeComparator;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.GeneralResponse;
 import org.apache.kylin.rest.response.HBaseResponse;
@@ -117,20 +115,19 @@ public class CubeControllerV2 extends BasicController {
         List<CubeInstanceResponse> cubeInstanceResponses = new ArrayList<CubeInstanceResponse>();
         List<CubeInstance> cubes = cubeService.listAllCubes(cubeName, projectName, modelName);
 
-        int offset = pageOffset * pageSize;
-        int limit = pageSize;
-
-        if (cubes.size() <= offset) {
-            offset = cubes.size();
-            limit = 0;
-        }
+        for (CubeInstance cube : cubes) {
+            CubeInstanceResponse cubeInstanceResponse = new CubeInstanceResponse(cube);
 
-        if ((cubes.size() - offset) < limit) {
-            limit = cubes.size() - offset;
-        }
+            if (cube.getDescriptor().isDraft()) {
+                String parentName = cube.getName().substring(0, cube.getName().lastIndexOf("_draft"));
+                CubeInstance official = cubeService.getCubeManager().getCube(parentName);
+                if (official == null) {
+                    cubeInstanceResponse.setName(parentName);
+                } else {
+                    continue;
+                }
+            }
 
-        for (CubeInstance cube : cubes.subList(offset, offset + limit)) {
-            CubeInstanceResponse cubeInstanceResponse = new CubeInstanceResponse(cube);
             cubeInstanceResponse.setPartitionDateStart(cube.getDescriptor().getPartitionDateStart());
 
             String getModelName = modelName == null ? cube.getDescriptor().getModelName() : modelName;
@@ -154,10 +151,22 @@ public class CubeControllerV2 extends BasicController {
 
             cubeInstanceResponses.add(cubeInstanceResponse);
         }
-        CubeComparator cubeComparator = new CubeComparator();
-        Collections.sort(cubeInstanceResponses, cubeComparator);
-        data.put("cubes", cubeInstanceResponses);
-        data.put("size", cubes.size());
+
+        int offset = pageOffset * pageSize;
+        int limit = pageSize;
+        int size = cubeInstanceResponses.size();
+
+        if (size <= offset) {
+            offset = size;
+            limit = 0;
+        }
+
+        if ((size - offset) < limit) {
+            limit = size - offset;
+        }
+
+        data.put("cubes", cubeInstanceResponses.subList(offset, offset + limit));
+        data.put("size", size);
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }
@@ -391,7 +400,7 @@ public class CubeControllerV2 extends BasicController {
         if (cube == null) {
             throw new BadRequestException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
         }
-        if (cube.getStatus() != null && cube.getStatus().equals("DRAFT")) {
+        if (cube.getDescriptor().isDraft()) {
             throw new BadRequestException(msg.getBUILD_DRAFT_CUBE());
         }
         return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
index 9e47790..57debf0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/ModelControllerV2.java
@@ -20,7 +20,6 @@ package org.apache.kylin.rest.controller2;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -42,7 +41,6 @@ import org.apache.kylin.rest.msg.Message;
 import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.ModelRequest;
 import org.apache.kylin.rest.response.DataModelDescResponse;
-import org.apache.kylin.rest.response.DataModelDescResponse.ModelComparator;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.GeneralResponse;
 import org.apache.kylin.rest.response.ResponseCode;
@@ -100,33 +98,39 @@ public class ModelControllerV2 extends BasicController {
         HashMap<String, Object> data = new HashMap<String, Object>();
         List<DataModelDesc> models = modelService.listAllModels(modelName, projectName);
 
-        int offset = pageOffset * pageSize;
-        int limit = pageSize;
-
-        if (models.size() <= offset) {
-            offset = models.size();
-            limit = 0;
-        }
-
-        if ((models.size() - offset) < limit) {
-            limit = models.size() - offset;
-        }
-
         List<DataModelDescResponse> dataModelDescResponses = new ArrayList<DataModelDescResponse>();
-        for (DataModelDesc model : modelService.getModels(modelName, projectName, limit, offset)) {
+        for (DataModelDesc model : models) {
             DataModelDescResponse dataModelDescResponse = new DataModelDescResponse(model);
-
+            if (model.isDraft()) {
+                String parentName = model.getName().substring(0, model.getName().lastIndexOf("_draft"));
+                DataModelDesc official = modelService.getMetadataManager().getDataModelDesc(parentName);
+                if (official == null) {
+                    dataModelDescResponse.setName(parentName);
+                } else {
+                    continue;
+                }
+            }
             if (projectName != null)
                 dataModelDescResponse.setProject(projectName);
             else
                 dataModelDescResponse.setProject(projectService.getProjectOfModel(model.getName()));
-
             dataModelDescResponses.add(dataModelDescResponse);
         }
-        ModelComparator modelComparator = new ModelComparator();
-        Collections.sort(dataModelDescResponses, modelComparator);
-        data.put("models", dataModelDescResponses);
-        data.put("size", models.size());
+
+        int offset = pageOffset * pageSize;
+        int limit = pageSize;
+        int size = dataModelDescResponses.size();
+
+        if (size <= offset) {
+            offset = size;
+            limit = 0;
+        }
+
+        if ((size - offset) < limit) {
+            limit = size - offset;
+        }
+        data.put("models", dataModelDescResponses.subList(offset, offset + limit));
+        data.put("size", size);
 
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, data, "");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/server-base/src/main/java/org/apache/kylin/rest/response/CubeInstanceResponse.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/response/CubeInstanceResponse.java b/server-base/src/main/java/org/apache/kylin/rest/response/CubeInstanceResponse.java
index ab59165..f05de22 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/response/CubeInstanceResponse.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/response/CubeInstanceResponse.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.rest.response;
 
-import java.util.Comparator;
-
 import org.apache.kylin.cube.CubeInstance;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -72,20 +70,4 @@ public class CubeInstanceResponse extends CubeInstance {
         setSegments(cubeInstance.getSegments());
         setCreateTimeUTC(cubeInstance.getCreateTimeUTC());
     }
-
-    public static class CubeComparator implements Comparator<CubeInstanceResponse> {
-        @Override
-        public int compare(CubeInstanceResponse o1, CubeInstanceResponse o2) {
-            String name1 = o1.getName(), name2 = o2.getName();
-            if (name1.endsWith("_draft")) {
-                name1 = name1.substring(0, name1.lastIndexOf("_draft"));
-            }
-            if (name2.endsWith("_draft")) {
-                name2 = name2.substring(0, name2.lastIndexOf("_draft"));
-            }
-            if (name1.equals(name2))
-                return o1.getName().compareTo(o2.getName());
-            return name1.compareTo(name2);
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/server-base/src/main/java/org/apache/kylin/rest/response/DataModelDescResponse.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/response/DataModelDescResponse.java b/server-base/src/main/java/org/apache/kylin/rest/response/DataModelDescResponse.java
index 7ff30d3..59c9090 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/response/DataModelDescResponse.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/response/DataModelDescResponse.java
@@ -18,8 +18,6 @@
 
 package org.apache.kylin.rest.response;
 
-import java.util.Comparator;
-
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.PartitionDesc;
 
@@ -62,20 +60,4 @@ public class DataModelDescResponse extends DataModelDesc {
         setCapacity(dataModelDesc.getCapacity());
         setComputedColumnDescs(dataModelDesc.getComputedColumnDescs());
     }
-
-    public static class ModelComparator implements Comparator<DataModelDescResponse> {
-        @Override
-        public int compare(DataModelDescResponse o1, DataModelDescResponse o2) {
-            String name1 = o1.getName(), name2 = o2.getName();
-            if (name1.endsWith("_draft")) {
-                name1 = name1.substring(0, name1.lastIndexOf("_draft"));
-            }
-            if (name2.endsWith("_draft")) {
-                name2 = name2.substring(0, name2.lastIndexOf("_draft"));
-            }
-            if (name1.equals(name2))
-                return o1.getName().compareTo(o2.getName());
-            return name1.compareTo(name2);
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/58a63073/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 1eaa31c..3c0bbc6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -182,10 +182,13 @@ public class CubeService extends BasicService {
         }
 
         createdCube = getCubeManager().createCube(cubeName, projectName, createdDesc, owner);
-        accessService.init(createdCube, AclPermission.ADMINISTRATION);
 
-        ProjectInstance project = getProjectManager().getProject(projectName);
-        accessService.inherit(createdCube, project);
+        if (!desc.isDraft()) {
+            accessService.init(createdCube, AclPermission.ADMINISTRATION);
+
+            ProjectInstance project = getProjectManager().getProject(projectName);
+            accessService.inherit(createdCube, project);
+        }
 
         return createdCube;
     }
@@ -248,13 +251,13 @@ public class CubeService extends BasicService {
         if (!desc.isDraft()) {
             int cuboidCount = CuboidCLI.simulateCuboidGeneration(updatedCubeDesc, false);
             logger.info("Updated cube " + cube.getName() + " has " + cuboidCount + " cuboids");
-        }
 
-        ProjectManager projectManager = getProjectManager();
-        if (!isCubeInProject(newProjectName, cube)) {
-            String owner = SecurityContextHolder.getContext().getAuthentication().getName();
-            ProjectInstance newProject = projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(), newProjectName, owner);
-            accessService.inherit(cube, newProject);
+            ProjectManager projectManager = getProjectManager();
+            if (!isCubeInProject(newProjectName, cube)) {
+                String owner = SecurityContextHolder.getContext().getAuthentication().getName();
+                ProjectInstance newProject = projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(), newProjectName, owner);
+                accessService.inherit(cube, newProject);
+            }
         }
 
         return updatedCubeDesc;


[58/67] [abbrv] kylin git commit: minor, remove some verbose logs

Posted by li...@apache.org.
minor, remove some verbose logs


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/de193bcd
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/de193bcd
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/de193bcd

Branch: refs/heads/master
Commit: de193bcd1c4ef37b0d06a8c11557a64f68fa90ee
Parents: 0c85fb7
Author: lidongsjtu <li...@apache.org>
Authored: Thu Jun 1 21:32:58 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Thu Jun 1 21:37:09 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/KylinConfig.java    |  1 -
 .../org/apache/kylin/job/dao/ExecutableDao.java |  1 -
 .../kylin/metadata/cachesync/Broadcaster.java   |  2 +-
 .../engine/mr/common/AbstractHadoopJob.java     | 34 ++++++++++----------
 4 files changed, 18 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
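Most of the hunks below demote routine classpath and dependency bookkeeping from info to trace or debug rather than deleting it. A minimal sketch of that pattern with slf4j (class name illustrative; the added level guard is an optional refinement, not part of the original change):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class QuietBookkeepingSketch {
    private static final Logger logger = LoggerFactory.getLogger(QuietBookkeepingSketch.class);

    void appendJobJar(String jarPath) {
        // Routine detail goes to trace so default logging stays quiet;
        // the guard skips building the message when trace is disabled.
        if (logger.isTraceEnabled()) {
            logger.trace("append job jar: " + jarPath);
        }
    }
}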


http://git-wip-us.apache.org/repos/asf/kylin/blob/de193bcd/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 04af9f5..7974c80 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -131,7 +131,6 @@ public class KylinConfig extends KylinConfigBase {
          * LOCAL_FOLDER: path to resource folder
          */
         UriType uriType = decideUriType(uri);
-        logger.info("The URI " + uri + " is recognized as " + uriType);
 
         if (uriType == UriType.LOCAL_FOLDER) {
             KylinConfig config = new KylinConfig();

http://git-wip-us.apache.org/repos/asf/kylin/blob/de193bcd/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
index 70799d8..bd020f8 100644
--- a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
+++ b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
@@ -214,7 +214,6 @@ public class ExecutableDao {
     }
 
     public void updateJobOutput(ExecutableOutputPO output) throws PersistentException {
-        logger.debug("updating job output, id: " + output.getUuid());
         try {
             final long ts = writeJobOutputResource(pathOfJobOutput(output.getUuid()), output);
             output.setLastModified(ts);

http://git-wip-us.apache.org/repos/asf/kylin/blob/de193bcd/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
index 4a8c6d3..4fbfc7c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/cachesync/Broadcaster.java
@@ -192,7 +192,7 @@ public class Broadcaster {
         if (list == null)
             return;
 
-        logger.debug("Broadcasting metadata change: entity=" + entity + ", event=" + event + ", cacheKey=" + cacheKey + ", listeners=" + list);
+        logger.trace("Broadcasting metadata change: entity=" + entity + ", event=" + event + ", cacheKey=" + cacheKey + ", listeners=" + list);
 
         // prevents concurrent modification exception
         list = Lists.newArrayList(list);

http://git-wip-us.apache.org/repos/asf/kylin/blob/de193bcd/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index 764cbdd..abfa224 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -160,7 +160,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         File jarFile = new File(jarPath);
         if (jarFile.exists()) {
             job.setJar(jarPath);
-            logger.info("append job jar: " + jarPath);
+            logger.trace("append job jar: " + jarPath);
         } else {
             job.setJarByClass(this.getClass());
         }
@@ -168,7 +168,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         String kylinHiveDependency = System.getProperty("kylin.hive.dependency");
         String kylinHBaseDependency = System.getProperty("kylin.hbase.dependency");
         String kylinKafkaDependency = System.getProperty("kylin.kafka.dependency");
-        logger.info("append kylin.hbase.dependency: " + kylinHBaseDependency + " to " + MAP_REDUCE_CLASSPATH);
+        logger.trace("append kylin.hbase.dependency: " + kylinHBaseDependency + " to " + MAP_REDUCE_CLASSPATH);
 
         Configuration jobConf = job.getConfiguration();
         String classpath = jobConf.get(MAP_REDUCE_CLASSPATH);
@@ -185,7 +185,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         }
 
         jobConf.set(MAP_REDUCE_CLASSPATH, classpath);
-        logger.info("Hadoop job classpath is: " + job.getConfiguration().get(MAP_REDUCE_CLASSPATH));
+        logger.trace("Hadoop job classpath is: " + job.getConfiguration().get(MAP_REDUCE_CLASSPATH));
 
         /*
          *  set extra dependencies as tmpjars & tmpfiles if configured
@@ -197,28 +197,28 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
             // yarn classpath is comma separated
             kylinHiveDependency = kylinHiveDependency.replace(":", ",");
 
-            logger.info("Hive Dependencies Before Filtered: " + kylinHiveDependency);
+            logger.trace("Hive Dependencies Before Filtered: " + kylinHiveDependency);
             String filteredHive = filterKylinHiveDependency(kylinHiveDependency, kylinConf);
-            logger.info("Hive Dependencies After Filtered: " + filteredHive);
+            logger.trace("Hive Dependencies After Filtered: " + filteredHive);
 
             StringUtil.appendWithSeparator(kylinDependency, filteredHive);
         } else {
 
-            logger.info("No hive dependency jars set in the environment, will find them from classpath:");
+            logger.debug("No hive dependency jars set in the environment, will find them from classpath:");
 
             try {
                 String hiveExecJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.ql.Driver"));
 
                 StringUtil.appendWithSeparator(kylinDependency, hiveExecJarPath);
-                logger.info("hive-exec jar file: " + hiveExecJarPath);
+                logger.debug("hive-exec jar file: " + hiveExecJarPath);
 
                 String hiveHCatJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hive.hcatalog.mapreduce.HCatInputFormat"));
                 StringUtil.appendWithSeparator(kylinDependency, hiveHCatJarPath);
-                logger.info("hive-catalog jar file: " + hiveHCatJarPath);
+                logger.debug("hive-catalog jar file: " + hiveHCatJarPath);
 
                 String hiveMetaStoreJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.metastore.api.Table"));
                 StringUtil.appendWithSeparator(kylinDependency, hiveMetaStoreJarPath);
-                logger.info("hive-metastore jar file: " + hiveMetaStoreJarPath);
+                logger.debug("hive-metastore jar file: " + hiveMetaStoreJarPath);
             } catch (ClassNotFoundException e) {
                 logger.error("Cannot found hive dependency jars: " + e);
             }
@@ -227,14 +227,14 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         // for kafka dependencies
         if (kylinKafkaDependency != null) {
             kylinKafkaDependency = kylinKafkaDependency.replace(":", ",");
-            logger.info("Kafka Dependencies: " + kylinKafkaDependency);
+            logger.trace("Kafka Dependencies: " + kylinKafkaDependency);
             StringUtil.appendWithSeparator(kylinDependency, kylinKafkaDependency);
         } else {
-            logger.info("No Kafka dependency jar set in the environment, will find them from classpath:");
+            logger.debug("No Kafka dependency jar set in the environment, will find them from classpath:");
             try {
                 String kafkaClientJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer"));
                 StringUtil.appendWithSeparator(kylinDependency, kafkaClientJarPath);
-                logger.info("kafka jar file: " + kafkaClientJarPath);
+                logger.debug("kafka jar file: " + kafkaClientJarPath);
 
             } catch (ClassNotFoundException e) {
                 logger.debug("Not found kafka client jar from classpath, it is optional for normal build: " + e);
@@ -303,7 +303,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
                     logger.warn("The directory of kylin dependency '" + fileName + "' does not exist, skip");
                     continue;
                 }
-                
+
                 if (fs.getFileStatus(p).isDirectory()) {
                     appendTmpDir(job, fs, p, jarList, fileList);
                     continue;
@@ -355,7 +355,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
             tmpJars += "," + jarList;
         }
         conf.set("tmpjars", tmpJars);
-        logger.info("Job 'tmpjars' updated -- " + tmpJars);
+        logger.trace("Job 'tmpjars' updated -- " + tmpJars);
     }
 
     private void appendTmpFiles(String fileList, Configuration conf) {
@@ -369,7 +369,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
             tmpFiles += "," + fileList;
         }
         conf.set("tmpfiles", tmpFiles);
-        logger.info("Job 'tmpfiles' updated -- " + tmpFiles);
+        logger.trace("Job 'tmpfiles' updated -- " + tmpFiles);
     }
 
     private String getDefaultMapRedClasspath() {
@@ -427,7 +427,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
                     ret += addInputDirs(new String[] { path.toString() }, job);
                 }
             } else {
-                logger.debug("Add input " + inp);
+                logger.trace("Add input " + inp);
                 FileInputFormat.addInputPath(job, new Path(inp));
                 ret++;
             }
@@ -517,7 +517,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
 
     protected void cleanupTempConfFile(Configuration conf) {
         String tempMetaFileString = conf.get("tmpfiles");
-        logger.info("tempMetaFileString is : " + tempMetaFileString);
+        logger.trace("tempMetaFileString is : " + tempMetaFileString);
         if (tempMetaFileString != null) {
             if (tempMetaFileString.startsWith("file://")) {
                 tempMetaFileString = tempMetaFileString.substring("file://".length());


[13/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NewCubeSamplingMethodTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NewCubeSamplingMethodTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NewCubeSamplingMethodTest.java
index 869146b..f018f28 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NewCubeSamplingMethodTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NewCubeSamplingMethodTest.java
@@ -16,14 +16,13 @@
  * limitations under the License.
 */
 
-package org.apache.kylin.engine.mr.steps;
 
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Random;
-import java.util.Set;
+package org.apache.kylin.engine.mr.steps;
 
+import com.google.common.collect.Lists;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.Bytes;
@@ -33,10 +32,11 @@ import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import com.google.common.collect.Lists;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
 
 @Ignore
 public class NewCubeSamplingMethodTest {
@@ -64,6 +64,7 @@ public class NewCubeSamplingMethodTest {
         compareAccuracyBasic(dataSet);
     }
 
+
     @Ignore
     @Test
     public void testSmallCardData() throws Exception {
@@ -72,6 +73,7 @@ public class NewCubeSamplingMethodTest {
         compareAccuracyBasic(dataSet);
     }
 
+
     public void comparePerformanceBasic(final List<List<String>> rows) throws Exception {
         //old hash method
         ByteArray[] colHashValues = getNewColHashValues(ROW_LENGTH);
@@ -120,11 +122,11 @@ public class NewCubeSamplingMethodTest {
                     counter.add(hc.hash().asBytes());
                 }
                 long estimate = counter.getCountEstimate();
-                System.out.println("old method finished. Estimate cardinality : " + estimate + ". Error rate : "
-                        + countErrorRate(estimate, realCardinality));
+                System.out.println("old method finished. Estimate cardinality : " + estimate + ". Error rate : " + countErrorRate(estimate, realCardinality));
             }
         });
 
+
         long t2 = runAndGetTime(new TestCase() {
             @Override
             public void run() throws Exception {
@@ -147,8 +149,7 @@ public class NewCubeSamplingMethodTest {
                     counter.addHashDirectly(value);
                 }
                 long estimate = counter.getCountEstimate();
-                System.out.println("new method finished. Estimate cardinality : " + estimate + ". Error rate : "
-                        + countErrorRate(estimate, realCardinality));
+                System.out.println("new method finished. Estimate cardinality : " + estimate + ". Error rate : " + countErrorRate(estimate, realCardinality));
             }
         });
     }
@@ -178,6 +179,7 @@ public class NewCubeSamplingMethodTest {
         return counters;
     }
 
+
     private void addCuboidBitSet(long cuboidId, List<Integer[]> allCuboidsBitSet) {
         Integer[] indice = new Integer[Long.bitCount(cuboidId)];
 
@@ -206,8 +208,7 @@ public class NewCubeSamplingMethodTest {
         void run() throws Exception;
     }
 
-    private void putRowKeyToHLL(List<String> row, ByteArray[] colHashValues, HLLCounter[] cuboidCounters,
-            HashFunction hashFunction) {
+    private void putRowKeyToHLL(List<String> row, ByteArray[] colHashValues, HLLCounter[] cuboidCounters, HashFunction hashFunction) {
         int x = 0;
         for (String field : row) {
             Hasher hc = hashFunction.newHasher();
@@ -224,8 +225,7 @@ public class NewCubeSamplingMethodTest {
         }
     }
 
-    private void putRowKeyToHLLNew(List<String> row, long[] hashValuesLong, HLLCounter[] cuboidCounters,
-            HashFunction hashFunction) {
+    private void putRowKeyToHLLNew(List<String> row, long[] hashValuesLong, HLLCounter[] cuboidCounters, HashFunction hashFunction) {
         int x = 0;
         for (String field : row) {
             Hasher hc = hashFunction.newHasher();
@@ -266,7 +266,7 @@ public class NewCubeSamplingMethodTest {
         return row;
     }
 
-    private String[] smallCardRow = { "abc", "bcd", "jifea", "feaifj" };
+    private String[] smallCardRow = {"abc", "bcd", "jifea", "feaifj"};
 
     private Random rand = new Random(System.currentTimeMillis());
 
@@ -279,6 +279,7 @@ public class NewCubeSamplingMethodTest {
         return row;
     }
 
+
     private int countCardinality(List<List<String>> rows) {
         Set<String> diffCols = new HashSet<String>();
         for (List<String> row : rows) {
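
The sampling comparison above folds per-column hashes into one HLL counter per cuboid; addCuboidBitSet turns a cuboid id (a bitmask over the row-key columns) into the indices of the columns that cuboid covers. Below is a standalone sketch of that bitmask decoding only, with ROW_LENGTH, the class name and the sample cuboid ids assumed for the demo; it does not touch Kylin's HLLCounter.

    import java.util.Arrays;

    // Sketch: decode a cuboid id into row-key column indices, scanning from the
    // most significant of ROW_LENGTH bits down to the least significant.
    public class CuboidBitSetDemo {

        static final int ROW_LENGTH = 10; // assumed number of row-key columns

        static Integer[] toColumnIndices(long cuboidId) {
            Integer[] indices = new Integer[Long.bitCount(cuboidId)];
            long mask = 1L << (ROW_LENGTH - 1);
            int pos = 0;
            for (int col = 0; col < ROW_LENGTH; col++) {
                if ((mask & cuboidId) != 0) {
                    indices[pos++] = col; // this column participates in the cuboid
                }
                mask >>= 1;
            }
            return indices;
        }

        public static void main(String[] args) {
            long baseCuboidId = (1L << ROW_LENGTH) - 1;                            // all columns
            System.out.println(Arrays.toString(toColumnIndices(baseCuboidId)));    // [0, 1, ..., 9]
            System.out.println(Arrays.toString(toColumnIndices(0b1010000000L)));   // [0, 2]
        }
    }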

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NumberDictionaryForestTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NumberDictionaryForestTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NumberDictionaryForestTest.java
index ab55bcf..414ab95 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NumberDictionaryForestTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/NumberDictionaryForestTest.java
@@ -67,7 +67,7 @@ public class NumberDictionaryForestTest {
         //stimulate map-reduce job
         ArrayList<SelfDefineSortableKey> keyList = createKeyList(humanList, (byte) flag.ordinal());
         Collections.sort(keyList);
-
+        
         //build tree
         NumberDictionaryForestBuilder b = new NumberDictionaryForestBuilder(0, 0);
         expectedList = numberSort(expectedList);
@@ -76,7 +76,7 @@ public class NumberDictionaryForestTest {
         }
         TrieDictionaryForest<String> dict = b.build();
         dict.dump(System.out);
-
+        
         ArrayList<Integer> resultIds = new ArrayList<>();
         for (int i = 0; i < keyList.size(); i++) {
             SelfDefineSortableKey key = keyList.get(i);
@@ -84,7 +84,7 @@ public class NumberDictionaryForestTest {
             resultIds.add(dict.getIdFromValue(fieldValue));
             assertEquals(expectedList.get(i), dict.getValueFromId(dict.getIdFromValue(fieldValue)));
         }
-
+        
         assertTrue(isIncreasedOrder(resultIds, new Comparator<Integer>() {
             @Override
             public int compare(Integer o1, Integer o2) {
@@ -101,8 +101,7 @@ public class NumberDictionaryForestTest {
                 double d1 = Double.parseDouble(o1);
                 double d2 = Double.parseDouble(o2);
                 return Double.compare(d1, d2);
-            }
-        });
+            }});
         return result;
     }
 
@@ -291,18 +290,16 @@ public class NumberDictionaryForestTest {
         int flag;
         T previous = null;
         for (T t : list) {
-            if (previous == null)
-                previous = t;
+            if (previous == null) previous = t;
             else {
                 flag = comp.compare(previous, t);
-                if (flag > 0)
-                    return false;
+                if (flag > 0) return false;
                 previous = t;
             }
         }
         return true;
     }
-
+    
     @Test
     public void testNormalizeNumber() {
         assertEquals("0", Number2BytesConverter.normalizeNumber("+0000.000"));
@@ -314,7 +311,7 @@ public class NumberDictionaryForestTest {
         assertEquals("200", Number2BytesConverter.normalizeNumber("200"));
         assertEquals("200", Number2BytesConverter.normalizeNumber("200.00"));
         assertEquals("200.01", Number2BytesConverter.normalizeNumber("200.010"));
-
+        
         for (int i = -100; i < 101; i++) {
             String expected = "" + i;
             int cut = expected.startsWith("-") ? 1 : 0;
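
The assertions above fix the expected behaviour of Number2BytesConverter.normalizeNumber for a handful of inputs ("+0000.000" becomes "0", "200.00" becomes "200", "200.010" becomes "200.01"). As a hedged illustration only, the same results for these particular cases can be reproduced with java.math.BigDecimal; this is not Kylin's implementation.

    import java.math.BigDecimal;

    // Sketch: BigDecimal-based normalization matching the asserted cases above.
    // Requires Java 8+, where stripTrailingZeros() of a zero value returns plain 0.
    public class NormalizeNumberDemo {

        static String normalize(String s) {
            // BigDecimal handles the sign, leading zeros and trailing fraction zeros.
            return new BigDecimal(s).stripTrailingZeros().toPlainString();
        }

        public static void main(String[] args) {
            for (String s : new String[] { "+0000.000", "200", "200.00", "200.010", "-12.500" }) {
                System.out.println(s + " -> " + normalize(s));
            }
        }
    }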

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKeyTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKeyTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKeyTest.java
index 231387b..551998f 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKeyTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKeyTest.java
@@ -56,8 +56,7 @@ public class SelfDefineSortableKeyTest {
         System.out.println("test numbers:" + longList);
         ArrayList<String> strNumList = listToStringList(longList);
         //System.out.println("test num strs list:"+strNumList);
-        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList,
-                (byte) SelfDefineSortableKey.TypeFlag.INTEGER_FAMILY_TYPE.ordinal());
+        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList, (byte) SelfDefineSortableKey.TypeFlag.INTEGER_FAMILY_TYPE.ordinal());
         System.out.println(keyList.get(0).isIntegerFamily());
         Collections.sort(keyList);
         ArrayList<String> strListAftereSort = new ArrayList<>();
@@ -93,8 +92,7 @@ public class SelfDefineSortableKeyTest {
         System.out.println("test numbers:" + doubleList);
         ArrayList<String> strNumList = listToStringList(doubleList);
         //System.out.println("test num strs list:"+strNumList);
-        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList,
-                (byte) SelfDefineSortableKey.TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
+        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList, (byte) SelfDefineSortableKey.TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
         Collections.sort(keyList);
         ArrayList<String> strListAftereSort = new ArrayList<>();
         for (SelfDefineSortableKey key : keyList) {
@@ -123,8 +121,7 @@ public class SelfDefineSortableKeyTest {
         strList.add("hello"); //duplicate
         strList.add("123");
         strList.add("");
-        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strList,
-                (byte) SelfDefineSortableKey.TypeFlag.NONE_NUMERIC_TYPE.ordinal());
+        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strList, (byte) SelfDefineSortableKey.TypeFlag.NONE_NUMERIC_TYPE.ordinal());
         Collections.sort(keyList);
         ArrayList<String> strListAftereSort = new ArrayList<>();
         for (SelfDefineSortableKey key : keyList) {
@@ -154,16 +151,17 @@ public class SelfDefineSortableKeyTest {
         doubleList.add(-Double.MAX_VALUE);
         //System.out.println(Double.MIN_VALUE);
 
+
         ArrayList<String> strNumList = listToStringList(doubleList);
         //System.out.println("test num strs list:"+strNumList);
-        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList,
-                (byte) SelfDefineSortableKey.TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
+        ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList, (byte) SelfDefineSortableKey.TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
 
         System.out.println("start to test str sort");
         long start = System.currentTimeMillis();
         Collections.sort(strNumList);
         System.out.println("sort time : " + (System.currentTimeMillis() - start));
 
+
         System.out.println("start to test double sort");
         start = System.currentTimeMillis();
         Collections.sort(keyList);
@@ -191,6 +189,7 @@ public class SelfDefineSortableKeyTest {
         System.out.println("sort time : " + (System.currentTimeMillis() - start));
     }
 
+
     @Test
     public void testIllegalNumber() {
         Random rand = new Random(System.currentTimeMillis());
@@ -211,11 +210,10 @@ public class SelfDefineSortableKeyTest {
         strNumList.add("fjaeif"); //illegal type
         //System.out.println("test num strs list:"+strNumList);
         try {
-            ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList,
-                    (byte) SelfDefineSortableKey.TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
+            ArrayList<SelfDefineSortableKey> keyList = createKeyList(strNumList, (byte) SelfDefineSortableKey.TypeFlag.DOUBLE_FAMILY_TYPE.ordinal());
             Collections.sort(keyList);
             fail("Need catch exception");
-        } catch (Exception e) {
+        }catch(Exception e){
             //correct
         }
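
These tests sort SelfDefineSortableKey instances whose type flag decides the comparison: numeric families compare by parsed value (so "2" sorts before "10"), while the non-numeric flag falls back to plain string order. The class below is a simplified standalone analogue of that idea; it is not Kylin's SelfDefineSortableKey, and the flag names and sample values are only illustrative.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Demo: a key whose type flag switches between numeric and lexicographic order.
    public class TypedSortableKeyDemo implements Comparable<TypedSortableKeyDemo> {

        enum TypeFlag { NONE_NUMERIC_TYPE, INTEGER_FAMILY_TYPE, DOUBLE_FAMILY_TYPE }

        private final TypeFlag flag;
        private final String value;

        TypedSortableKeyDemo(TypeFlag flag, String value) {
            this.flag = flag;
            this.value = value;
        }

        @Override
        public int compareTo(TypedSortableKeyDemo other) {
            if (flag == TypeFlag.NONE_NUMERIC_TYPE) {
                return value.compareTo(other.value); // plain string order
            }
            return Double.compare(Double.parseDouble(value), Double.parseDouble(other.value));
        }

        @Override
        public String toString() {
            return value;
        }

        public static void main(String[] args) {
            List<TypedSortableKeyDemo> keys = new ArrayList<>();
            for (String s : new String[] { "10", "2", "-3.5", "0" }) {
                keys.add(new TypedSortableKeyDemo(TypeFlag.DOUBLE_FAMILY_TYPE, s));
            }
            Collections.sort(keys);
            System.out.println(keys); // [-3.5, 0, 2, 10]
        }
    }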
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
index cf6d0a8..66b154d 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkBatchCubingJobBuilder2.java
@@ -47,8 +47,7 @@ public class SparkBatchCubingJobBuilder2 extends BatchCubingJobBuilder2 {
         sparkExecutable.setClassName(SparkCubingByLayer.class.getName());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_CUBE_NAME.getOpt(), seg.getRealization().getName());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_SEGMENT_ID.getOpt(), seg.getUuid());
-        sparkExecutable.setParam(SparkCubingByLayer.OPTION_INPUT_TABLE.getOpt(),
-                seg.getConfig().getHiveDatabaseForIntermediateTable() + "." + flatTableDesc.getTableName());
+        sparkExecutable.setParam(SparkCubingByLayer.OPTION_INPUT_TABLE.getOpt(), seg.getConfig().getHiveDatabaseForIntermediateTable() + "." + flatTableDesc.getTableName());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_CONF_PATH.getOpt(), KylinConfig.getKylinConfPath());
         sparkExecutable.setParam(SparkCubingByLayer.OPTION_OUTPUT_PATH.getOpt(), cuboidRootPath);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCountDemo.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCountDemo.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCountDemo.java
index ef39c69..6478c10 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCountDemo.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCountDemo.java
@@ -37,8 +37,7 @@ import scala.Tuple2;
  */
 public class SparkCountDemo extends AbstractApplication {
 
-    private static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true)
-            .withDescription("Input path").create("input");
+    private static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Input path").create("input");
 
     private Options options;
 
@@ -57,29 +56,25 @@ public class SparkCountDemo extends AbstractApplication {
         String logFile = "hdfs://10.249.65.231:8020/tmp/kylin.properties"; // Should be some file on your system
         SparkConf conf = new SparkConf().setAppName("Simple Application");
         JavaSparkContext sc = new JavaSparkContext(conf);
-        final JavaPairRDD<String, Integer> logData = sc.textFile(logFile)
-                .mapToPair(new PairFunction<String, String, Integer>() {
+        final JavaPairRDD<String, Integer> logData = sc.textFile(logFile).mapToPair(new PairFunction<String, String, Integer>() {
 
-                    @Override
-                    public Tuple2<String, Integer> call(String s) throws Exception {
-                        return new Tuple2<String, Integer>(s, s.length());
-                    }
-                }).sortByKey();
+            @Override
+            public Tuple2<String, Integer> call(String s) throws Exception {
+                return new Tuple2<String, Integer>(s, s.length());
+            }
+        }).sortByKey();
         logData.persist(StorageLevel.MEMORY_AND_DISK_SER());
 
         System.out.println("line number:" + logData.count());
 
         logData.mapToPair(new PairFunction<Tuple2<String, Integer>, ImmutableBytesWritable, KeyValue>() {
             @Override
-            public Tuple2<ImmutableBytesWritable, KeyValue> call(Tuple2<String, Integer> stringIntegerTuple2)
-                    throws Exception {
+            public Tuple2<ImmutableBytesWritable, KeyValue> call(Tuple2<String, Integer> stringIntegerTuple2) throws Exception {
                 ImmutableBytesWritable key = new ImmutableBytesWritable(stringIntegerTuple2._1().getBytes());
-                KeyValue value = new KeyValue(stringIntegerTuple2._1().getBytes(), "f".getBytes(), "c".getBytes(),
-                        String.valueOf(stringIntegerTuple2._2()).getBytes());
+                KeyValue value = new KeyValue(stringIntegerTuple2._1().getBytes(), "f".getBytes(), "c".getBytes(), String.valueOf(stringIntegerTuple2._2()).getBytes());
                 return new Tuple2(key, value);
             }
-        }).saveAsNewAPIHadoopFile("hdfs://10.249.65.231:8020/tmp/hfile", ImmutableBytesWritable.class, KeyValue.class,
-                HFileOutputFormat.class);
+        }).saveAsNewAPIHadoopFile("hdfs://10.249.65.231:8020/tmp/hfile", ImmutableBytesWritable.class, KeyValue.class, HFileOutputFormat.class);
 
     }
 }
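
SparkCountDemo pairs every input line with its length, sorts by key, and finally writes HFiles. A rough sketch of the same read/mapToPair/sortByKey flow with Java 8 lambdas follows, assuming a local master and a placeholder input path, and deliberately leaving out the HBase/HFileOutputFormat step.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    import scala.Tuple2;

    // Sketch: line -> (line, length) pairs, sorted by line, then counted.
    public class LineLengthDemo {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("LineLengthDemo").setMaster("local[*]");
            JavaSparkContext sc = new JavaSparkContext(conf);

            JavaPairRDD<String, Integer> byLine = sc.textFile("/tmp/sample.txt") // placeholder path
                    .mapToPair(line -> new Tuple2<>(line, line.length()))
                    .sortByKey();

            System.out.println("line number: " + byLine.count());
            sc.stop();
        }
    }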

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
index c3326ff..2a0981a 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
@@ -120,16 +120,11 @@ public class SparkCubing extends AbstractApplication {
 
     protected static final Logger logger = LoggerFactory.getLogger(SparkCubing.class);
 
-    private static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true)
-            .withDescription("Hive Intermediate Table").create("hiveTable");
-    private static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg()
-            .isRequired(true).withDescription("Cube Name").create(BatchConstants.ARG_CUBE_NAME);
-    private static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName("segment").hasArg().isRequired(true)
-            .withDescription("Cube Segment Id").create("segmentId");
-    private static final Option OPTION_CONF_PATH = OptionBuilder.withArgName("confPath").hasArg().isRequired(true)
-            .withDescription("Configuration Path").create("confPath");
-    private static final Option OPTION_COPROCESSOR = OptionBuilder.withArgName("coprocessor").hasArg().isRequired(true)
-            .withDescription("Coprocessor Jar Path").create("coprocessor");
+    private static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Hive Intermediate Table").create("hiveTable");
+    private static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg().isRequired(true).withDescription("Cube Name").create(BatchConstants.ARG_CUBE_NAME);
+    private static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName("segment").hasArg().isRequired(true).withDescription("Cube Segment Id").create("segmentId");
+    private static final Option OPTION_CONF_PATH = OptionBuilder.withArgName("confPath").hasArg().isRequired(true).withDescription("Configuration Path").create("confPath");
+    private static final Option OPTION_COPROCESSOR = OptionBuilder.withArgName("coprocessor").hasArg().isRequired(true).withDescription("Coprocessor Jar Path").create("coprocessor");
 
     private Options options;
 
@@ -192,10 +187,8 @@ public class SparkCubing extends AbstractApplication {
         final CubeSegment seg = cubeInstance.getSegmentById(segmentId);
         final CubeDesc cubeDesc = cubeInstance.getDescriptor();
         final HashMap<Integer, TblColRef> tblColRefMap = Maps.newHashMap();
-        final CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(
-                EngineFactory.getJoinedFlatTableDesc(seg), cubeDesc);
-        final List<TblColRef> baseCuboidColumn = Cuboid.findById(cubeDesc, Cuboid.getBaseCuboidId(cubeDesc))
-                .getColumns();
+        final CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(seg), cubeDesc);
+        final List<TblColRef> baseCuboidColumn = Cuboid.findById(cubeDesc, Cuboid.getBaseCuboidId(cubeDesc)).getColumns();
         final long start = System.currentTimeMillis();
         final RowKeyDesc rowKey = cubeDesc.getRowkey();
         for (int i = 0; i < baseCuboidColumn.size(); i++) {
@@ -214,36 +207,35 @@ public class SparkCubing extends AbstractApplication {
             final DataFrame frame = intermediateTable.select(column).distinct();
 
             final Row[] rows = frame.collect();
-            dictionaryMap.put(tblColRef, DictionaryGenerator.buildDictionary(tblColRef.getType(),
-                    new IterableDictionaryValueEnumerator(new Iterable<String>() {
-                        @Override
-                        public Iterator<String> iterator() {
-                            return new Iterator<String>() {
-                                int i = 0;
+            dictionaryMap.put(tblColRef, DictionaryGenerator.buildDictionary(tblColRef.getType(), new IterableDictionaryValueEnumerator(new Iterable<String>() {
+                @Override
+                public Iterator<String> iterator() {
+                    return new Iterator<String>() {
+                        int i = 0;
 
-                                @Override
-                                public boolean hasNext() {
-                                    return i < rows.length;
-                                }
+                        @Override
+                        public boolean hasNext() {
+                            return i < rows.length;
+                        }
 
-                                @Override
-                                public String next() {
-                                    if (hasNext()) {
-                                        final Row row = rows[i++];
-                                        final Object o = row.get(0);
-                                        return o != null ? o.toString() : null;
-                                    } else {
-                                        throw new NoSuchElementException();
-                                    }
-                                }
+                        @Override
+                        public String next() {
+                            if (hasNext()) {
+                                final Row row = rows[i++];
+                                final Object o = row.get(0);
+                                return o != null ? o.toString() : null;
+                            } else {
+                                throw new NoSuchElementException();
+                            }
+                        }
 
-                                @Override
-                                public void remove() {
-                                    throw new UnsupportedOperationException();
-                                }
-                            };
+                        @Override
+                        public void remove() {
+                            throw new UnsupportedOperationException();
                         }
-                    })));
+                    };
+                }
+            })));
         }
         final long end = System.currentTimeMillis();
         CubingUtils.writeDictionary(seg, dictionaryMap, start, end);
@@ -256,8 +248,7 @@ public class SparkCubing extends AbstractApplication {
         }
     }
 
-    private Map<Long, HLLCounter> sampling(final JavaRDD<List<String>> rowJavaRDD, final String cubeName,
-            String segmentId) throws Exception {
+    private Map<Long, HLLCounter> sampling(final JavaRDD<List<String>> rowJavaRDD, final String cubeName, String segmentId) throws Exception {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).reloadCubeLocal(cubeName);
         CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
         CubeDesc cubeDesc = cubeInstance.getDescriptor();
@@ -268,8 +259,7 @@ public class SparkCubing extends AbstractApplication {
             zeroValue.put(id, new HLLCounter(cubeDesc.getConfig().getCubeStatsHLLPrecision()));
         }
 
-        CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(
-                EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
+        CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
 
         final int[] rowKeyColumnIndexes = flatDesc.getRowKeyColumnIndexes();
         final int nRowKey = cubeDesc.getRowkey().getRowKeyColumns().length;
@@ -295,56 +285,52 @@ public class SparkCubing extends AbstractApplication {
             row_hashcodes[i] = new ByteArray();
         }
 
-        final HashMap<Long, HLLCounter> samplingResult = rowJavaRDD.aggregate(zeroValue,
-                new Function2<HashMap<Long, HLLCounter>, List<String>, HashMap<Long, HLLCounter>>() {
+        final HashMap<Long, HLLCounter> samplingResult = rowJavaRDD.aggregate(zeroValue, new Function2<HashMap<Long, HLLCounter>, List<String>, HashMap<Long, HLLCounter>>() {
 
-                    final HashFunction hashFunction = Hashing.murmur3_128();
+            final HashFunction hashFunction = Hashing.murmur3_128();
 
-                    @Override
-                    public HashMap<Long, HLLCounter> call(HashMap<Long, HLLCounter> v1, List<String> v2)
-                            throws Exception {
-                        for (int i = 0; i < nRowKey; i++) {
-                            Hasher hc = hashFunction.newHasher();
-                            String colValue = v2.get(rowKeyColumnIndexes[i]);
-                            if (colValue != null) {
-                                row_hashcodes[i].set(hc.putString(colValue).hash().asBytes());
-                            } else {
-                                row_hashcodes[i].set(hc.putInt(0).hash().asBytes());
-                            }
-                        }
-
-                        for (Map.Entry<Long, Integer[]> entry : allCuboidsBitSet.entrySet()) {
-                            Hasher hc = hashFunction.newHasher();
-                            HLLCounter counter = v1.get(entry.getKey());
-                            final Integer[] cuboidBitSet = entry.getValue();
-                            for (int position = 0; position < cuboidBitSet.length; position++) {
-                                hc.putBytes(row_hashcodes[cuboidBitSet[position]].array());
-                            }
-                            counter.add(hc.hash().asBytes());
-                        }
-                        return v1;
+            @Override
+            public HashMap<Long, HLLCounter> call(HashMap<Long, HLLCounter> v1, List<String> v2) throws Exception {
+                for (int i = 0; i < nRowKey; i++) {
+                    Hasher hc = hashFunction.newHasher();
+                    String colValue = v2.get(rowKeyColumnIndexes[i]);
+                    if (colValue != null) {
+                        row_hashcodes[i].set(hc.putString(colValue).hash().asBytes());
+                    } else {
+                        row_hashcodes[i].set(hc.putInt(0).hash().asBytes());
                     }
-                }, new Function2<HashMap<Long, HLLCounter>, HashMap<Long, HLLCounter>, HashMap<Long, HLLCounter>>() {
-                    @Override
-                    public HashMap<Long, HLLCounter> call(HashMap<Long, HLLCounter> v1, HashMap<Long, HLLCounter> v2)
-                            throws Exception {
-                        Preconditions.checkArgument(v1.size() == v2.size());
-                        Preconditions.checkArgument(v1.size() > 0);
-                        for (Map.Entry<Long, HLLCounter> entry : v1.entrySet()) {
-                            final HLLCounter counter1 = entry.getValue();
-                            final HLLCounter counter2 = v2.get(entry.getKey());
-                            counter1.merge(Preconditions.checkNotNull(counter2, "counter cannot be null"));
-                        }
-                        return v1;
+                }
+
+                for (Map.Entry<Long, Integer[]> entry : allCuboidsBitSet.entrySet()) {
+                    Hasher hc = hashFunction.newHasher();
+                    HLLCounter counter = v1.get(entry.getKey());
+                    final Integer[] cuboidBitSet = entry.getValue();
+                    for (int position = 0; position < cuboidBitSet.length; position++) {
+                        hc.putBytes(row_hashcodes[cuboidBitSet[position]].array());
                     }
+                    counter.add(hc.hash().asBytes());
+                }
+                return v1;
+            }
+        }, new Function2<HashMap<Long, HLLCounter>, HashMap<Long, HLLCounter>, HashMap<Long, HLLCounter>>() {
+            @Override
+            public HashMap<Long, HLLCounter> call(HashMap<Long, HLLCounter> v1, HashMap<Long, HLLCounter> v2) throws Exception {
+                Preconditions.checkArgument(v1.size() == v2.size());
+                Preconditions.checkArgument(v1.size() > 0);
+                for (Map.Entry<Long, HLLCounter> entry : v1.entrySet()) {
+                    final HLLCounter counter1 = entry.getValue();
+                    final HLLCounter counter2 = v2.get(entry.getKey());
+                    counter1.merge(Preconditions.checkNotNull(counter2, "counter cannot be null"));
+                }
+                return v1;
+            }
 
-                });
+        });
         return samplingResult;
     }
 
     /** return hfile location */
-    private String build(JavaRDD<List<String>> javaRDD, final String cubeName, final String segmentId,
-            final byte[][] splitKeys) throws Exception {
+    private String build(JavaRDD<List<String>> javaRDD, final String cubeName, final String segmentId, final byte[][] splitKeys) throws Exception {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
         CubeDesc cubeDesc = cubeInstance.getDescriptor();
         final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
@@ -378,41 +364,35 @@ public class SparkCubing extends AbstractApplication {
             }
         }
 
-        final JavaPairRDD<byte[], byte[]> javaPairRDD = javaRDD.glom()
-                .mapPartitionsToPair(new PairFlatMapFunction<Iterator<List<List<String>>>, byte[], byte[]>() {
+        final JavaPairRDD<byte[], byte[]> javaPairRDD = javaRDD.glom().mapPartitionsToPair(new PairFlatMapFunction<Iterator<List<List<String>>>, byte[], byte[]>() {
 
-                    @Override
-                    public Iterable<Tuple2<byte[], byte[]>> call(Iterator<List<List<String>>> listIterator)
-                            throws Exception {
-                        long t = System.currentTimeMillis();
-                        prepare();
-
-                        final CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv())
-                                .getCube(cubeName);
-
-                        LinkedBlockingQueue<List<String>> blockingQueue = new LinkedBlockingQueue();
-                        System.out.println("load properties finished");
-                        IJoinedFlatTableDesc flatDesc = EngineFactory.getJoinedFlatTableDesc(cubeSegment);
-                        AbstractInMemCubeBuilder inMemCubeBuilder = new DoggedCubeBuilder(cubeInstance.getDescriptor(),
-                                flatDesc, dictionaryMap);
-                        final SparkCuboidWriter sparkCuboidWriter = new BufferedCuboidWriter(
-                                new DefaultTupleConverter(cubeInstance.getSegmentById(segmentId), columnLengthMap));
-                        Executors.newCachedThreadPool()
-                                .submit(inMemCubeBuilder.buildAsRunnable(blockingQueue, sparkCuboidWriter));
-                        try {
-                            while (listIterator.hasNext()) {
-                                for (List<String> row : listIterator.next()) {
-                                    blockingQueue.put(row);
-                                }
-                            }
-                            blockingQueue.put(Collections.<String> emptyList());
-                        } catch (Exception e) {
-                            throw new RuntimeException(e);
+            @Override
+            public Iterable<Tuple2<byte[], byte[]>> call(Iterator<List<List<String>>> listIterator) throws Exception {
+                long t = System.currentTimeMillis();
+                prepare();
+
+                final CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
+
+                LinkedBlockingQueue<List<String>> blockingQueue = new LinkedBlockingQueue();
+                System.out.println("load properties finished");
+                IJoinedFlatTableDesc flatDesc = EngineFactory.getJoinedFlatTableDesc(cubeSegment);
+                AbstractInMemCubeBuilder inMemCubeBuilder = new DoggedCubeBuilder(cubeInstance.getDescriptor(), flatDesc, dictionaryMap);
+                final SparkCuboidWriter sparkCuboidWriter = new BufferedCuboidWriter(new DefaultTupleConverter(cubeInstance.getSegmentById(segmentId), columnLengthMap));
+                Executors.newCachedThreadPool().submit(inMemCubeBuilder.buildAsRunnable(blockingQueue, sparkCuboidWriter));
+                try {
+                    while (listIterator.hasNext()) {
+                        for (List<String> row : listIterator.next()) {
+                            blockingQueue.put(row);
                         }
-                        System.out.println("build partition cost: " + (System.currentTimeMillis() - t) + "ms");
-                        return sparkCuboidWriter.getResult();
                     }
-                });
+                    blockingQueue.put(Collections.<String> emptyList());
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+                System.out.println("build partition cost: " + (System.currentTimeMillis() - t) + "ms");
+                return sparkCuboidWriter.getResult();
+            }
+        });
 
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         Configuration conf = getConfigurationForHFile(cubeSegment.getStorageLocationIdentifier());
@@ -431,9 +411,7 @@ public class SparkCubing extends AbstractApplication {
         return url;
     }
 
-    private void writeToHFile2(final JavaPairRDD<byte[], byte[]> javaPairRDD, final String[] dataTypes,
-            final int measureSize, final MeasureAggregators aggs, final byte[][] splitKeys, final Configuration conf,
-            final String hFileLocation) {
+    private void writeToHFile2(final JavaPairRDD<byte[], byte[]> javaPairRDD, final String[] dataTypes, final int measureSize, final MeasureAggregators aggs, final byte[][] splitKeys, final Configuration conf, final String hFileLocation) {
         javaPairRDD.repartitionAndSortWithinPartitions(new Partitioner() {
             @Override
             public int numPartitions() {
@@ -450,52 +428,46 @@ public class SparkCubing extends AbstractApplication {
                 }
                 return splitKeys.length;
             }
-        }, UnsignedBytes.lexicographicalComparator())
-                .mapPartitions(new FlatMapFunction<Iterator<Tuple2<byte[], byte[]>>, Tuple2<byte[], byte[]>>() {
-                    @Override
-                    public Iterable<Tuple2<byte[], byte[]>> call(final Iterator<Tuple2<byte[], byte[]>> tuple2Iterator)
-                            throws Exception {
-                        return new Iterable<Tuple2<byte[], byte[]>>() {
-                            final BufferedMeasureCodec codec = new BufferedMeasureCodec(dataTypes);
-                            final Object[] input = new Object[measureSize];
-                            final Object[] result = new Object[measureSize];
+        }, UnsignedBytes.lexicographicalComparator()).mapPartitions(new FlatMapFunction<Iterator<Tuple2<byte[], byte[]>>, Tuple2<byte[], byte[]>>() {
+            @Override
+            public Iterable<Tuple2<byte[], byte[]>> call(final Iterator<Tuple2<byte[], byte[]>> tuple2Iterator) throws Exception {
+                return new Iterable<Tuple2<byte[], byte[]>>() {
+                    final BufferedMeasureCodec codec = new BufferedMeasureCodec(dataTypes);
+                    final Object[] input = new Object[measureSize];
+                    final Object[] result = new Object[measureSize];
 
+                    @Override
+                    public Iterator<Tuple2<byte[], byte[]>> iterator() {
+                        return IteratorUtils.merge(tuple2Iterator, UnsignedBytes.lexicographicalComparator(), new Function<Iterable<byte[]>, byte[]>() {
                             @Override
-                            public Iterator<Tuple2<byte[], byte[]>> iterator() {
-                                return IteratorUtils.merge(tuple2Iterator, UnsignedBytes.lexicographicalComparator(),
-                                        new Function<Iterable<byte[]>, byte[]>() {
-                                            @Override
-                                            public byte[] call(Iterable<byte[]> v1) throws Exception {
-                                                final LinkedList<byte[]> list = Lists.newLinkedList(v1);
-                                                if (list.size() == 1) {
-                                                    return list.get(0);
-                                                }
-                                                aggs.reset();
-                                                for (byte[] v : list) {
-                                                    codec.decode(ByteBuffer.wrap(v), input);
-                                                    aggs.aggregate(input);
-                                                }
-                                                aggs.collectStates(result);
-                                                ByteBuffer buffer = codec.encode(result);
-                                                byte[] bytes = new byte[buffer.position()];
-                                                System.arraycopy(buffer.array(), buffer.arrayOffset(), bytes, 0,
-                                                        buffer.position());
-                                                return bytes;
-                                            }
-                                        });
+                            public byte[] call(Iterable<byte[]> v1) throws Exception {
+                                final LinkedList<byte[]> list = Lists.newLinkedList(v1);
+                                if (list.size() == 1) {
+                                    return list.get(0);
+                                }
+                                aggs.reset();
+                                for (byte[] v : list) {
+                                    codec.decode(ByteBuffer.wrap(v), input);
+                                    aggs.aggregate(input);
+                                }
+                                aggs.collectStates(result);
+                                ByteBuffer buffer = codec.encode(result);
+                                byte[] bytes = new byte[buffer.position()];
+                                System.arraycopy(buffer.array(), buffer.arrayOffset(), bytes, 0, buffer.position());
+                                return bytes;
                             }
-                        };
-                    }
-                }, true).mapToPair(new PairFunction<Tuple2<byte[], byte[]>, ImmutableBytesWritable, KeyValue>() {
-                    @Override
-                    public Tuple2<ImmutableBytesWritable, KeyValue> call(Tuple2<byte[], byte[]> tuple2)
-                            throws Exception {
-                        ImmutableBytesWritable key = new ImmutableBytesWritable(tuple2._1());
-                        KeyValue value = new KeyValue(tuple2._1(), "F1".getBytes(), "M".getBytes(), tuple2._2());
-                        return new Tuple2(key, value);
+                        });
                     }
-                }).saveAsNewAPIHadoopFile(hFileLocation, ImmutableBytesWritable.class, KeyValue.class,
-                        HFileOutputFormat.class, conf);
+                };
+            }
+        }, true).mapToPair(new PairFunction<Tuple2<byte[], byte[]>, ImmutableBytesWritable, KeyValue>() {
+            @Override
+            public Tuple2<ImmutableBytesWritable, KeyValue> call(Tuple2<byte[], byte[]> tuple2) throws Exception {
+                ImmutableBytesWritable key = new ImmutableBytesWritable(tuple2._1());
+                KeyValue value = new KeyValue(tuple2._1(), "F1".getBytes(), "M".getBytes(), tuple2._2());
+                return new Tuple2(key, value);
+            }
+        }).saveAsNewAPIHadoopFile(hFileLocation, ImmutableBytesWritable.class, KeyValue.class, HFileOutputFormat.class, conf);
     }
 
     public static void prepare() throws Exception {
@@ -506,16 +478,14 @@ public class SparkCubing extends AbstractApplication {
         ClassUtil.addClasspath(confPath);
     }
 
-    private byte[][] createHTable(String cubeName, String segmentId, Map<Long, HLLCounter> samplingResult)
-            throws Exception {
+    private byte[][] createHTable(String cubeName, String segmentId, Map<Long, HLLCounter> samplingResult) throws Exception {
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
         final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
         final Map<Long, Long> rowCountMap = CubeStatsReader.getCuboidRowCountMapFromSampling(samplingResult, 100);
         final Map<Long, Double> cubeSizeMap = CubeStatsReader.getCuboidSizeMapFromRowCount(cubeSegment, rowCountMap);
         System.out.println("cube size estimation:" + cubeSizeMap);
-        final byte[][] splitKeys = CreateHTableJob.getRegionSplitsFromCuboidStatistics(cubeSizeMap, kylinConfig,
-                cubeSegment, null); //FIXME: passing non-null value for 'hfileSplitsOutputFolder'
+        final byte[][] splitKeys = CreateHTableJob.getRegionSplitsFromCuboidStatistics(cubeSizeMap, kylinConfig, cubeSegment, null); //FIXME: passing non-null value for 'hfileSplitsOutputFolder'
         CubeHTableUtil.createHTable(cubeSegment, splitKeys);
         System.out.println(cubeSegment.getStorageLocationIdentifier() + " table created");
         return splitKeys;
@@ -590,23 +560,22 @@ public class SparkCubing extends AbstractApplication {
         setupClasspath(sc, confPath);
         intermediateTable.cache();
         writeDictionary(intermediateTable, cubeName, segmentId);
-        final JavaRDD<List<String>> rowJavaRDD = intermediateTable.javaRDD()
-                .map(new org.apache.spark.api.java.function.Function<Row, List<String>>() {
-                    @Override
-                    public List<String> call(Row v1) throws Exception {
-                        ArrayList<String> result = Lists.newArrayListWithExpectedSize(v1.size());
-                        for (int i = 0; i < v1.size(); i++) {
-                            final Object o = v1.get(i);
-                            if (o != null) {
-                                result.add(o.toString());
-                            } else {
-                                result.add(null);
-                            }
-                        }
-                        return result;
-
+        final JavaRDD<List<String>> rowJavaRDD = intermediateTable.javaRDD().map(new org.apache.spark.api.java.function.Function<Row, List<String>>() {
+            @Override
+            public List<String> call(Row v1) throws Exception {
+                ArrayList<String> result = Lists.newArrayListWithExpectedSize(v1.size());
+                for (int i = 0; i < v1.size(); i++) {
+                    final Object o = v1.get(i);
+                    if (o != null) {
+                        result.add(o.toString());
+                    } else {
+                        result.add(null);
                     }
-                });
+                }
+                return result;
+
+            }
+        });
 
         final Map<Long, HLLCounter> samplingResult = sampling(rowJavaRDD, cubeName, segmentId);
         final byte[][] splitKeys = createHTable(cubeName, segmentId, samplingResult);
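
The sampling() method above relies on JavaRDD.aggregate(zeroValue, seqOp, combOp): the sequence function folds each row's column hashes into per-cuboid HLLCounters, and the combine function merges the per-partition maps. The sketch below isolates that aggregate pattern, replacing Kylin's HLLCounter with an exact HashSet per column; the column count, sample rows and class name are assumptions made for the demo.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;

    // Sketch: per-column distinct counting with JavaRDD.aggregate(zero, seqOp, combOp).
    public class ColumnCardinalitySketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("ColumnCardinalitySketch").setMaster("local[*]");
            JavaSparkContext sc = new JavaSparkContext(conf);

            JavaRDD<List<String>> rows = sc.parallelize(Arrays.asList(
                    Arrays.asList("2012-01-01", "US", "10"),
                    Arrays.asList("2012-01-01", "CN", "20"),
                    Arrays.asList("2012-01-02", "US", "30")));

            // Zero value: one empty set per column index, mirroring the per-cuboid map above.
            Map<Integer, HashSet<String>> zero = new HashMap<>();
            for (int col = 0; col < 3; col++) {
                zero.put(col, new HashSet<String>());
            }

            Map<Integer, HashSet<String>> merged = rows.aggregate(zero,
                    (acc, row) -> {                  // seqOp: fold one row into the per-column sets
                        for (int col = 0; col < row.size(); col++) {
                            acc.get(col).add(row.get(col));
                        }
                        return acc;
                    },
                    (left, right) -> {               // combOp: merge the per-partition maps
                        for (Map.Entry<Integer, HashSet<String>> e : right.entrySet()) {
                            left.get(e.getKey()).addAll(e.getValue());
                        }
                        return left;
                    });

            merged.forEach((col, values) -> System.out.println("column " + col + " cardinality: " + values.size()));
            sc.stop();
        }
    }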

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index cf2a650..f70fd30 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -17,14 +17,6 @@
 */
 package org.apache.kylin.engine.spark;
 
-import java.io.File;
-import java.io.FileFilter;
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -73,9 +65,17 @@ import org.apache.spark.sql.hive.HiveContext;
 import org.apache.spark.storage.StorageLevel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import scala.Tuple2;
 
+import java.io.File;
+import java.io.FileFilter;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+
 /**
  * Spark application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
  */
@@ -83,16 +83,11 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
 
     protected static final Logger logger = LoggerFactory.getLogger(SparkCubingByLayer.class);
 
-    public static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg()
-            .isRequired(true).withDescription("Cube Name").create(BatchConstants.ARG_CUBE_NAME);
-    public static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName("segment").hasArg().isRequired(true)
-            .withDescription("Cube Segment Id").create("segmentId");
-    public static final Option OPTION_CONF_PATH = OptionBuilder.withArgName("confPath").hasArg().isRequired(true)
-            .withDescription("Configuration Path").create("confPath");
-    public static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg()
-            .isRequired(true).withDescription("Cube output path").create(BatchConstants.ARG_OUTPUT);
-    public static final Option OPTION_INPUT_TABLE = OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true)
-            .withDescription("Hive Intermediate Table").create("hiveTable");
+    public static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg().isRequired(true).withDescription("Cube Name").create(BatchConstants.ARG_CUBE_NAME);
+    public static final Option OPTION_SEGMENT_ID = OptionBuilder.withArgName("segment").hasArg().isRequired(true).withDescription("Cube Segment Id").create("segmentId");
+    public static final Option OPTION_CONF_PATH = OptionBuilder.withArgName("confPath").hasArg().isRequired(true).withDescription("Configuration Path").create("confPath");
+    public static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_OUTPUT).hasArg().isRequired(true).withDescription("Cube output path").create(BatchConstants.ARG_OUTPUT);
+    public static final Option OPTION_INPUT_TABLE = OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true).withDescription("Hive Intermediate Table").create("hiveTable");
 
     private Options options;
 
@@ -165,14 +160,12 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         final CubeInstance cubeInstance = CubeManager.getInstance(envConfig).getCube(cubeName);
         final CubeDesc cubeDesc = cubeInstance.getDescriptor();
         final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
-        final CubeJoinedFlatTableEnrich intermediateTableDesc = new CubeJoinedFlatTableEnrich(
-                EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
+        final CubeJoinedFlatTableEnrich intermediateTableDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
 
         final KylinConfig kylinConfig = cubeDesc.getConfig();
         final Broadcast<CubeDesc> vCubeDesc = sc.broadcast(cubeDesc);
         final Broadcast<CubeSegment> vCubeSegment = sc.broadcast(cubeSegment);
-        final NDCuboidBuilder ndCuboidBuilder = new NDCuboidBuilder(vCubeSegment.getValue(),
-                new RowKeyEncoderProvider(vCubeSegment.getValue()));
+        final NDCuboidBuilder ndCuboidBuilder = new NDCuboidBuilder(vCubeSegment.getValue(), new RowKeyEncoderProvider(vCubeSegment.getValue()));
 
         final Broadcast<CuboidScheduler> vCuboidScheduler = sc.broadcast(new CuboidScheduler(vCubeDesc.getValue()));
         final int measureNum = cubeDesc.getMeasures().size();
@@ -197,50 +190,45 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         StorageLevel storageLevel = StorageLevel.MEMORY_AND_DISK_SER();
 
         // encode with dimension encoding, transform to <ByteArray, Object[]> RDD
-        final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = intermediateTable.javaRDD()
-                .mapToPair(new PairFunction<Row, ByteArray, Object[]>() {
-                    volatile transient boolean initialized = false;
-                    BaseCuboidBuilder baseCuboidBuilder = null;
+        final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = intermediateTable.javaRDD().mapToPair(new PairFunction<Row, ByteArray, Object[]>() {
+            volatile transient boolean initialized = false;
+            BaseCuboidBuilder baseCuboidBuilder = null;
 
-                    @Override
-                    public Tuple2<ByteArray, Object[]> call(Row row) throws Exception {
+            @Override
+            public Tuple2<ByteArray, Object[]> call(Row row) throws Exception {
+                if (initialized == false) {
+                    synchronized (SparkCubingByLayer.class) {
                         if (initialized == false) {
-                            synchronized (SparkCubingByLayer.class) {
-                                if (initialized == false) {
-                                    prepare();
-                                    long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
-                                    Cuboid baseCuboid = Cuboid.findById(cubeDesc, baseCuboidId);
-                                    baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, cubeDesc, cubeSegment,
-                                            intermediateTableDesc,
-                                            AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid),
-                                            MeasureIngester.create(cubeDesc.getMeasures()),
-                                            cubeSegment.buildDictionaryMap());
-                                    initialized = true;
-                                }
-                            }
+                            prepare();
+                            long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
+                            Cuboid baseCuboid = Cuboid.findById(cubeDesc, baseCuboidId);
+                            baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, cubeDesc, cubeSegment, intermediateTableDesc, AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid), MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
+                            initialized = true;
                         }
-
-                        String[] rowArray = rowToArray(row);
-                        baseCuboidBuilder.resetAggrs();
-                        byte[] rowKey = baseCuboidBuilder.buildKey(rowArray);
-                        Object[] result = baseCuboidBuilder.buildValueObjects(rowArray);
-                        return new Tuple2<>(new ByteArray(rowKey), result);
                     }
+                }
 
-                    private String[] rowToArray(Row row) {
-                        String[] result = new String[row.size()];
-                        for (int i = 0; i < row.size(); i++) {
-                            final Object o = row.get(i);
-                            if (o != null) {
-                                result[i] = o.toString();
-                            } else {
-                                result[i] = null;
-                            }
-                        }
-                        return result;
+                String[] rowArray = rowToArray(row);
+                baseCuboidBuilder.resetAggrs();
+                byte[] rowKey = baseCuboidBuilder.buildKey(rowArray);
+                Object[] result = baseCuboidBuilder.buildValueObjects(rowArray);
+                return new Tuple2<>(new ByteArray(rowKey), result);
+            }
+
+            private String[] rowToArray(Row row) {
+                String[] result = new String[row.size()];
+                for (int i = 0; i < row.size(); i++) {
+                    final Object o = row.get(i);
+                    if (o != null) {
+                        result[i] = o.toString();
+                    } else {
+                        result[i] = null;
                     }
+                }
+                return result;
+            }
 
-                });
+        });
 
         logger.info("encodedBaseRDD partition number: " + encodedBaseRDD.getNumPartitions());
         Long totalCount = 0L;
@@ -250,12 +238,10 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         }
 
         final MeasureAggregators measureAggregators = new MeasureAggregators(cubeDesc.getMeasures());
-        final BaseCuboidReducerFunction2 baseCuboidReducerFunction = new BaseCuboidReducerFunction2(measureNum,
-                vCubeDesc.getValue(), measureAggregators);
+        final BaseCuboidReducerFunction2 baseCuboidReducerFunction = new BaseCuboidReducerFunction2(measureNum, vCubeDesc.getValue(), measureAggregators);
         BaseCuboidReducerFunction2 reducerFunction2 = baseCuboidReducerFunction;
         if (allNormalMeasure == false) {
-            reducerFunction2 = new CuboidReducerFunction2(measureNum, vCubeDesc.getValue(), measureAggregators,
-                    needAggr);
+            reducerFunction2 = new CuboidReducerFunction2(measureNum, vCubeDesc.getValue(), measureAggregators, needAggr);
         }
 
         final int totalLevels = cubeDesc.getBuildLevel();
@@ -271,14 +257,12 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         saveToHDFS(allRDDs[0], vCubeDesc.getValue(), outputPath, 0, confOverwrite);
 
         // aggregate to ND cuboids
-        PairFlatMapFunction<Tuple2<ByteArray, Object[]>, ByteArray, Object[]> flatMapFunction = new CuboidFlatMap(
-                vCubeSegment.getValue(), vCubeDesc.getValue(), vCuboidScheduler.getValue(), ndCuboidBuilder);
+        PairFlatMapFunction<Tuple2<ByteArray, Object[]>, ByteArray, Object[]> flatMapFunction = new CuboidFlatMap(vCubeSegment.getValue(), vCubeDesc.getValue(), vCuboidScheduler.getValue(), ndCuboidBuilder);
 
         for (level = 1; level <= totalLevels; level++) {
             partition = estimateRDDPartitionNum(level, cubeStatsReader, kylinConfig);
             logger.info("Level " + level + " partition number: " + partition);
-            allRDDs[level] = allRDDs[level - 1].flatMapToPair(flatMapFunction).reduceByKey(reducerFunction2, partition)
-                    .persist(storageLevel);
+            allRDDs[level] = allRDDs[level - 1].flatMapToPair(flatMapFunction).reduceByKey(reducerFunction2, partition).persist(storageLevel);
             if (kylinConfig.isSparkSanityCheckEnabled() == true) {
                 sanityCheck(allRDDs[level], totalCount, level, cubeStatsReader, countMeasureIndex);
             }
@@ -299,24 +283,19 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         return partition;
     }
 
-    private static void saveToHDFS(final JavaPairRDD<ByteArray, Object[]> rdd, final CubeDesc cubeDesc,
-            final String hdfsBaseLocation, int level, Configuration conf) {
+    private static void saveToHDFS(final JavaPairRDD<ByteArray, Object[]> rdd, final CubeDesc cubeDesc, final String hdfsBaseLocation, int level, Configuration conf) {
         final String cuboidOutputPath = BatchCubingJobBuilder2.getCuboidOutputPathsByLevel(hdfsBaseLocation, level);
-        rdd.mapToPair(
-                new PairFunction<Tuple2<ByteArray, Object[]>, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>() {
-                    BufferedMeasureCodec codec = new BufferedMeasureCodec(cubeDesc.getMeasures());
-
-                    @Override
-                    public Tuple2<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text> call(
-                            Tuple2<ByteArray, Object[]> tuple2) throws Exception {
-                        ByteBuffer valueBuf = codec.encode(tuple2._2());
-                        byte[] encodedBytes = new byte[valueBuf.position()];
-                        System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
-                        return new Tuple2<>(new org.apache.hadoop.io.Text(tuple2._1().array()),
-                                new org.apache.hadoop.io.Text(encodedBytes));
-                    }
-                }).saveAsNewAPIHadoopFile(cuboidOutputPath, org.apache.hadoop.io.Text.class,
-                        org.apache.hadoop.io.Text.class, SequenceFileOutputFormat.class, conf);
+        rdd.mapToPair(new PairFunction<Tuple2<ByteArray, Object[]>, org.apache.hadoop.io.Text, org.apache.hadoop.io.Text>() {
+            BufferedMeasureCodec codec = new BufferedMeasureCodec(cubeDesc.getMeasures());
+
+            @Override
+            public Tuple2<org.apache.hadoop.io.Text, org.apache.hadoop.io.Text> call(Tuple2<ByteArray, Object[]> tuple2) throws Exception {
+                ByteBuffer valueBuf = codec.encode(tuple2._2());
+                byte[] encodedBytes = new byte[valueBuf.position()];
+                System.arraycopy(valueBuf.array(), 0, encodedBytes, 0, valueBuf.position());
+                return new Tuple2<>(new org.apache.hadoop.io.Text(tuple2._1().array()), new org.apache.hadoop.io.Text(encodedBytes));
+            }
+        }).saveAsNewAPIHadoopFile(cuboidOutputPath, org.apache.hadoop.io.Text.class, org.apache.hadoop.io.Text.class, SequenceFileOutputFormat.class, conf);
         logger.info("Persisting RDD for level " + level + " into " + cuboidOutputPath);
     }
 
@@ -366,8 +345,7 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         RowKeySplitter rowKeySplitter;
         transient boolean initialized = false;
 
-        CuboidFlatMap(CubeSegment cubeSegment, CubeDesc cubeDesc, CuboidScheduler cuboidScheduler,
-                NDCuboidBuilder ndCuboidBuilder) {
+        CuboidFlatMap(CubeSegment cubeSegment, CubeDesc cubeDesc, CuboidScheduler cuboidScheduler, NDCuboidBuilder ndCuboidBuilder) {
             this.cubeSegment = cubeSegment;
             this.cubeDesc = cubeDesc;
             this.cuboidScheduler = cuboidScheduler;
@@ -396,8 +374,7 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
             List<Tuple2<ByteArray, Object[]>> tuples = new ArrayList(myChildren.size());
             for (Long child : myChildren) {
                 Cuboid childCuboid = Cuboid.findById(cubeDesc, child);
-                Pair<Integer, ByteArray> result = ndCuboidBuilder.buildKey(parentCuboid, childCuboid,
-                        rowKeySplitter.getSplitBuffers());
+                Pair<Integer, ByteArray> result = ndCuboidBuilder.buildKey(parentCuboid, childCuboid, rowKeySplitter.getSplitBuffers());
 
                 byte[] newKey = new byte[result.getFirst()];
                 System.arraycopy(result.getSecond().array(), 0, newKey, 0, result.getFirst());
@@ -411,14 +388,11 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
 
     //sanity check
 
-    private void sanityCheck(JavaPairRDD<ByteArray, Object[]> rdd, Long totalCount, int thisLevel,
-            CubeStatsReader cubeStatsReader, final int countMeasureIndex) {
+    private void sanityCheck(JavaPairRDD<ByteArray, Object[]> rdd, Long totalCount, int thisLevel, CubeStatsReader cubeStatsReader, final int countMeasureIndex) {
         int thisCuboidNum = cubeStatsReader.getCuboidsByLayer(thisLevel).size();
         Long count2 = getRDDCountSum(rdd, countMeasureIndex);
         if (count2 != totalCount * thisCuboidNum) {
-            throw new IllegalStateException(
-                    String.format("Sanity check failed, level %s, total count(*) is %s; cuboid number %s", thisLevel,
-                            count2, thisCuboidNum));
+            throw new IllegalStateException(String.format("Sanity check failed, level %s, total count(*) is %s; cuboid number %s", thisLevel, count2, thisCuboidNum));
         } else {
             logger.info("sanity check success for level " + thisLevel + ", count(*) is " + (count2 / thisCuboidNum));
         }
@@ -433,8 +407,7 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
             }
         }).reduce(new Function2<Tuple2<ByteArray, Long>, Tuple2<ByteArray, Long>, Tuple2<ByteArray, Long>>() {
             @Override
-            public Tuple2<ByteArray, Long> call(Tuple2<ByteArray, Long> longTuple2, Tuple2<ByteArray, Long> longTuple22)
-                    throws Exception {
+            public Tuple2<ByteArray, Long> call(Tuple2<ByteArray, Long> longTuple2, Tuple2<ByteArray, Long> longTuple22) throws Exception {
                 return new Tuple2<>(ONE, longTuple2._2() + longTuple22._2());
             }
         })._2();
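
For reference, a minimal sketch (not the actual Spark job) of the invariant that SparkCubingByLayer.sanityCheck() in the diff above verifies per layer: the COUNT(*) measure summed over all cuboids of a level must equal the source row count multiplied by the number of cuboids in that level. A plain Map stands in for the level's RDD, and the cuboid IDs and counts are hypothetical.

    import java.util.HashMap;
    import java.util.Map;

    public class LayerSanityCheckSketch {

        static void sanityCheck(Map<Long, Long> countPerCuboid, long totalCount, int level) {
            int cuboidNum = countPerCuboid.size();
            long sum = 0L;
            for (long c : countPerCuboid.values()) {
                sum += c;                              // sum of the COUNT(*) measure across cuboids
            }
            if (sum != totalCount * cuboidNum) {
                throw new IllegalStateException(String.format(
                        "Sanity check failed, level %s, total count(*) is %s; cuboid number %s",
                        level, sum, cuboidNum));
            }
            System.out.println("sanity check success for level " + level
                    + ", count(*) is " + (sum / cuboidNum));
        }

        public static void main(String[] args) {
            // hypothetical: 3 child cuboids of the base cuboid, each aggregating 1000 source rows
            Map<Long, Long> counts = new HashMap<>();
            counts.put(6L, 1000L);
            counts.put(5L, 1000L);
            counts.put(3L, 1000L);
            sanityCheck(counts, 1000L, 1);
        }
    }
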

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
index e05d63e..1ed2235 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
@@ -84,8 +84,7 @@ public class SparkExecutable extends AbstractExecutable {
         hadoopConf = System.getProperty("kylin.hadoop.conf.dir");
 
         if (StringUtils.isEmpty(hadoopConf)) {
-            throw new RuntimeException(
-                    "kylin_hadoop_conf_dir is empty, check if there's error in the output of 'kylin.sh start'");
+            throw new RuntimeException("kylin_hadoop_conf_dir is empty, check if there's error in the output of 'kylin.sh start'");
         }
 
         File hiveConfFile = new File(hadoopConf, "hive-site.xml");
@@ -109,8 +108,7 @@ public class SparkExecutable extends AbstractExecutable {
         }
 
         StringBuilder stringBuilder = new StringBuilder();
-        stringBuilder.append(
-                "export HADOOP_CONF_DIR=%s && %s/bin/spark-submit --class org.apache.kylin.common.util.SparkEntry ");
+        stringBuilder.append("export HADOOP_CONF_DIR=%s && %s/bin/spark-submit --class org.apache.kylin.common.util.SparkEntry ");
 
         Map<String, String> sparkConfs = config.getSparkConfigOverride();
         for (Map.Entry<String, String> entry : sparkConfs.entrySet()) {
@@ -119,8 +117,7 @@ public class SparkExecutable extends AbstractExecutable {
 
         stringBuilder.append("--files %s --jars %s %s %s");
         try {
-            String cmd = String.format(stringBuilder.toString(), hadoopConf, KylinConfig.getSparkHome(),
-                    hbaseConfFile.getAbsolutePath(), jars, jobJar, formatArgs());
+            String cmd = String.format(stringBuilder.toString(), hadoopConf, KylinConfig.getSparkHome(), hbaseConfFile.getAbsolutePath(), jars, jobJar, formatArgs());
             logger.info("cmd: " + cmd);
             CliCommandExecutor exec = new CliCommandExecutor();
             PatternedLogger patternedLogger = new PatternedLogger(logger);
@@ -133,4 +130,5 @@ public class SparkExecutable extends AbstractExecutable {
         }
     }
 
+
 }
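
A minimal sketch of how the spark-submit command line shown in the SparkExecutable diff above is assembled with String.format; the paths and job arguments below are hypothetical placeholders, and the spark config overrides that the real code appends per entry are only noted in a comment.

    public class SparkSubmitCommandSketch {
        public static void main(String[] args) {
            String hadoopConf = "/etc/hadoop/conf";          // from kylin.hadoop.conf.dir
            String sparkHome = "/usr/local/spark";           // from KylinConfig.getSparkHome()
            String hbaseConfFile = "/tmp/hbase-site.xml";    // packaged HBase config, passed via --files
            String jars = "/opt/kylin/lib/dep.jar";          // --jars value
            String jobJar = "/opt/kylin/lib/kylin-job.jar";  // the Kylin job jar
            String jobArgs = "-className org.apache.kylin.engine.spark.SparkCubingByLayer";

            StringBuilder sb = new StringBuilder();
            sb.append("export HADOOP_CONF_DIR=%s && %s/bin/spark-submit --class org.apache.kylin.common.util.SparkEntry ");
            // the real code appends each spark config override here as "--conf key=value "
            sb.append("--files %s --jars %s %s %s");

            String cmd = String.format(sb.toString(), hadoopConf, sparkHome,
                    hbaseConfFile, jars, jobJar, jobArgs);
            System.out.println(cmd);
        }
    }
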

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/IteratorUtils.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/IteratorUtils.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/IteratorUtils.java
index 68ac1af..a8a4d28 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/IteratorUtils.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/IteratorUtils.java
@@ -30,8 +30,7 @@ import scala.Tuple2;
  */
 public class IteratorUtils {
 
-    public static <K, V> Iterator<Tuple2<K, V>> merge(final Iterator<Tuple2<K, V>> input,
-            final Comparator<K> comparator, final Function<Iterable<V>, V> converter) {
+    public static <K, V> Iterator<Tuple2<K, V>> merge(final Iterator<Tuple2<K, V>> input, final Comparator<K> comparator, final Function<Iterable<V>, V> converter) {
         return new Iterator<Tuple2<K, V>>() {
 
             Tuple2<K, V> current = input.hasNext() ? input.next() : null;
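
A self-contained sketch (not the Kylin class itself) of what merging adjacent equal-keyed tuples could look like; the assumption that IteratorUtils.merge groups consecutive tuples whose keys compare equal and folds their values with the converter is inferred only from the signature above, so treat the semantics as illustrative.

    import java.util.*;
    import java.util.function.Function;

    public class AdjacentMergeSketch {
        static <K, V> List<Map.Entry<K, V>> merge(List<Map.Entry<K, V>> input,
                                                  Comparator<K> comparator,
                                                  Function<List<V>, V> converter) {
            List<Map.Entry<K, V>> out = new ArrayList<>();
            int i = 0;
            while (i < input.size()) {
                K key = input.get(i).getKey();
                List<V> run = new ArrayList<>();
                // collect the run of consecutive entries whose keys compare equal
                while (i < input.size() && comparator.compare(input.get(i).getKey(), key) == 0) {
                    run.add(input.get(i).getValue());
                    i++;
                }
                out.add(new AbstractMap.SimpleEntry<>(key, converter.apply(run)));
            }
            return out;
        }

        public static void main(String[] args) {
            List<Map.Entry<String, Integer>> in = Arrays.asList(
                    new AbstractMap.SimpleEntry<>("a", 1),
                    new AbstractMap.SimpleEntry<>("a", 2),
                    new AbstractMap.SimpleEntry<>("b", 5));
            // sum the values of each run of equal keys
            System.out.println(merge(in, Comparator.naturalOrder(),
                    vs -> vs.stream().mapToInt(Integer::intValue).sum()));
            // prints [a=3, b=5]
        }
    }
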

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/test/java/org/apache/kylin/engine/spark/cube/BufferedCuboidWriterTest.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/test/java/org/apache/kylin/engine/spark/cube/BufferedCuboidWriterTest.java b/engine-spark/src/test/java/org/apache/kylin/engine/spark/cube/BufferedCuboidWriterTest.java
index 548a496..8afea55 100644
--- a/engine-spark/src/test/java/org/apache/kylin/engine/spark/cube/BufferedCuboidWriterTest.java
+++ b/engine-spark/src/test/java/org/apache/kylin/engine/spark/cube/BufferedCuboidWriterTest.java
@@ -38,8 +38,7 @@ public class BufferedCuboidWriterTest {
         final BufferedCuboidWriter bufferedCuboidWriter = new BufferedCuboidWriter(new TupleConverter() {
             @Override
             public Tuple2<byte[], byte[]> convert(long cuboidId, GTRecord record) {
-                return new Tuple2<>(Long.valueOf(cuboidId).toString().getBytes(),
-                        Long.valueOf(cuboidId).toString().getBytes());
+                return new Tuple2<>(Long.valueOf(cuboidId).toString().getBytes(), Long.valueOf(cuboidId).toString().getBytes());
             }
         });
         final int testCount = 10000000;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/engine-spark/src/test/java/org/apache/kylin/engine/spark/util/KyroMappingGenerator.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/test/java/org/apache/kylin/engine/spark/util/KyroMappingGenerator.java b/engine-spark/src/test/java/org/apache/kylin/engine/spark/util/KyroMappingGenerator.java
index 095c041..b181d33 100644
--- a/engine-spark/src/test/java/org/apache/kylin/engine/spark/util/KyroMappingGenerator.java
+++ b/engine-spark/src/test/java/org/apache/kylin/engine/spark/util/KyroMappingGenerator.java
@@ -18,21 +18,20 @@
 
 package org.apache.kylin.engine.spark.util;
 
-import java.io.Serializable;
-import java.util.Set;
-import java.util.TreeSet;
-
 import org.apache.kylin.common.util.BytesSerializer;
 import org.apache.kylin.measure.MeasureIngester;
 import org.reflections.Reflections;
 
+import java.io.Serializable;
+import java.util.Set;
+import java.util.TreeSet;
+
 /**
  * Generate Kyro Registrator class, the output will be added into KylinKyroRegistrator manually. No runtime dependency with Reflections.
  */
 public class KyroMappingGenerator {
     public static void main(String[] args) {
-        Set<Class<? extends Serializable>> subTypesOfSerializable = new Reflections("org.apache.kylin")
-                .getSubTypesOf(Serializable.class);
+        Set<Class<? extends Serializable>> subTypesOfSerializable = new Reflections("org.apache.kylin").getSubTypesOf(Serializable.class);
         String begin = "kyroClasses.add(";
         String end = ".class);";
         TreeSet<String> sortedSet = new TreeSet();
@@ -40,14 +39,12 @@ public class KyroMappingGenerator {
             if (clazz.getCanonicalName() != null)
                 sortedSet.add(clazz.getCanonicalName());
         }
-        Set<Class<? extends BytesSerializer>> subTypesOfBytes = new Reflections("org.apache.kylin.metadata.datatype")
-                .getSubTypesOf(BytesSerializer.class);
+        Set<Class<? extends BytesSerializer>> subTypesOfBytes = new Reflections("org.apache.kylin.metadata.datatype").getSubTypesOf(BytesSerializer.class);
         for (Class clazz : subTypesOfBytes) {
             if (clazz.getCanonicalName() != null)
                 sortedSet.add(clazz.getCanonicalName());
         }
-        Set<Class<? extends MeasureIngester>> subTypesOfMeasure = new Reflections("org.apache.kylin.measure")
-                .getSubTypesOf(MeasureIngester.class);
+        Set<Class<? extends MeasureIngester>> subTypesOfMeasure = new Reflections("org.apache.kylin.measure").getSubTypesOf(MeasureIngester.class);
         for (Class clazz : subTypesOfMeasure) {
             if (clazz.getCanonicalName() != null)
                 sortedSet.add(clazz.getCanonicalName());
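
A minimal sketch of the kind of output KyroMappingGenerator produces: one registration line per discovered class, sorted, ready to paste into the Kylin Kryo registrator. The class names below are illustrative examples, not the generator's real output.

    import java.util.TreeSet;

    public class KyroMappingOutputSketch {
        public static void main(String[] args) {
            TreeSet<String> sortedSet = new TreeSet<>();
            sortedSet.add("org.apache.kylin.common.util.ByteArray");
            sortedSet.add("org.apache.kylin.cube.model.CubeDesc");

            String begin = "kyroClasses.add(";
            String end = ".class);";
            for (String className : sortedSet) {
                System.out.println(begin + className + end);
            }
            // prints, e.g.:
            // kyroClasses.add(org.apache.kylin.common.util.ByteArray.class);
            // kyroClasses.add(org.apache.kylin.cube.model.CubeDesc.class);
        }
    }
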

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/Driver.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/Driver.java b/jdbc/src/main/java/org/apache/kylin/jdbc/Driver.java
index cfc4f5c..33d82f80 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/Driver.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/Driver.java
@@ -74,8 +74,7 @@ public class Driver extends UnregisteredDriver {
         try {
             DriverManager.registerDriver(new Driver());
         } catch (SQLException e) {
-            throw new RuntimeException(
-                    "Error occurred while registering JDBC driver " + Driver.class.getName() + ": " + e.toString());
+            throw new RuntimeException("Error occurred while registering JDBC driver " + Driver.class.getName() + ": " + e.toString());
         }
     }
 
@@ -86,8 +85,7 @@ public class Driver extends UnregisteredDriver {
 
     @Override
     protected DriverVersion createDriverVersion() {
-        return DriverVersion.load(Driver.class, "org-apache-kylin-jdbc.properties", "Kylin JDBC Driver",
-                "unknown version", "Kylin", "unknown version");
+        return DriverVersion.load(Driver.class, "org-apache-kylin-jdbc.properties", "Kylin JDBC Driver", "unknown version", "Kylin", "unknown version");
     }
 
     @Override
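
A hedged usage sketch for the Kylin JDBC driver whose static registration block appears in the diff above; host, port, project and credentials are placeholders, and the URL layout jdbc:kylin://<host>:<port>/<project> is the commonly documented form rather than something confirmed by this diff.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class KylinJdbcUsageSketch {
        public static void main(String[] args) throws Exception {
            Class.forName("org.apache.kylin.jdbc.Driver");   // triggers the static registration block
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:kylin://localhost:7070/learn_kylin", "ADMIN", "KYLIN");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("select count(*) from kylin_sales")) {
                while (rs.next()) {
                    System.out.println(rs.getLong(1));       // row count returned by Kylin
                }
            }
        }
    }
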

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/IRemoteClient.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/IRemoteClient.java b/jdbc/src/main/java/org/apache/kylin/jdbc/IRemoteClient.java
index 56ce2b4..dfd8d76 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/IRemoteClient.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/IRemoteClient.java
@@ -52,7 +52,6 @@ public interface IRemoteClient extends Closeable {
     /**
      * Execute query remotely and get back result.
      */
-    public QueryResult executeQuery(String sql, List<AvaticaParameter> params, List<Object> paramValues,
-            Map<String, String> queryToggles) throws IOException;
+    public QueryResult executeQuery(String sql, List<AvaticaParameter> params, List<Object> paramValues, Map<String, String> queryToggles) throws IOException;
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
index 0d43f8d..86c3a5b 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
@@ -85,8 +85,7 @@ public class KylinClient implements IRemoteClient {
         if (isSSL()) {
             try {
                 SSLSocketFactory sslsf = new SSLSocketFactory(new TrustStrategy() {
-                    public boolean isTrusted(final X509Certificate[] chain, String authType)
-                            throws CertificateException {
+                    public boolean isTrusted(final X509Certificate[] chain, String authType) throws CertificateException {
                         // Oh, I am easy...
                         return true;
                     }
@@ -251,9 +250,8 @@ public class KylinClient implements IRemoteClient {
             throw asIOException(get, response);
         }
 
-        List<TableMetaStub> tableMetaStubs = jsonMapper.readValue(response.getEntity().getContent(),
-                new TypeReference<List<TableMetaStub>>() {
-                });
+        List<TableMetaStub> tableMetaStubs = jsonMapper.readValue(response.getEntity().getContent(), new TypeReference<List<TableMetaStub>>() {
+        });
 
         List<KMetaTable> tables = convertMetaTables(tableMetaStubs);
         List<KMetaSchema> schemas = convertMetaSchemas(tables);
@@ -315,21 +313,15 @@ public class KylinClient implements IRemoteClient {
         for (ColumnMetaStub columnStub : tableStub.getColumns()) {
             columns.add(convertMetaColumn(columnStub));
         }
-        return new KMetaTable(tableStub.getTABLE_CAT(), tableStub.getTABLE_SCHEM(), tableStub.getTABLE_NAME(),
-                tableStub.getTABLE_TYPE(), columns);
+        return new KMetaTable(tableStub.getTABLE_CAT(), tableStub.getTABLE_SCHEM(), tableStub.getTABLE_NAME(), tableStub.getTABLE_TYPE(), columns);
     }
 
     private KMetaColumn convertMetaColumn(ColumnMetaStub columnStub) {
-        return new KMetaColumn(columnStub.getTABLE_CAT(), columnStub.getTABLE_SCHEM(), columnStub.getTABLE_NAME(),
-                columnStub.getCOLUMN_NAME(), columnStub.getDATA_TYPE(), columnStub.getTYPE_NAME(),
-                columnStub.getCOLUMN_SIZE(), columnStub.getDECIMAL_DIGITS(), columnStub.getNUM_PREC_RADIX(),
-                columnStub.getNULLABLE(), columnStub.getCHAR_OCTET_LENGTH(), columnStub.getORDINAL_POSITION(),
-                columnStub.getIS_NULLABLE());
+        return new KMetaColumn(columnStub.getTABLE_CAT(), columnStub.getTABLE_SCHEM(), columnStub.getTABLE_NAME(), columnStub.getCOLUMN_NAME(), columnStub.getDATA_TYPE(), columnStub.getTYPE_NAME(), columnStub.getCOLUMN_SIZE(), columnStub.getDECIMAL_DIGITS(), columnStub.getNUM_PREC_RADIX(), columnStub.getNULLABLE(), columnStub.getCHAR_OCTET_LENGTH(), columnStub.getORDINAL_POSITION(), columnStub.getIS_NULLABLE());
     }
 
     @Override
-    public QueryResult executeQuery(String sql, List<AvaticaParameter> params, List<Object> paramValues,
-            Map<String, String> queryToggles) throws IOException {
+    public QueryResult executeQuery(String sql, List<AvaticaParameter> params, List<Object> paramValues, Map<String, String> queryToggles) throws IOException {
 
         SQLResponseStub queryResp = executeKylinQuery(sql, convertParameters(params, paramValues), queryToggles);
         if (queryResp.getIsException())
@@ -354,8 +346,7 @@ public class KylinClient implements IRemoteClient {
         return result;
     }
 
-    private SQLResponseStub executeKylinQuery(String sql, List<StatementParameter> params,
-            Map<String, String> queryToggles) throws IOException {
+    private SQLResponseStub executeKylinQuery(String sql, List<StatementParameter> params, Map<String, String> queryToggles) throws IOException {
         String url = baseUrl() + "/kylin/api/query";
         String project = conn.getProject();
 
@@ -397,11 +388,7 @@ public class KylinClient implements IRemoteClient {
             Class columnClass = convertType(scm.getColumnType());
             ScalarType type = ColumnMetaData.scalar(scm.getColumnType(), scm.getColumnTypeName(), Rep.of(columnClass));
 
-            ColumnMetaData meta = new ColumnMetaData(i, scm.isAutoIncrement(), scm.isCaseSensitive(),
-                    scm.isSearchable(), scm.isCurrency(), scm.getIsNullable(), scm.isSigned(), scm.getDisplaySize(),
-                    scm.getLabel(), scm.getName(), scm.getSchemaName(), scm.getPrecision(), scm.getScale(),
-                    scm.getTableName(), scm.getSchemaName(), type, scm.isReadOnly(), scm.isWritable(), scm.isWritable(),
-                    columnClass.getCanonicalName());
+            ColumnMetaData meta = new ColumnMetaData(i, scm.isAutoIncrement(), scm.isCaseSensitive(), scm.isSearchable(), scm.isCurrency(), scm.getIsNullable(), scm.isSigned(), scm.getDisplaySize(), scm.getLabel(), scm.getName(), scm.getSchemaName(), scm.getPrecision(), scm.getScale(), scm.getTableName(), scm.getSchemaName(), type, scm.isReadOnly(), scm.isWritable(), scm.isWritable(), columnClass.getCanonicalName());
 
             metas.add(meta);
         }
@@ -426,8 +413,7 @@ public class KylinClient implements IRemoteClient {
     }
 
     private IOException asIOException(HttpRequestBase request, HttpResponse response) throws IOException {
-        return new IOException(request.getMethod() + " failed, error code " + response.getStatusLine().getStatusCode()
-                + " and response: " + EntityUtils.toString(response.getEntity()));
+        return new IOException(request.getMethod() + " failed, error code " + response.getStatusLine().getStatusCode() + " and response: " + EntityUtils.toString(response.getEntity()));
     }
 
     @Override
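
A hedged sketch of driving the IRemoteClient.executeQuery(...) contract shown in the diffs above. Obtaining a concrete client (normally the KylinClient created for the current connection) is elided, and the toggle key used here is a placeholder, not a verified Kylin property name.

    import java.io.IOException;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.kylin.jdbc.IRemoteClient;

    public class RemoteQuerySketch {
        // Fires a SQL query through whatever IRemoteClient implementation is supplied.
        static void runQuery(IRemoteClient client) throws IOException {
            Map<String, String> queryToggles = new HashMap<>();
            queryToggles.put("ACCEPT_PARTIAL_RESULT", "false"); // placeholder toggle key, for illustration only

            // no bind parameters in this example, hence the two empty lists
            client.executeQuery("select part_dt, sum(price) from kylin_sales group by part_dt",
                    Collections.emptyList(), Collections.emptyList(), queryToggles);
        }
    }
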

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
index 7fd09d6..6852998 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinConnection.java
@@ -47,8 +47,7 @@ public class KylinConnection extends AvaticaConnection {
     private final String project;
     private final IRemoteClient remoteClient;
 
-    protected KylinConnection(UnregisteredDriver driver, KylinJdbcFactory factory, String url, Properties info)
-            throws SQLException {
+    protected KylinConnection(UnregisteredDriver driver, KylinJdbcFactory factory, String url, Properties info) throws SQLException {
         super(driver, factory, url, info);
 
         String odbcUrl = url;
@@ -84,8 +83,7 @@ public class KylinConnection extends AvaticaConnection {
     }
 
     @Override
-    public AvaticaStatement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability)
-            throws SQLException {
+    public AvaticaStatement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
         return super.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability);
     }
 
@@ -102,11 +100,9 @@ public class KylinConnection extends AvaticaConnection {
     }
 
     @Override
-    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency,
-            int resultSetHoldability) throws SQLException {
+    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
         Meta.Signature sig = mockPreparedSignature(sql);
-        return factory().newPreparedStatement(this, null, sig, resultSetType, resultSetConcurrency,
-                resultSetHoldability);
+        return factory().newPreparedStatement(this, null, sig, resultSetType, resultSetConcurrency, resultSetHoldability);
     }
 
     // TODO add restful API to prepare SQL, get back expected ResultSetMetaData

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
index 32bf6ca..6aae983 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinJdbcFactory.java
@@ -73,8 +73,7 @@ public class KylinJdbcFactory implements AvaticaFactory {
     }
 
     @Override
-    public AvaticaConnection newConnection(UnregisteredDriver driver, AvaticaFactory factory, String url,
-            Properties info) throws SQLException {
+    public AvaticaConnection newConnection(UnregisteredDriver driver, AvaticaFactory factory, String url, Properties info) throws SQLException {
         return new KylinConnection(driver, (KylinJdbcFactory) factory, url, info);
     }
 
@@ -85,23 +84,17 @@ public class KylinJdbcFactory implements AvaticaFactory {
     }
 
     @Override
-    public AvaticaStatement newStatement(AvaticaConnection connection, StatementHandle h, int resultSetType,
-            int resultSetConcurrency, int resultSetHoldability) throws SQLException {
-        return new KylinStatement((KylinConnection) connection, h, resultSetType, resultSetConcurrency,
-                resultSetHoldability);
+    public AvaticaStatement newStatement(AvaticaConnection connection, StatementHandle h, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+        return new KylinStatement((KylinConnection) connection, h, resultSetType, resultSetConcurrency, resultSetHoldability);
     }
 
     @Override
-    public AvaticaPreparedStatement newPreparedStatement(AvaticaConnection connection, StatementHandle h,
-            Signature signature, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
-            throws SQLException {
-        return new KylinPreparedStatement((KylinConnection) connection, h, signature, resultSetType,
-                resultSetConcurrency, resultSetHoldability);
+    public AvaticaPreparedStatement newPreparedStatement(AvaticaConnection connection, StatementHandle h, Signature signature, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+        return new KylinPreparedStatement((KylinConnection) connection, h, signature, resultSetType, resultSetConcurrency, resultSetHoldability);
     }
 
     @Override
-    public AvaticaResultSet newResultSet(AvaticaStatement statement, QueryState state, Signature signature,
-            TimeZone timeZone, Frame firstFrame) throws SQLException {
+    public AvaticaResultSet newResultSet(AvaticaStatement statement, QueryState state, Signature signature, TimeZone timeZone, Frame firstFrame) throws SQLException {
         AvaticaResultSetMetaData resultSetMetaData = new AvaticaResultSetMetaData(statement, null, signature);
         return new KylinResultSet(statement, state, signature, resultSetMetaData, timeZone, firstFrame);
     }


[41/67] [abbrv] kylin git commit: KYLIN-2646 bug fix

Posted by li...@apache.org.
KYLIN-2646 bug fix


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ce97f814
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ce97f814
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ce97f814

Branch: refs/heads/master
Commit: ce97f814f7bfa003011e5176ab891a11558ad009
Parents: 6ce9983
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri May 26 16:51:42 2017 +0800
Committer: Roger Shi <ro...@gmail.com>
Committed: Sat May 27 21:11:03 2017 +0800

----------------------------------------------------------------------
 .../kylin/metadata/project/ProjectManager.java  | 23 +++++++++++++++-----
 1 file changed, 17 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ce97f814/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index ff2d3e6..a172db8 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -20,7 +20,6 @@ package org.apache.kylin.metadata.project;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -48,6 +47,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 public class ProjectManager {
     private static final Logger logger = LoggerFactory.getLogger(ProjectManager.class);
@@ -416,17 +416,28 @@ public class ProjectManager {
     }
 
     public List<ColumnDesc> listExposedColumns(String project, TableDesc tableDesc) {
-        return config.isAdhocEnabled() ? //
-                Arrays.asList(tableDesc.getColumns()) : //
-                Lists.newArrayList(l2Cache.listExposedColumns(norm(project), tableDesc.getIdentity()));
+        Set<ColumnDesc> exposedColumns = l2Cache.listExposedColumns(norm(project), tableDesc.getIdentity());
+
+        if (config.isAdhocEnabled()) {
+            // take care of computed columns
+            Set<ColumnDesc> dedup = Sets.newHashSet(tableDesc.getColumns());
+            dedup.addAll(exposedColumns);
+            return Lists.newArrayList(dedup);
+        } else {
+            return Lists.newArrayList(exposedColumns);
+        }
     }
 
     public boolean isExposedTable(String project, String table) {
-        return config.isAdhocEnabled() ? l2Cache.isDefinedTable(norm(project), table) : l2Cache.isExposedTable(norm(project), table);
+        return config.isAdhocEnabled() ? //
+                l2Cache.isDefinedTable(norm(project), table) : //
+                l2Cache.isExposedTable(norm(project), table);
     }
 
     public boolean isExposedColumn(String project, String table, String col) {
-        return config.isAdhocEnabled() ? l2Cache.isDefinedColumn(norm(project), table, col) : l2Cache.isExposedColumn(norm(project), table, col);
+        return config.isAdhocEnabled() ? //
+                l2Cache.isDefinedColumn(norm(project), table, col) || l2Cache.isExposedColumn(norm(project), table, col) : //
+                l2Cache.isExposedColumn(norm(project), table, col);
     }
 
     public Set<IRealization> listAllRealizations(String project) {
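
A minimal sketch of the KYLIN-2646 change above: when ad-hoc query is enabled, the exposed columns become the de-duplicated union of the table's own columns and the columns already exposed by the project cache, so columns from both sources (including computed columns) are kept. ColumnDesc is replaced by String here purely for illustration.

    import java.util.*;

    public class ExposedColumnsSketch {
        static List<String> listExposedColumns(boolean adhocEnabled,
                                               List<String> tableColumns,
                                               Set<String> cacheExposedColumns) {
            if (adhocEnabled) {
                // union of both sources, de-duplicated via a HashSet
                Set<String> dedup = new HashSet<>(tableColumns);
                dedup.addAll(cacheExposedColumns);
                return new ArrayList<>(dedup);
            } else {
                return new ArrayList<>(cacheExposedColumns);
            }
        }

        public static void main(String[] args) {
            List<String> tableColumns = Arrays.asList("PRICE", "PART_DT");
            Set<String> cacheExposed = new HashSet<>(Arrays.asList("PART_DT", "CC_TOTAL")); // CC_TOTAL: hypothetical computed column
            System.out.println(listExposedColumns(true, tableColumns, cacheExposed));
            // e.g. [PART_DT, PRICE, CC_TOTAL] -- iteration order of a HashSet is not guaranteed
        }
    }
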


[20/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
index 50c4639..8812dad 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
@@ -82,6 +82,7 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
     private JobEngineConfig jobEngineConfig;
     private String serverName;
 
+
     private final static String SEGMENT_ID = "segmentId";
     public static final String ZOOKEEPER_LOCK_PATH = "/job_engine/lock"; // note ZookeeperDistributedLock will ensure zk path prefix: /kylin/metadata
 
@@ -143,8 +144,7 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
                         logger.warn(executable.toString() + " fail to schedule in server: " + serverName, ex);
                     }
                 }
-                logger.info("Job Fetcher: " + nRunning + " should running, " + runningJobs.size() + " actual running, "
-                        + nOtherRunning + " running in other server, " + nReady + " ready, " + nOthers + " others");
+                logger.info("Job Fetcher: " + nRunning + " should running, " + runningJobs.size() + " actual running, " + nOtherRunning + " running in other server, " + nReady + " ready, " + nOthers + " others");
             } catch (Exception e) {
                 logger.warn("Job Fetcher caught a exception " + e);
             }
@@ -216,12 +216,9 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
                 final Output output = executableManager.getOutput(id);
                 if (output.getState() == ExecutableState.RUNNING) {
                     AbstractExecutable executable = executableManager.getJob(id);
-                    if (executable instanceof DefaultChainedExecutable
-                            && executable.getParams().get(SEGMENT_ID).equalsIgnoreCase(segmentId)
-                            && !nodeData.equalsIgnoreCase(serverName)) {
+                    if (executable instanceof DefaultChainedExecutable && executable.getParams().get(SEGMENT_ID).equalsIgnoreCase(segmentId) && !nodeData.equalsIgnoreCase(serverName)) {
                         try {
-                            logger.warn(nodeData + " has released the lock for: " + segmentId
-                                    + " but the job still running. so " + serverName + " resume the job");
+                            logger.warn(nodeData + " has released the lock for: " + segmentId + " but the job still running. so " + serverName + " resume the job");
                             if (!jobLock.isLocked(getLockPath(segmentId))) {
                                 executableManager.resumeRunningJobForce(executable.getId());
                                 fetcherPool.schedule(fetcher, 0, TimeUnit.SECONDS);
@@ -280,13 +277,11 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
         lockWatch = this.jobLock.watchLocks(getWatchPath(), watchPool, watcherProcess);
 
         int corePoolSize = jobEngineConfig.getMaxConcurrentJobLimit();
-        jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS,
-                new SynchronousQueue<Runnable>());
+        jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue<Runnable>());
         context = new DefaultContext(Maps.<String, Executable> newConcurrentMap(), jobEngineConfig.getConfig());
 
         fetcher = new FetcherRunner();
-        fetcherPool.scheduleAtFixedRate(fetcher, 10, ExecutableConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS,
-                TimeUnit.SECONDS);
+        fetcherPool.scheduleAtFixedRate(fetcher, 10, ExecutableConstants.DEFAULT_SCHEDULER_INTERVAL_SECONDS, TimeUnit.SECONDS);
         hasStarted = true;
 
         resumeAllRunningJobs();
@@ -324,7 +319,7 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
         }
         return path;
     }
-
+    
     @Override
     public void shutdown() throws SchedulerException {
         logger.info("Will shut down Job Engine ....");

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/main/java/org/apache/kylin/job/lock/JobLock.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/lock/JobLock.java b/core-job/src/main/java/org/apache/kylin/job/lock/JobLock.java
index 5ed4b7e..1b6b29e 100644
--- a/core-job/src/main/java/org/apache/kylin/job/lock/JobLock.java
+++ b/core-job/src/main/java/org/apache/kylin/job/lock/JobLock.java
@@ -23,7 +23,7 @@ package org.apache.kylin.job.lock;
  * This interface is for such negotiation. 
  */
 public interface JobLock {
-
+    
     boolean lockJobEngine();
 
     void unlockJobEngine();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/test/java/org/apache/kylin/job/ExecutableManagerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/ExecutableManagerTest.java b/core-job/src/test/java/org/apache/kylin/job/ExecutableManagerTest.java
index 76a5300..faea9a4 100644
--- a/core-job/src/test/java/org/apache/kylin/job/ExecutableManagerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/ExecutableManagerTest.java
@@ -119,8 +119,7 @@ public class ExecutableManagerTest extends LocalFileMetadataTestCase {
         assertEquals(one.getStatus(), another.getStatus());
         assertEquals(one.isRunnable(), another.isRunnable());
         assertEquals(one.getOutput(), another.getOutput());
-        assertTrue((one.getParams() == null && another.getParams() == null)
-                || (one.getParams() != null && another.getParams() != null));
+        assertTrue((one.getParams() == null && another.getParams() == null) || (one.getParams() != null && another.getParams() != null));
         if (one.getParams() != null) {
             assertEquals(one.getParams().size(), another.getParams().size());
             for (String key : one.getParams().keySet()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java b/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
index fe77d09..77914ef 100644
--- a/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/JobEngineConfigTest.java
@@ -18,15 +18,15 @@
 
 package org.apache.kylin.job;
 
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HotLoadKylinPropertiesTestCase;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.junit.Test;
 
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
 /**
  * @author kangkaisen
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java b/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
index f5abd30..9a3eb48 100644
--- a/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
+++ b/core-job/src/test/java/org/apache/kylin/job/SelfStopExecutable.java
@@ -38,11 +38,11 @@ public class SelfStopExecutable extends BaseTestExecutable {
         try {
             for (int i = 0; i < 20; i++) {
                 sleepOneSecond();
-
+                
                 if (isDiscarded())
                     return new ExecuteResult(ExecuteResult.State.STOPPED, "stopped");
             }
-
+                
             return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
         } finally {
             doingWork = false;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
index c514dfd..1ada9a1 100644
--- a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
@@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
 public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
 
     private static final Logger logger = LoggerFactory.getLogger(BaseSchedulerTest.class);
-
+    
     private DefaultScheduler scheduler;
 
     protected ExecutableManager jobService;
@@ -76,7 +76,7 @@ public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
     protected void waitForJobFinish(String jobId) {
         int error = 0;
         final int errorLimit = 3;
-
+        
         while (error < errorLimit) {
             try {
                 Thread.sleep(2000);
@@ -87,8 +87,7 @@ public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
             try {
                 AbstractExecutable job = jobService.getJob(jobId);
                 ExecutableState status = job.getStatus();
-                if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR
-                        || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
+                if (status == ExecutableState.SUCCEED || status == ExecutableState.ERROR || status == ExecutableState.STOPPED || status == ExecutableState.DISCARDED) {
                     break;
                 }
             } catch (Exception ex) {
@@ -96,7 +95,7 @@ public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
                 error++;
             }
         }
-
+        
         if (error >= errorLimit) {
             throw new RuntimeException("waitForJobFinish() encounters exceptions, see logs above");
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/BooleanDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/BooleanDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/BooleanDimEnc.java
index 1b2b57d..fbdb0bb 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/BooleanDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/BooleanDimEnc.java
@@ -34,14 +34,13 @@ import com.google.common.collect.Maps;
 /**
  * Encoding Boolean values to bytes
  */
-public class BooleanDimEnc extends DimensionEncoding implements Serializable {
+public class BooleanDimEnc extends DimensionEncoding implements Serializable{
     private static final long serialVersionUID = 1L;
 
     public static final String ENCODING_NAME = "boolean";
 
     //NOTE: when add new value, append to the array tail, DO NOT insert!
-    public static String[] ALLOWED_VALUES = new String[] { "", "true", "false", "TRUE", "FALSE", "True", "False", "t",
-            "f", "T", "F", "yes", "no", "YES", "NO", "Yes", "No", "y", "n", "Y", "N", "1", "0" };
+    public static String[] ALLOWED_VALUES = new String[] { "", "true", "false", "TRUE", "FALSE", "True", "False", "t", "f", "T", "F", "yes", "no", "YES", "NO", "Yes", "No", "y", "n", "Y", "N", "1", "0" };
 
     public static final Map<String, Integer> map = Maps.newHashMap();
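
A hedged sketch of the idea behind BooleanDimEnc: each accepted literal maps to its index in ALLOWED_VALUES, and that single-byte index is what gets stored, which is why the comment above forbids inserting new literals in the middle of the array (it would silently re-map already-encoded data). Only the index mapping is shown here, not the full encoder.

    import java.util.HashMap;
    import java.util.Map;

    public class BooleanEncodingSketch {
        static final String[] ALLOWED_VALUES = { "", "true", "false", "TRUE", "FALSE", "True", "False",
                "t", "f", "T", "F", "yes", "no", "YES", "NO", "Yes", "No", "y", "n", "Y", "N", "1", "0" };
        static final Map<String, Integer> CODE = new HashMap<>();
        static {
            for (int i = 0; i < ALLOWED_VALUES.length; i++) {
                CODE.put(ALLOWED_VALUES[i], i);   // literal -> stored code
            }
        }

        public static void main(String[] args) {
            System.out.println(CODE.get("true"));  // 1
            System.out.println(CODE.get("FALSE")); // 4
            System.out.println(CODE.get("Y"));     // 19
        }
    }
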
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/DateDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/DateDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/DateDimEnc.java
index 50c08e6..6f06841 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/DateDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/DateDimEnc.java
@@ -96,8 +96,7 @@ public class DateDimEnc extends AbstractDateDimEnc implements Serializable {
         return millis;
     }
 
-    public static String[] replaceEncodingArgs(String encoding, String[] encodingArgs, String encodingName,
-            DataType type) {
+    public static String[] replaceEncodingArgs(String encoding, String[] encodingArgs, String encodingName, DataType type) {
         // https://issues.apache.org/jira/browse/KYLIN-2495
         if (DateDimEnc.ENCODING_NAME.equals(encodingName)) {
             if (type.isIntegerFamily()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
index c846560..dcc8d47 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
@@ -100,8 +100,7 @@ public class DictionaryDimEnc extends DimensionEncoding implements Serializable
             for (int i = outputOffset; i < outputOffset + fixedLen; i++) {
                 output[i] = defaultByte;
             }
-            logger.error("Can't translate value " + valueStr + " to dictionary ID, roundingFlag " + roundingFlag
-                    + ". Using default value " + String.format("\\x%02X", defaultByte));
+            logger.error("Can't translate value " + valueStr + " to dictionary ID, roundingFlag " + roundingFlag + ". Using default value " + String.format("\\x%02X", defaultByte));
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenDimEnc.java
index 39a985e..9ce1577 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenDimEnc.java
@@ -30,7 +30,7 @@ import org.apache.kylin.metadata.datatype.DataTypeSerializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class FixedLenDimEnc extends DimensionEncoding implements Serializable {
+public class FixedLenDimEnc extends DimensionEncoding implements Serializable{
     private static final long serialVersionUID = 1L;
 
     private static Logger logger = LoggerFactory.getLogger(FixedLenDimEnc.class);
@@ -100,9 +100,7 @@ public class FixedLenDimEnc extends DimensionEncoding implements Serializable {
         int valueLen = value.length;
         if (valueLen > fixedLen) {
             if (avoidVerbose++ % 10000 == 0) {
-                logger.warn(
-                        "Expect at most " + fixedLen + " bytes, but got " + valueLen + ", will truncate, value string: "
-                                + Bytes.toString(value, 0, valueLen) + " times:" + avoidVerbose);
+                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueLen + ", will truncate, value string: " + Bytes.toString(value, 0, valueLen) + " times:" + avoidVerbose);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenHexDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenHexDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenHexDimEnc.java
index 701aebc..a931450 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenHexDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/FixedLenHexDimEnc.java
@@ -44,7 +44,7 @@ import com.google.common.base.Preconditions;
  * <p>
  * Due to these limitations hex representation of hash values(with no padding, better with even characters) is more suitable
  */
-public class FixedLenHexDimEnc extends DimensionEncoding implements Serializable {
+public class FixedLenHexDimEnc extends DimensionEncoding implements Serializable{
     private static final long serialVersionUID = 1L;
 
     private static Logger logger = LoggerFactory.getLogger(FixedLenHexDimEnc.class);
@@ -166,19 +166,16 @@ public class FixedLenHexDimEnc extends DimensionEncoding implements Serializable
         byte[] value = Bytes.toBytes(valueStr);
         int valueLen = value.length;
         int endOffset = outputOffset + bytelen;
-
+        
         if (valueLen > hexLength) {
             if (avoidVerbose++ % 10000 == 0) {
-                logger.warn("Expect at most " + hexLength + " bytes, but got " + valueLen
-                        + ", will truncate, value string: " + Bytes.toString(value, 0, valueLen) + " times:"
-                        + avoidVerbose);
+                logger.warn("Expect at most " + hexLength + " bytes, but got " + valueLen + ", will truncate, value string: " + Bytes.toString(value, 0, valueLen) + " times:" + avoidVerbose);
             }
         }
 
         if (valueLen >= hexLength && isF(value, 0, hexLength)) {
             if (avoidVerbose2++ % 10000 == 0) {
-                logger.warn("All 'F' value: " + Bytes.toString(value, 0, valueLen)
-                        + "will become null after encode/decode. times:" + avoidVerbose);
+                logger.warn("All 'F' value: " + Bytes.toString(value, 0, valueLen) + "will become null after encode/decode. times:" + avoidVerbose);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/IntDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/IntDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/IntDimEnc.java
index 695fa03..3650200 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/IntDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/IntDimEnc.java
@@ -34,13 +34,12 @@ import org.slf4j.LoggerFactory;
  * deprecated use IntegerDimEnc instead
  * @deprecated
  */
-public class IntDimEnc extends DimensionEncoding implements Serializable {
+public class IntDimEnc extends DimensionEncoding implements Serializable{
     private static final long serialVersionUID = 1L;
 
     private static Logger logger = LoggerFactory.getLogger(IntDimEnc.class);
 
-    private static final long[] CAP = { 0, 0xffL, 0xffffL, 0xffffffL, 0xffffffffL, 0xffffffffffL, 0xffffffffffffL,
-            0xffffffffffffffL, Long.MAX_VALUE };
+    private static final long[] CAP = { 0, 0xffL, 0xffffL, 0xffffffL, 0xffffffffL, 0xffffffffffL, 0xffffffffffffL, 0xffffffffffffffL, Long.MAX_VALUE };
 
     public static final String ENCODING_NAME = "int";
 
@@ -88,8 +87,7 @@ public class IntDimEnc extends DimensionEncoding implements Serializable {
         long integer = Long.parseLong(valueStr);
         if (integer > CAP[fixedLen]) {
             if (avoidVerbose++ % 10000 == 0) {
-                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueStr + ", will truncate, hit times:"
-                        + avoidVerbose);
+                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueStr + ", will truncate, hit times:" + avoidVerbose);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/IntegerDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/IntegerDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/IntegerDimEnc.java
index 26a40d8..e024696 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/IntegerDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/IntegerDimEnc.java
@@ -37,17 +37,14 @@ import org.slf4j.LoggerFactory;
  * -2^(8*N-1) is not supported because the slot is reserved for null values.
  * -2^(8*N-1) will be encoded with warn, and its output will be null
  */
-public class IntegerDimEnc extends DimensionEncoding implements Serializable {
+public class IntegerDimEnc extends DimensionEncoding implements Serializable{
     private static final long serialVersionUID = 1L;
 
     private static Logger logger = LoggerFactory.getLogger(IntegerDimEnc.class);
 
-    private static final long[] CAP = { 0, 0x7fL, 0x7fffL, 0x7fffffL, 0x7fffffffL, 0x7fffffffffL, 0x7fffffffffffL,
-            0x7fffffffffffffL, 0x7fffffffffffffffL };
-    private static final long[] MASK = { 0, 0xffL, 0xffffL, 0xffffffL, 0xffffffffL, 0xffffffffffL, 0xffffffffffffL,
-            0xffffffffffffffL, 0xffffffffffffffffL };
-    private static final long[] TAIL = { 0, 0x80L, 0x8000L, 0x800000L, 0x80000000L, 0x8000000000L, 0x800000000000L,
-            0x80000000000000L, 0x8000000000000000L };
+    private static final long[] CAP = { 0, 0x7fL, 0x7fffL, 0x7fffffL, 0x7fffffffL, 0x7fffffffffL, 0x7fffffffffffL, 0x7fffffffffffffL, 0x7fffffffffffffffL };
+    private static final long[] MASK = { 0, 0xffL, 0xffffL, 0xffffffL, 0xffffffffL, 0xffffffffffL, 0xffffffffffffL, 0xffffffffffffffL, 0xffffffffffffffffL };
+    private static final long[] TAIL = { 0, 0x80L, 0x8000L, 0x800000L, 0x80000000L, 0x8000000000L, 0x800000000000L, 0x80000000000000L, 0x8000000000000000L };
     static {
         for (int i = 1; i < TAIL.length; ++i) {
             long head = ~MASK[i];
@@ -102,8 +99,7 @@ public class IntegerDimEnc extends DimensionEncoding implements Serializable {
         long integer = Long.parseLong(valueStr);
         if (integer > CAP[fixedLen] || integer < TAIL[fixedLen]) {
             if (avoidVerbose++ % 10000 == 0) {
-                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueStr + ", will truncate, hit times:"
-                        + avoidVerbose);
+                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueStr + ", will truncate, hit times:" + avoidVerbose);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/dimension/OneMoreByteVLongDimEnc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/OneMoreByteVLongDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/OneMoreByteVLongDimEnc.java
index 5738ce9..d998f44 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/OneMoreByteVLongDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/OneMoreByteVLongDimEnc.java
@@ -18,6 +18,11 @@
 
 package org.apache.kylin.dimension;
 
+import org.apache.kylin.common.util.BytesUtil;
+import org.apache.kylin.metadata.datatype.DataTypeSerializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.io.ObjectInput;
 import java.io.ObjectOutput;
@@ -25,25 +30,17 @@ import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 
-import org.apache.kylin.common.util.BytesUtil;
-import org.apache.kylin.metadata.datatype.DataTypeSerializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * not being used yet, prepared for future
  */
-public class OneMoreByteVLongDimEnc extends DimensionEncoding implements Serializable {
+public class OneMoreByteVLongDimEnc extends DimensionEncoding implements Serializable{
     private static final long serialVersionUID = 1L;
 
     private static Logger logger = LoggerFactory.getLogger(OneMoreByteVLongDimEnc.class);
 
-    private static final long[] CAP = { 0, 0x7fL, 0x7fffL, 0x7fffffL, 0x7fffffffL, 0x7fffffffffL, 0x7fffffffffffL,
-            0x7fffffffffffffL, 0x7fffffffffffffffL };
-    private static final long[] MASK = { 0, 0xffL, 0xffffL, 0xffffffL, 0xffffffffL, 0xffffffffffL, 0xffffffffffffL,
-            0xffffffffffffffL, 0xffffffffffffffffL };
-    private static final long[] TAIL = { 0, 0x80L, 0x8000L, 0x800000L, 0x80000000L, 0x8000000000L, 0x800000000000L,
-            0x80000000000000L, 0x8000000000000000L };
+    private static final long[] CAP = { 0, 0x7fL, 0x7fffL, 0x7fffffL, 0x7fffffffL, 0x7fffffffffL, 0x7fffffffffffL, 0x7fffffffffffffL, 0x7fffffffffffffffL };
+    private static final long[] MASK = { 0, 0xffL, 0xffffL, 0xffffffL, 0xffffffffL, 0xffffffffffL, 0xffffffffffffL, 0xffffffffffffffL, 0xffffffffffffffffL };
+    private static final long[] TAIL = { 0, 0x80L, 0x8000L, 0x800000L, 0x80000000L, 0x8000000000L, 0x800000000000L, 0x80000000000000L, 0x8000000000000000L };
     static {
         for (int i = 1; i < TAIL.length; ++i) {
             long head = ~MASK[i];
@@ -98,8 +95,7 @@ public class OneMoreByteVLongDimEnc extends DimensionEncoding implements Seriali
         long integer = Long.parseLong(valueStr);
         if (integer > CAP[fixedLen] || integer < TAIL[fixedLen]) {
             if (avoidVerbose++ % 10000 == 0) {
-                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueStr + ", will truncate, hit times:"
-                        + avoidVerbose);
+                logger.warn("Expect at most " + fixedLen + " bytes, but got " + valueStr + ", will truncate, hit times:" + avoidVerbose);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/BufferedMeasureCodec.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/BufferedMeasureCodec.java b/core-metadata/src/main/java/org/apache/kylin/measure/BufferedMeasureCodec.java
index 09b6a8b..44e5708 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/BufferedMeasureCodec.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/BufferedMeasureCodec.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.measure;
 
+import org.apache.kylin.metadata.datatype.DataType;
+import org.apache.kylin.metadata.model.MeasureDesc;
+
 import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.Collection;
 
-import org.apache.kylin.metadata.datatype.DataType;
-import org.apache.kylin.metadata.model.MeasureDesc;
-
 /**
  * This class embeds a reusable byte buffer for measure encoding, and is not thread-safe.
  * The buffer will grow to accommodate BufferOverflowException until a limit.

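[Editor's note] The class comment above describes a reusable encode buffer that grows on BufferOverflowException up to a limit. The fragment below is a minimal sketch of that pattern; the MAX_BUFFER_SIZE constant and method names are assumptions for illustration, not BufferedMeasureCodec's real fields.

    import java.nio.BufferOverflowException;
    import java.nio.ByteBuffer;

    // Sketch of "grow on BufferOverflowException until a limit"; like the real codec, not thread-safe.
    class GrowingEncodeBuffer {
        private static final int MAX_BUFFER_SIZE = 1 << 20; // assumed cap for the sketch
        private ByteBuffer buf = ByteBuffer.allocate(256);

        ByteBuffer encode(byte[] payload) {
            while (true) {
                try {
                    buf.clear();
                    buf.put(payload);        // throws BufferOverflowException when the buffer is too small
                    buf.flip();
                    return buf;
                } catch (BufferOverflowException e) {
                    int newSize = buf.capacity() * 2;
                    if (newSize > MAX_BUFFER_SIZE)
                        throw e;             // give up once the limit is reached
                    buf = ByteBuffer.allocate(newSize);
                }
            }
        }
    }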
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/MeasureAggregators.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureAggregators.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureAggregators.java
index a54f471..710f324 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureAggregators.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureAggregators.java
@@ -94,8 +94,7 @@ public class MeasureAggregators implements Serializable {
     }
 
     public void aggregate(Object[] values1, Object[] values2, Object[] result, boolean[] aggrMask) {
-        assert values1.length == values2.length && values2.length == descLength && values1.length == result.length
-                && result.length == aggrMask.length;
+        assert values1.length == values2.length && values2.length == descLength && values1.length == result.length && result.length == aggrMask.length;
         for (int i = 0; i < descLength; i++) {
             if (aggrMask[i]) {
                 result[i] = aggs[i].aggregate(values1[i], values2[i]);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/MeasureCodec.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureCodec.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureCodec.java
index edfc8ea..2d73e59 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureCodec.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureCodec.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.measure;
 
-import java.nio.ByteBuffer;
-import java.util.Collection;
-
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.datatype.DataTypeSerializer;
 import org.apache.kylin.metadata.model.MeasureDesc;
 
+import java.nio.ByteBuffer;
+import java.util.Collection;
+
 /**
  * @author yangli9
  * 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/MeasureIngester.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureIngester.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureIngester.java
index 42537eb..ed2cb02 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureIngester.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureIngester.java
@@ -18,13 +18,13 @@
 
 package org.apache.kylin.measure;
 
-import java.util.Collection;
-import java.util.Map;
-
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
+import java.util.Collection;
+import java.util.Map;
+
 abstract public class MeasureIngester<V> implements java.io.Serializable {
     private static final long serialVersionUID = 1L;
 
@@ -41,15 +41,13 @@ abstract public class MeasureIngester<V> implements java.io.Serializable {
         return result;
     }
 
-    abstract public V valueOf(String[] values, MeasureDesc measureDesc,
-            Map<TblColRef, Dictionary<String>> dictionaryMap);
+    abstract public V valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap);
 
     public void reset() {
 
     }
 
-    public V reEncodeDictionary(V value, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> oldDicts,
-            Map<TblColRef, Dictionary<String>> newDicts) {
+    public V reEncodeDictionary(V value, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
         throw new UnsupportedOperationException();
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/MeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureType.java
index 8e7a1f5..f609dd5 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureType.java
@@ -18,11 +18,6 @@
 
 package org.apache.kylin.measure;
 
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
@@ -32,6 +27,11 @@ import org.apache.kylin.metadata.realization.SQLDigest;
 import org.apache.kylin.metadata.tuple.Tuple;
 import org.apache.kylin.metadata.tuple.TupleInfo;
 
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
 /**
  * MeasureType captures how a kind of aggregation is defined, how it is calculated 
  * during cube build, and how it is involved in query and storage scan.
@@ -95,8 +95,7 @@ abstract public class MeasureType<T> implements java.io.Serializable {
      * be modified to drop the satisfied dimension or measure, and a CapabilityInfluence object
      * must be returned to mark the contribution of this measure type.
      */
-    public CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
-            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc measureDesc) {
+    public CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc measureDesc) {
         return null;
     }
 
@@ -143,8 +142,7 @@ abstract public class MeasureType<T> implements java.io.Serializable {
     }
 
     /** The advanced filling mode, multiple tuples per storage record. */
-    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         throw new UnsupportedOperationException();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
index 62cb003..7f3a5f1 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
@@ -121,8 +121,7 @@ abstract public class MeasureTypeFactory<T> {
                     logger.info("Checking custom measure types from kylin config: " + customFactory);
                     factoryInsts.add((MeasureTypeFactory<?>) Class.forName(customFactory).newInstance());
                 } catch (Exception e) {
-                    throw new IllegalArgumentException("Unrecognized MeasureTypeFactory classname: " + customFactory,
-                            e);
+                    throw new IllegalArgumentException("Unrecognized MeasureTypeFactory classname: " + customFactory, e);
                 }
             }
         } catch (KylinConfigCannotInitException e) {
@@ -133,12 +132,10 @@ abstract public class MeasureTypeFactory<T> {
         for (MeasureTypeFactory<?> factory : factoryInsts) {
             String funcName = factory.getAggrFunctionName();
             if (funcName.equals(funcName.toUpperCase()) == false)
-                throw new IllegalArgumentException(
-                        "Aggregation function name '" + funcName + "' must be in upper case");
+                throw new IllegalArgumentException("Aggregation function name '" + funcName + "' must be in upper case");
             String dataTypeName = factory.getAggrDataTypeName();
             if (dataTypeName.equals(dataTypeName.toLowerCase()) == false)
-                throw new IllegalArgumentException(
-                        "Aggregation data type name '" + dataTypeName + "' must be in lower case");
+                throw new IllegalArgumentException("Aggregation data type name '" + dataTypeName + "' must be in lower case");
             Class<? extends DataTypeSerializer<?>> serializer = factory.getAggrDataTypeSerializer();
 
             logger.info("registering " + funcName + "(" + dataTypeName + "), " + factory.getClass());
@@ -156,8 +153,7 @@ abstract public class MeasureTypeFactory<T> {
     }
 
     private static void registerUDAF(MeasureTypeFactory<?> factory) {
-        MeasureType<?> type = factory.createMeasureType(factory.getAggrFunctionName(),
-                DataType.getType(factory.getAggrDataTypeName()));
+        MeasureType<?> type = factory.createMeasureType(factory.getAggrFunctionName(), DataType.getType(factory.getAggrDataTypeName()));
         Map<String, Class<?>> udafs = type.getRewriteCalciteAggrFunctions();
         if (type.needRewrite() == false || udafs == null)
             return;
@@ -168,8 +164,7 @@ abstract public class MeasureTypeFactory<T> {
                 continue; // skip built-in function
 
             if (udafFactories.containsKey(udaf))
-                throw new IllegalStateException(
-                        "UDAF '" + udaf + "' was dup declared by " + udafFactories.get(udaf) + " and " + factory);
+                throw new IllegalStateException("UDAF '" + udaf + "' was dup declared by " + udafFactories.get(udaf) + " and " + factory);
 
             udafFactories.put(udaf, factory);
             udafMap.put(udaf, udafs.get(udaf));
@@ -191,8 +186,7 @@ abstract public class MeasureTypeFactory<T> {
     public static MeasureType<?> createNoRewriteFieldsMeasureType(String funcName, DataType dataType) {
         // currently only has DimCountDistinctAgg
         if (funcName.equalsIgnoreCase(FunctionDesc.FUNC_COUNT_DISTINCT)) {
-            return new DimCountDistinctMeasureType.DimCountDistinctMeasureTypeFactory().createMeasureType(funcName,
-                    dataType);
+            return new DimCountDistinctMeasureType.DimCountDistinctMeasureTypeFactory().createMeasureType(funcName, dataType);
         }
 
         throw new UnsupportedOperationException("No measure type found.");
@@ -234,8 +228,7 @@ abstract public class MeasureTypeFactory<T> {
                 if (needRewrite == null)
                     needRewrite = Boolean.valueOf(b);
                 else if (needRewrite.booleanValue() != b)
-                    throw new IllegalStateException(
-                            "needRewrite() of factorys " + factory + " does not have consensus");
+                    throw new IllegalStateException("needRewrite() of factorys " + factory + " does not have consensus");
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/basic/BasicMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/basic/BasicMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/basic/BasicMeasureType.java
index 114aa8e..ed493a1 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/basic/BasicMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/basic/BasicMeasureType.java
@@ -67,18 +67,15 @@ public class BasicMeasureType extends MeasureType {
 
         if (funcName.equals(FunctionDesc.FUNC_SUM)) {
             if (rtype.isNumberFamily() == false) {
-                throw new IllegalArgumentException(
-                        "Return type for function " + funcName + " must be one of " + DataType.NUMBER_FAMILY);
+                throw new IllegalArgumentException("Return type for function " + funcName + " must be one of " + DataType.NUMBER_FAMILY);
             }
         } else if (funcName.equals(FunctionDesc.FUNC_COUNT)) {
             if (rtype.isIntegerFamily() == false) {
-                throw new IllegalArgumentException(
-                        "Return type for function " + funcName + " must be one of " + DataType.INTEGER_FAMILY);
+                throw new IllegalArgumentException("Return type for function " + funcName + " must be one of " + DataType.INTEGER_FAMILY);
             }
         } else if (funcName.equals(FunctionDesc.FUNC_MAX) || funcName.equals(FunctionDesc.FUNC_MIN)) {
             if (rtype.isNumberFamily() == false) {
-                throw new IllegalArgumentException(
-                        "Return type for function " + funcName + " must be one of " + DataType.NUMBER_FAMILY);
+                throw new IllegalArgumentException("Return type for function " + funcName + " must be one of " + DataType.NUMBER_FAMILY);
             }
         } else {
             KylinConfig config = KylinConfig.getInstanceFromEnv();
@@ -123,8 +120,7 @@ public class BasicMeasureType extends MeasureType {
             else if (dataType.isNumberFamily())
                 return new DoubleMinAggregator();
         }
-        throw new IllegalArgumentException(
-                "No aggregator for func '" + funcName + "' and return type '" + dataType + "'");
+        throw new IllegalArgumentException("No aggregator for func '" + funcName + "' and return type '" + dataType + "'");
     }
 
     private boolean isSum() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/basic/BigDecimalIngester.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/basic/BigDecimalIngester.java b/core-metadata/src/main/java/org/apache/kylin/measure/basic/BigDecimalIngester.java
index 6391a14..c7541ab 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/basic/BigDecimalIngester.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/basic/BigDecimalIngester.java
@@ -29,8 +29,7 @@ import org.apache.kylin.metadata.model.TblColRef;
 public class BigDecimalIngester extends MeasureIngester<BigDecimal> {
 
     @Override
-    public BigDecimal valueOf(String[] values, MeasureDesc measureDesc,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public BigDecimal valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         if (values.length > 1)
             throw new IllegalArgumentException();
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapIntersectDistinctCountAggFunc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapIntersectDistinctCountAggFunc.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapIntersectDistinctCountAggFunc.java
index 033c0ef..a1e2665 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapIntersectDistinctCountAggFunc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapIntersectDistinctCountAggFunc.java
@@ -17,12 +17,12 @@
 */
 package org.apache.kylin.measure.bitmap;
 
+import org.apache.kylin.measure.ParamAsMeasureCount;
+
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.kylin.measure.ParamAsMeasureCount;
-
 /**
  * BitmapIntersectDistinctCountAggFunc is an UDAF used for calculating the intersection of two or more bitmaps
  * Usage:   intersect_count(columnToCount, columnToFilter, filterList)
@@ -99,3 +99,4 @@ public class BitmapIntersectDistinctCountAggFunc implements ParamAsMeasureCount
         return result.result();
     }
 }
+

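[Editor's note] To make the Usage line above concrete: intersect_count(columnToCount, columnToFilter, filterList) keeps one bitmap of distinct columnToCount values per filter value and counts the intersection of those bitmaps. The toy below uses java.util.BitSet purely as a stand-in for Kylin's BitmapCounter to show the intersect-then-count semantics.

    import java.util.BitSet;

    // Toy stand-in for intersect_count(columnToCount, columnToFilter, filterList) with two filter values.
    public class IntersectCountDemo {
        public static void main(String[] args) {
            BitSet day1 = new BitSet();  // distinct ids seen under the first filter value
            BitSet day2 = new BitSet();  // distinct ids seen under the second filter value
            day1.set(1); day1.set(2); day1.set(3);
            day2.set(2); day2.set(3); day2.set(4);

            BitSet both = (BitSet) day1.clone();
            both.and(day2);                            // intersect the per-filter bitmaps
            System.out.println(both.cardinality());    // 2 -> ids present under both filters
        }
    }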
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapMeasureType.java
index d603116..e4fb079 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapMeasureType.java
@@ -80,14 +80,13 @@ public class BitmapMeasureType extends MeasureType<BitmapCounter> {
     @Override
     public void validate(FunctionDesc functionDesc) throws IllegalArgumentException {
         checkArgument(FUNC_COUNT_DISTINCT.equals(functionDesc.getExpression()),
-                "BitmapMeasureType only support function %s, got %s", FUNC_COUNT_DISTINCT,
-                functionDesc.getExpression());
-        checkArgument(functionDesc.getParameterCount() == 1, "BitmapMeasureType only support 1 parameter, got %d",
-                functionDesc.getParameterCount());
+                "BitmapMeasureType only support function %s, got %s", FUNC_COUNT_DISTINCT, functionDesc.getExpression());
+        checkArgument(functionDesc.getParameterCount() == 1,
+                "BitmapMeasureType only support 1 parameter, got %d", functionDesc.getParameterCount());
 
         String returnType = functionDesc.getReturnDataType().getName();
-        checkArgument(DATATYPE_BITMAP.equals(returnType), "BitmapMeasureType's return type must be %s, got %s",
-                DATATYPE_BITMAP, returnType);
+        checkArgument(DATATYPE_BITMAP.equals(returnType),
+                "BitmapMeasureType's return type must be %s, got %s", DATATYPE_BITMAP, returnType);
     }
 
     @Override
@@ -103,8 +102,7 @@ public class BitmapMeasureType extends MeasureType<BitmapCounter> {
             BitmapCounter current = factory.newBitmap();
 
             @Override
-            public BitmapCounter valueOf(String[] values, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public BitmapCounter valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 checkArgument(values.length == 1, "expect 1 value, got %s", Arrays.toString(values));
 
                 current.clear();
@@ -127,8 +125,7 @@ public class BitmapMeasureType extends MeasureType<BitmapCounter> {
             }
 
             @Override
-            public BitmapCounter reEncodeDictionary(BitmapCounter value, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
+            public BitmapCounter reEncodeDictionary(BitmapCounter value, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
                 if (!needDictionaryColumn(measureDesc.getFunction())) {
                     return value;
                 }
@@ -185,7 +182,8 @@ public class BitmapMeasureType extends MeasureType<BitmapCounter> {
         return true;
     }
 
-    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.of(FUNC_COUNT_DISTINCT, BitmapDistinctCountAggFunc.class,
+    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.of(
+            FUNC_COUNT_DISTINCT, BitmapDistinctCountAggFunc.class,
             FUNC_INTERSECT_COUNT_DISTINCT, BitmapIntersectDistinctCountAggFunc.class);
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapSerializer.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapSerializer.java
index d990893..c1b260d 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/BitmapSerializer.java
@@ -18,12 +18,12 @@
 
 package org.apache.kylin.measure.bitmap;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.datatype.DataTypeSerializer;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
 public class BitmapSerializer extends DataTypeSerializer<BitmapCounter> {
     private static final BitmapCounterFactory factory = RoaringBitmapCounterFactory.INSTANCE;
     private static final BitmapCounter DELEGATE = factory.newBitmap();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
index 47571ad..eec45f2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounter.java
@@ -18,6 +18,10 @@
 
 package org.apache.kylin.measure.bitmap;
 
+import org.apache.kylin.common.util.ByteBufferOutputStream;
+import org.roaringbitmap.buffer.ImmutableRoaringBitmap;
+import org.roaringbitmap.buffer.MutableRoaringBitmap;
+
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.Serializable;
@@ -25,10 +29,6 @@ import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.Iterator;
 
-import org.apache.kylin.common.util.ByteBufferOutputStream;
-import org.roaringbitmap.buffer.ImmutableRoaringBitmap;
-import org.roaringbitmap.buffer.MutableRoaringBitmap;
-
 /**
  * A {@link BitmapCounter} based on roaring bitmap.
  */
@@ -134,7 +134,8 @@ public class RoaringBitmapCounter implements BitmapCounter, Serializable {
 
     @Override
     public boolean equals(Object obj) {
-        return (obj instanceof RoaringBitmapCounter) && bitmap.equals(((RoaringBitmapCounter) obj).bitmap);
+        return (obj instanceof RoaringBitmapCounter) &&
+                bitmap.equals(((RoaringBitmapCounter) obj).bitmap);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
index a4ad199..822afa2 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/bitmap/RoaringBitmapCounterFactory.java
@@ -18,17 +18,16 @@
 
 package org.apache.kylin.measure.bitmap;
 
+import org.roaringbitmap.buffer.MutableRoaringBitmap;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.nio.ByteBuffer;
 
-import org.roaringbitmap.buffer.MutableRoaringBitmap;
-
 public class RoaringBitmapCounterFactory implements BitmapCounterFactory, Serializable {
     public static final BitmapCounterFactory INSTANCE = new RoaringBitmapCounterFactory();
 
-    private RoaringBitmapCounterFactory() {
-    }
+    private RoaringBitmapCounterFactory() {}
 
     @Override
     public BitmapCounter newBitmap() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/dim/DimCountDistinctMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/dim/DimCountDistinctMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/dim/DimCountDistinctMeasureType.java
index f3342cd..0b3fd94 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/dim/DimCountDistinctMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/dim/DimCountDistinctMeasureType.java
@@ -80,8 +80,7 @@ public class DimCountDistinctMeasureType extends MeasureType<Object> {
         return false;
     }
 
-    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.<String, Class<?>> of(FunctionDesc.FUNC_COUNT_DISTINCT,
-            DimCountDistinctAggFunc.class);
+    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.<String, Class<?>> of(FunctionDesc.FUNC_COUNT_DISTINCT, DimCountDistinctAggFunc.class);
 
     @Override
     public Map<String, Class<?>> getRewriteCalciteAggrFunctions() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
index 29090c7..de5ee25 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
@@ -111,8 +111,7 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
     }
 
     @Override
-    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
-            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc measureDesc) {
+    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, MeasureDesc measureDesc) {
         TblColRef extendedCol = getExtendedColumn(measureDesc.getFunction());
 
         if (!unmatchedDimensions.contains(extendedCol)) {
@@ -137,11 +136,9 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
         return true;
     }
 
-    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo,
-            Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo, Map<TblColRef, Dictionary<String>> dictionaryMap) {
         final TblColRef extended = getExtendedColumn(function);
-        final int extendedColumnInTupleIdx = returnTupleInfo.hasColumn(extended)
-                ? returnTupleInfo.getColumnIndex(extended) : -1;
+        final int extendedColumnInTupleIdx = returnTupleInfo.hasColumn(extended) ? returnTupleInfo.getColumnIndex(extended) : -1;
 
         if (extendedColumnInTupleIdx == -1) {
             throw new RuntimeException("Extended column is not required in returnTupleInfo");
@@ -211,8 +208,7 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
             }
 
             @Override
-            public ByteArray valueOf(String[] values, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public ByteArray valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 if (values.length <= 1)
                     throw new IllegalArgumentException();
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCMeasureType.java
index 8718869..51c5a66 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCMeasureType.java
@@ -95,12 +95,11 @@ public class HLLCMeasureType extends MeasureType<HLLCounter> {
     public MeasureIngester<HLLCounter> newIngester() {
         return new MeasureIngester<HLLCounter>() {
             private static final long serialVersionUID = 1L;
-
+            
             HLLCounter current = new HLLCounter(dataType.getPrecision());
 
             @Override
-            public HLLCounter valueOf(String[] values, MeasureDesc measureDesc,
-                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public HLLCounter valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 HLLCounter hllc = current;
                 hllc.clear();
                 if (values.length == 1) {
@@ -136,9 +135,8 @@ public class HLLCMeasureType extends MeasureType<HLLCounter> {
         return true;
     }
 
-    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.<String, Class<?>> of(FUNC_COUNT_DISTINCT,
-            HLLDistinctCountAggFunc.class);
-
+    static final Map<String, Class<?>> UDAF_MAP = ImmutableMap.<String, Class<?>> of(FUNC_COUNT_DISTINCT, HLLDistinctCountAggFunc.class);
+    
     @Override
     public Map<String, Class<?>> getRewriteCalciteAggrFunctions() {
         return UDAF_MAP;
@@ -147,5 +145,5 @@ public class HLLCMeasureType extends MeasureType<HLLCounter> {
     public static boolean isCountDistinct(FunctionDesc func) {
         return FUNC_COUNT_DISTINCT.equalsIgnoreCase(func.getExpression());
     }
-
+    
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCSerializer.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCSerializer.java b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCSerializer.java
index 8735ccb..df0cfaf 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCSerializer.java
@@ -18,12 +18,12 @@
 
 package org.apache.kylin.measure.hllc;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.datatype.DataTypeSerializer;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
 /**
  * @author yangli9
  * 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounter.java b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounter.java
index 7ef06d4..b793465 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounter.java
@@ -18,6 +18,10 @@
 
 package org.apache.kylin.measure.hllc;
 
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hashing;
+import org.apache.kylin.common.util.BytesUtil;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.nio.ByteBuffer;
@@ -25,11 +29,6 @@ import java.nio.charset.Charset;
 import java.util.Collection;
 import java.util.Map;
 
-import org.apache.kylin.common.util.BytesUtil;
-
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
-
 @SuppressWarnings("serial")
 public class HLLCounter implements Serializable, Comparable<HLLCounter> {
 
@@ -100,7 +99,7 @@ public class HLLCounter implements Serializable, Comparable<HLLCounter> {
         add(hashFunc.hashBytes(value, offset, length).asLong());
     }
 
-    public void addHashDirectly(long hash) {
+    public void addHashDirectly(long hash){
         add(hash);
     }
 
@@ -142,36 +141,36 @@ public class HLLCounter implements Serializable, Comparable<HLLCounter> {
         assert this.p == another.p;
         assert this.hashFunc == another.hashFunc;
         switch (register.getRegisterType()) {
-        case SINGLE_VALUE:
-            switch (another.getRegisterType()) {
             case SINGLE_VALUE:
-                if (register.getSize() > 0 && another.register.getSize() > 0) {
-                    register = ((SingleValueRegister) register).toSparse();
-                } else {
-                    SingleValueRegister sr = (SingleValueRegister) another.register;
-                    if (sr.getSize() > 0)
-                        register.set(sr.getSingleValuePos(), sr.getValue());
-                    return;
+                switch (another.getRegisterType()) {
+                    case SINGLE_VALUE:
+                        if (register.getSize() > 0 && another.register.getSize() > 0) {
+                            register = ((SingleValueRegister) register).toSparse();
+                        } else {
+                            SingleValueRegister sr = (SingleValueRegister) another.register;
+                            if (sr.getSize() > 0)
+                                register.set(sr.getSingleValuePos(), sr.getValue());
+                            return;
+                        }
+                        break;
+                    case SPARSE:
+                        register = ((SingleValueRegister) register).toSparse();
+                        break;
+                    case DENSE:
+                        register = ((SingleValueRegister) register).toDense(this.p);
+                        break;
+                    default:
+                        break;
                 }
+
                 break;
             case SPARSE:
-                register = ((SingleValueRegister) register).toSparse();
-                break;
-            case DENSE:
-                register = ((SingleValueRegister) register).toDense(this.p);
+                if (another.getRegisterType() == RegisterType.DENSE) {
+                    register = ((SparseRegister) register).toDense(p);
+                }
                 break;
             default:
                 break;
-            }
-
-            break;
-        case SPARSE:
-            if (another.getRegisterType() == RegisterType.DENSE) {
-                register = ((SparseRegister) register).toDense(p);
-            }
-            break;
-        default:
-            break;
         }
         register.merge(another.register);
         toDenseIfNeeded();
@@ -253,8 +252,7 @@ public class HLLCounter implements Serializable, Comparable<HLLCounter> {
             double er2 = Math.round(rate * 2 * 10000) / 100D;
             double er3 = Math.round(rate * 3 * 10000) / 100D;
             long size = Math.round(Math.pow(2, p));
-            System.out.println("HLLC" + p + ",\t" + size + " bytes,\t68% err<" + er + "%" + ",\t95% err<" + er2 + "%"
-                    + ",\t99.7% err<" + er3 + "%");
+            System.out.println("HLLC" + p + ",\t" + size + " bytes,\t68% err<" + er + "%" + ",\t95% err<" + er2 + "%" + ",\t99.7% err<" + er3 + "%");
         }
     }
 

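[Editor's note] The printout above reports 68/95/99.7% error bounds and register count for each precision p. The definition of `rate` is outside this hunk; assuming it is the usual HyperLogLog standard-error estimate 1.04/sqrt(2^p) (an assumption, consistent with the 1x/2x/3x multipliers printed), the numbers work out as in this hypothetical sketch.

    // Hypothetical reproduction of the table printed above, assuming rate = 1.04 / sqrt(2^p).
    public class HllErrorDemo {
        public static void main(String[] args) {
            for (int p = 10; p <= 16; p += 2) {
                long m = Math.round(Math.pow(2, p));         // number of registers
                double rate = 1.04 / Math.sqrt(m);
                double er = Math.round(rate * 10000) / 100D; // e.g. p=10 -> ~3.25%
                System.out.println("HLLC" + p + ": " + m + " registers, 68% err<" + er
                        + "%, 95% err<" + (er * 2) + "%, 99.7% err<" + (er * 3) + "%");
            }
        }
    }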
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounterOld.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounterOld.java b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounterOld.java
index 494e173..5cbdd43 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounterOld.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLCounterOld.java
@@ -294,7 +294,7 @@ public class HLLCounterOld implements Serializable, Comparable<HLLCounterOld> {
     /*public void writeRegistersArray(final ByteBuffer out) {
         out.put(this.registers);
     }
-    
+
     public void readRegistersArray(ByteBuffer in) {
         in.get(registers, 0, m);
         singleBucket = Integer.MIN_VALUE;
@@ -362,8 +362,7 @@ public class HLLCounterOld implements Serializable, Comparable<HLLCounterOld> {
             double er2 = Math.round(rate * 2 * 10000) / 100D;
             double er3 = Math.round(rate * 3 * 10000) / 100D;
             long size = Math.round(Math.pow(2, p));
-            System.out.println("HLLC" + p + ",\t" + size + " bytes,\t68% err<" + er + "%" + ",\t95% err<" + er2 + "%"
-                    + ",\t99.7% err<" + er3 + "%");
+            System.out.println("HLLC" + p + ",\t" + size + " bytes,\t68% err<" + er + "%" + ",\t95% err<" + er2 + "%" + ",\t99.7% err<" + er3 + "%");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLDistinctCountAggFunc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLDistinctCountAggFunc.java b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLDistinctCountAggFunc.java
index 7c062f8..c635cd6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLDistinctCountAggFunc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/hllc/HLLDistinctCountAggFunc.java
@@ -91,8 +91,7 @@ public class HLLDistinctCountAggFunc {
             } else {
                 long oldValue = Math.abs(this.value.longValue());
                 long take = Math.max(oldValue, value);
-                logger.warn("Error to aggregate holistic count distinct, old value " + oldValue + ", new value " + value
-                        + ", taking " + take);
+                logger.warn("Error to aggregate holistic count distinct, old value " + oldValue + ", new value " + value + ", taking " + take);
                 this.value = -take; // make it obvious that this value is wrong
             }
         }


[66/67] [abbrv] kylin git commit: KYLIN-216 Update draft

Posted by li...@apache.org.
KYLIN-216 Update draft


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/25a53673
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/25a53673
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/25a53673

Branch: refs/heads/master
Commit: 25a536734fc46ad71cceae24289733e90d214793
Parents: b6b71e8
Author: Luwei-Chen <ch...@apache.org>
Authored: Mon Jun 5 11:41:22 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Mon Jun 5 13:16:15 2017 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java | 104 ++++++++++++-------
 .../rest/controller2/CubeControllerV2.java      |  16 ---
 .../apache/kylin/rest/service/CubeService.java  |  78 +++++++++-----
 3 files changed, 121 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/25a53673/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 8546f76..32e2316 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -109,7 +109,8 @@ public class CubeManager implements IRealizationProvider {
                 if (CACHE.size() > 1) {
                     logger.warn("More than one singleton exist");
                     for (KylinConfig kylinConfig : CACHE.keySet()) {
-                        logger.warn("type: " + kylinConfig.getClass() + " reference: " + System.identityHashCode(kylinConfig.base()));
+                        logger.warn("type: " + kylinConfig.getClass() + " reference: "
+                                + System.identityHashCode(kylinConfig.base()));
                     }
                 }
                 return r;
@@ -160,7 +161,8 @@ public class CubeManager implements IRealizationProvider {
         }
 
         @Override
-        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey) throws IOException {
+        public void onEntityChange(Broadcaster broadcaster, String entity, Event event, String cacheKey)
+                throws IOException {
             String cubeName = cacheKey;
 
             if (event == Event.DROP)
@@ -168,7 +170,8 @@ public class CubeManager implements IRealizationProvider {
             else
                 reloadCubeLocal(cubeName);
 
-            for (ProjectInstance prj : ProjectManager.getInstance(config).findProjects(RealizationType.CUBE, cubeName)) {
+            for (ProjectInstance prj : ProjectManager.getInstance(config).findProjects(RealizationType.CUBE,
+                    cubeName)) {
                 broadcaster.notifyProjectDataUpdate(prj.getName());
             }
         }
@@ -214,19 +217,22 @@ public class CubeManager implements IRealizationProvider {
         return result;
     }
 
-    public DictionaryInfo buildDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable) throws IOException {
+    public DictionaryInfo buildDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable)
+            throws IOException {
         CubeDesc cubeDesc = cubeSeg.getCubeDesc();
         if (!cubeDesc.getAllColumnsNeedDictionaryBuilt().contains(col))
             return null;
 
         String builderClass = cubeDesc.getDictionaryBuilderClass(col);
-        DictionaryInfo dictInfo = getDictionaryManager().buildDictionary(cubeDesc.getModel(), col, inpTable, builderClass);
+        DictionaryInfo dictInfo = getDictionaryManager().buildDictionary(cubeDesc.getModel(), col, inpTable,
+                builderClass);
 
         saveDictionaryInfo(cubeSeg, col, dictInfo);
         return dictInfo;
     }
 
-    public DictionaryInfo saveDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable, Dictionary<String> dict) throws IOException {
+    public DictionaryInfo saveDictionary(CubeSegment cubeSeg, TblColRef col, IReadableTable inpTable,
+            Dictionary<String> dict) throws IOException {
         CubeDesc cubeDesc = cubeSeg.getCubeDesc();
         if (!cubeDesc.getAllColumnsNeedDictionaryBuilt().contains(col))
             return null;
@@ -263,7 +269,8 @@ public class CubeManager implements IRealizationProvider {
 
             info = dictMgr.getDictionaryInfo(dictResPath);
             if (info == null)
-                throw new IllegalStateException("No dictionary found by " + dictResPath + ", invalid cube state; cube segment" + cubeSeg + ", col " + col);
+                throw new IllegalStateException("No dictionary found by " + dictResPath
+                        + ", invalid cube state; cube segment" + cubeSeg + ", col " + col);
         } catch (IOException e) {
             throw new IllegalStateException("Failed to get dictionary for cube segment" + cubeSeg + ", col" + col, e);
         }
@@ -318,7 +325,8 @@ public class CubeManager implements IRealizationProvider {
     }
 
     // sync on update
-    public CubeInstance createCube(String cubeName, String projectName, CubeDesc desc, String owner) throws IOException {
+    public CubeInstance createCube(String cubeName, String projectName, CubeDesc desc, String owner)
+            throws IOException {
         logger.info("Creating cube '" + projectName + "-->" + cubeName + "' from desc '" + desc.getName() + "'");
 
         // save cube resource
@@ -326,9 +334,7 @@ public class CubeManager implements IRealizationProvider {
         cube.setOwner(owner);
 
         updateCubeWithRetry(new CubeUpdate(cube), 0);
-        if (!desc.isDraft()) {
-            ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cubeName, projectName, owner);
-        }
+        ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cubeName, projectName, owner);
 
         if (listener != null)
             listener.afterCubeCreate(cube);
@@ -343,7 +349,8 @@ public class CubeManager implements IRealizationProvider {
         cube.setOwner(owner);
 
         updateCubeWithRetry(new CubeUpdate(cube), 0);
-        ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cube.getName(), projectName, owner);
+        ProjectManager.getInstance(config).moveRealizationToProject(RealizationType.CUBE, cube.getName(), projectName,
+                owner);
 
         if (listener != null)
             listener.afterCubeCreate(cube);
@@ -458,12 +465,16 @@ public class CubeManager implements IRealizationProvider {
     }
 
     public CubeSegment appendSegment(CubeInstance cube, SourcePartition sourcePartition) throws IOException {
-        return appendSegment(cube, sourcePartition.getStartDate(), sourcePartition.getEndDate(), sourcePartition.getStartOffset(), sourcePartition.getEndOffset(), sourcePartition.getSourcePartitionOffsetStart(), sourcePartition.getSourcePartitionOffsetEnd());
+        return appendSegment(cube, sourcePartition.getStartDate(), sourcePartition.getEndDate(),
+                sourcePartition.getStartOffset(), sourcePartition.getEndOffset(),
+                sourcePartition.getSourcePartitionOffsetStart(), sourcePartition.getSourcePartitionOffsetEnd());
     }
 
-    CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) throws IOException {
+    CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset,
+            Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd)
+            throws IOException {
         checkBuildingSegment(cube);
-        
+
         // fix start/end a bit
         if (cube.getModel().getPartitionDesc().isPartitioned()) {
             // if missing start, set it to where last time ends
@@ -489,19 +500,22 @@ public class CubeManager implements IRealizationProvider {
         return newSegment;
     }
 
-    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset) throws IOException {
+    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset)
+            throws IOException {
         checkBuildingSegment(cube);
 
         CubeSegment newSegment = newSegment(cube, startDate, endDate, startOffset, endOffset);
 
         Pair<Boolean, Boolean> pair = CubeValidator.fitInSegments(cube.getSegments(), newSegment);
         if (pair.getFirst() == false || pair.getSecond() == false)
-            throw new IllegalArgumentException("The new refreshing segment " + newSegment + " does not match any existing segment in cube " + cube);
+            throw new IllegalArgumentException("The new refreshing segment " + newSegment
+                    + " does not match any existing segment in cube " + cube);
 
         if (startOffset > 0 || endOffset > 0) {
             CubeSegment toRefreshSeg = null;
             for (CubeSegment cubeSegment : cube.getSegments()) {
-                if (cubeSegment.getSourceOffsetStart() == startOffset && cubeSegment.getSourceOffsetEnd() == endOffset) {
+                if (cubeSegment.getSourceOffsetStart() == startOffset
+                        && cubeSegment.getSourceOffsetEnd() == endOffset) {
                     toRefreshSeg = cubeSegment;
                     break;
                 }
@@ -522,7 +536,8 @@ public class CubeManager implements IRealizationProvider {
         return newSegment;
     }
 
-    public CubeSegment mergeSegments(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, boolean force) throws IOException {
+    public CubeSegment mergeSegments(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset,
+            boolean force) throws IOException {
         if (cube.getSegments().isEmpty())
             throw new IllegalArgumentException("Cube " + cube + " has no segments");
         if (startDate >= endDate && startOffset >= endOffset)
@@ -536,9 +551,11 @@ public class CubeManager implements IRealizationProvider {
         if (isOffsetsOn) {
             // offset cube, merge by date range?
             if (startOffset == endOffset) {
-                Pair<CubeSegment, CubeSegment> pair = cube.getSegments(SegmentStatusEnum.READY).findMergeOffsetsByDateRange(startDate, endDate, Long.MAX_VALUE);
+                Pair<CubeSegment, CubeSegment> pair = cube.getSegments(SegmentStatusEnum.READY)
+                        .findMergeOffsetsByDateRange(startDate, endDate, Long.MAX_VALUE);
                 if (pair == null)
-                    throw new IllegalArgumentException("Find no segments to merge by date range " + startDate + "-" + endDate + " for cube " + cube);
+                    throw new IllegalArgumentException("Find no segments to merge by date range " + startDate + "-"
+                            + endDate + " for cube " + cube);
                 startOffset = pair.getFirst().getSourceOffsetStart();
                 endOffset = pair.getSecond().getSourceOffsetEnd();
             }
@@ -558,7 +575,9 @@ public class CubeManager implements IRealizationProvider {
 
         List<CubeSegment> mergingSegments = cube.getMergingSegments(newSegment);
         if (mergingSegments.size() <= 1)
-            throw new IllegalArgumentException("Range " + newSegment.getSourceOffsetStart() + "-" + newSegment.getSourceOffsetEnd() + " must contain at least 2 segments, but there is " + mergingSegments.size());
+            throw new IllegalArgumentException(
+                    "Range " + newSegment.getSourceOffsetStart() + "-" + newSegment.getSourceOffsetEnd()
+                            + " must contain at least 2 segments, but there is " + mergingSegments.size());
 
         CubeSegment first = mergingSegments.get(0);
         CubeSegment last = mergingSegments.get(mergingSegments.size() - 1);
@@ -583,7 +602,9 @@ public class CubeManager implements IRealizationProvider {
             }
 
             if (emptySegment.size() > 0) {
-                throw new IllegalArgumentException("Empty cube segment found, couldn't merge unless 'forceMergeEmptySegment' set to true: " + emptySegment);
+                throw new IllegalArgumentException(
+                        "Empty cube segment found, couldn't merge unless 'forceMergeEmptySegment' set to true: "
+                                + emptySegment);
             }
         }
 
@@ -622,13 +643,15 @@ public class CubeManager implements IRealizationProvider {
     private void checkBuildingSegment(CubeInstance cube) {
         int maxBuldingSeg = cube.getConfig().getMaxBuildingSegments();
         if (cube.getBuildingSegments().size() >= maxBuldingSeg) {
-            throw new IllegalStateException("There is already " + cube.getBuildingSegments().size() + " building segment; ");
+            throw new IllegalStateException(
+                    "There is already " + cube.getBuildingSegments().size() + " building segment; ");
         }
     }
 
     private void checkCubeIsPartitioned(CubeInstance cube) {
         if (cube.getDescriptor().getModel().getPartitionDesc().isPartitioned() == false) {
-            throw new IllegalStateException("there is no partition date column specified, only full build is supported");
+            throw new IllegalStateException(
+                    "there is no partition date column specified, only full build is supported");
         }
     }
 
@@ -652,14 +675,16 @@ public class CubeManager implements IRealizationProvider {
         String[] pkCols = join.getPrimaryKey();
         String snapshotResPath = cubeSegment.getSnapshotResPath(tableName);
         if (snapshotResPath == null)
-            throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment" + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
+            throw new IllegalStateException("No snaphot for table '" + tableName + "' found on cube segment"
+                    + cubeSegment.getCubeInstance().getName() + "/" + cubeSegment);
 
         try {
             SnapshotTable snapshot = getSnapshotManager().getSnapshotTable(snapshotResPath);
             TableDesc tableDesc = getMetadataManager().getTableDesc(tableName);
             return new LookupStringTable(tableDesc, pkCols, snapshot);
         } catch (IOException e) {
-            throw new IllegalStateException("Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
+            throw new IllegalStateException(
+                    "Failed to load lookup table " + tableName + " from snapshot " + snapshotResPath, e);
         }
     }
 
@@ -703,7 +728,8 @@ public class CubeManager implements IRealizationProvider {
 
     public void promoteNewlyBuiltSegments(CubeInstance cube, CubeSegment newSegment) throws IOException {
         if (StringUtils.isBlank(newSegment.getStorageLocationIdentifier()))
-            throw new IllegalStateException("For cube " + cube + ", segment " + newSegment + " missing StorageLocationIdentifier");
+            throw new IllegalStateException(
+                    "For cube " + cube + ", segment " + newSegment + " missing StorageLocationIdentifier");
 
         if (StringUtils.isBlank(newSegment.getLastBuildJobID()))
             throw new IllegalStateException("For cube " + cube + ", segment " + newSegment + " missing LastBuildJobID");
@@ -715,7 +741,8 @@ public class CubeManager implements IRealizationProvider {
         List<CubeSegment> tobe = cube.calculateToBeSegments(newSegment);
 
         if (tobe.contains(newSegment) == false)
-            throw new IllegalStateException("For cube " + cube + ", segment " + newSegment + " is expected but not in the tobe " + tobe);
+            throw new IllegalStateException(
+                    "For cube " + cube + ", segment " + newSegment + " is expected but not in the tobe " + tobe);
 
         newSegment.setStatus(SegmentStatusEnum.READY);
 
@@ -728,7 +755,8 @@ public class CubeManager implements IRealizationProvider {
         logger.info("Promoting cube " + cube + ", new segment " + newSegment + ", to remove segments " + toRemoveSegs);
 
         CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToRemoveSegs(toRemoveSegs.toArray(new CubeSegment[toRemoveSegs.size()])).setToUpdateSegs(newSegment).setStatus(RealizationStatusEnum.READY);
+        cubeBuilder.setToRemoveSegs(toRemoveSegs.toArray(new CubeSegment[toRemoveSegs.size()]))
+                .setToUpdateSegs(newSegment).setStatus(RealizationStatusEnum.READY);
         updateCube(cubeBuilder);
     }
 
@@ -736,7 +764,8 @@ public class CubeManager implements IRealizationProvider {
         List<CubeSegment> tobe = cube.calculateToBeSegments(newSegments);
         List<CubeSegment> newList = Arrays.asList(newSegments);
         if (tobe.containsAll(newList) == false) {
-            throw new IllegalStateException("For cube " + cube + ", the new segments " + newList + " do not fit in its current " + cube.getSegments() + "; the resulted tobe is " + tobe);
+            throw new IllegalStateException("For cube " + cube + ", the new segments " + newList
+                    + " do not fit in its current " + cube.getSegments() + "; the resulted tobe is " + tobe);
         }
     }
 
@@ -778,7 +807,9 @@ public class CubeManager implements IRealizationProvider {
             CubeDesc cubeDesc = CubeDescManager.getInstance(config).getCubeDesc(cube.getDescName());
             checkNotNull(cubeDesc, "cube descriptor '%s' (for cube '%s') not found", cube.getDescName(), cubeName);
             if (!isSpecialTestCube(cubeName))
-                checkState(cubeDesc.getName().equals(cubeName), "cube name '%s' must be same as descriptor name '%s', but it is not", cubeName, cubeDesc.getName());
+                checkState(cubeDesc.getName().equals(cubeName),
+                        "cube name '%s' must be same as descriptor name '%s', but it is not", cubeName,
+                        cubeDesc.getName());
 
             if (!cubeDesc.getError().isEmpty()) {
                 cube.setStatus(RealizationStatusEnum.DESCBROKEN);
@@ -809,7 +840,8 @@ public class CubeManager implements IRealizationProvider {
 
     private boolean isSpecialTestCube(String cubeName) {
         return cubeName.equals("kylin_sales_cube") //
-                || config.isDevEnv() && (cubeName.startsWith("test_kylin_cube") || cubeName.startsWith("test_streaming"));
+                || config.isDevEnv()
+                        && (cubeName.startsWith("test_kylin_cube") || cubeName.startsWith("test_streaming"));
     }
 
     private MetadataManager getMetadataManager() {
@@ -906,7 +938,8 @@ public class CubeManager implements IRealizationProvider {
                     hole.setDateRangeStart(first.getDateRangeEnd());
                     hole.setDateRangeEnd(second.getDateRangeStart());
                 }
-                hole.setName(CubeSegment.makeSegmentName(hole.getDateRangeStart(), hole.getDateRangeEnd(), hole.getSourceOffsetStart(), hole.getSourceOffsetEnd()));
+                hole.setName(CubeSegment.makeSegmentName(hole.getDateRangeStart(), hole.getDateRangeEnd(),
+                        hole.getSourceOffsetStart(), hole.getSourceOffsetEnd()));
                 holes.add(hole);
             }
         }
@@ -924,7 +957,8 @@ public class CubeManager implements IRealizationProvider {
         List<DictionaryDesc> dictionaryDescList = cubeDesc.getDictionaries();
         if (dictionaryDescList != null) {
             for (DictionaryDesc dictionaryDesc : dictionaryDescList) {
-                if (dictionaryDesc.getBuilderClass() != null && dictionaryDesc.getBuilderClass().equalsIgnoreCase(GLOBAL_DICTIONNARY_CLASS)) {
+                if (dictionaryDesc.getBuilderClass() != null
+                        && dictionaryDesc.getBuilderClass().equalsIgnoreCase(GLOBAL_DICTIONNARY_CLASS)) {
                     for (int i = 0; i < factDictCols.size(); i++) {
                         if (factDictCols.get(i).equals(dictionaryDesc.getColumnRef())) {
                             uhcIndex[i] = 1;

http://git-wip-us.apache.org/repos/asf/kylin/blob/25a53673/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
index 2edfe7d..aac00ec 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/CubeControllerV2.java
@@ -488,22 +488,6 @@ public class CubeControllerV2 extends BasicController {
         return new EnvelopeResponse(ResponseCode.CODE_SUCCESS, cubeService.enableCube(cube), "");
     }
 
-    @RequestMapping(value = "/{cubeName}", method = { RequestMethod.DELETE }, produces = {
-            "application/vnd.apache.kylin-v2+json" })
-    @ResponseBody
-    public void deleteCubeV2(@PathVariable String cubeName) throws IOException {
-        Message msg = MsgPicker.getMsg();
-
-        CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
-        if (null == cube) {
-            throw new BadRequestException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
-        }
-
-        //drop Cube
-        cubeService.deleteCube(cube);
-
-    }
-
     /**
      * get Hbase Info
      *

http://git-wip-us.apache.org/repos/asf/kylin/blob/25a53673/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 3c0bbc6..ebce61b 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -110,7 +110,8 @@ public class CubeService extends BasicService {
 
         if (modelName != null) {
             for (CubeInstance cubeInstance : cubeInstances) {
-                boolean isCubeMatch = cubeInstance.getDescriptor().getModelName().toLowerCase().equals(modelName.toLowerCase());
+                boolean isCubeMatch = cubeInstance.getDescriptor().getModelName().toLowerCase()
+                        .equals(modelName.toLowerCase());
                 if (isCubeMatch) {
                     filterModelCubes.add(cubeInstance);
                 }
@@ -121,7 +122,8 @@ public class CubeService extends BasicService {
 
         List<CubeInstance> filterCubes = new ArrayList<CubeInstance>();
         for (CubeInstance cubeInstance : filterModelCubes) {
-            boolean isCubeMatch = (null == cubeName) || cubeInstance.getName().toLowerCase().contains(cubeName.toLowerCase());
+            boolean isCubeMatch = (null == cubeName)
+                    || cubeInstance.getName().toLowerCase().contains(cubeName.toLowerCase());
 
             if (isCubeMatch) {
                 filterCubes.add(cubeInstance);
@@ -131,7 +133,8 @@ public class CubeService extends BasicService {
         return filterCubes;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance updateCubeCost(CubeInstance cube, int cost) throws IOException {
 
         if (cube.getCost() == cost) {
@@ -222,7 +225,8 @@ public class CubeService extends BasicService {
             if (projectDataModel.getType() == RealizationType.CUBE) {
                 CubeInstance cube = getCubeManager().getCube(projectDataModel.getRealization());
                 if (cube == null) {
-                    logger.error("Project " + projectName + " contains realization " + projectDataModel.getRealization() + " which is not found by CubeManager");
+                    logger.error("Project " + projectName + " contains realization " + projectDataModel.getRealization()
+                            + " which is not found by CubeManager");
                     continue;
                 }
                 if (cube.equals(target)) {
@@ -233,11 +237,14 @@ public class CubeService extends BasicService {
         return false;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
-    public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newProjectName, boolean forceUpdate) throws IOException {
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newProjectName, boolean forceUpdate)
+            throws IOException {
         Message msg = MsgPicker.getMsg();
 
-        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
+        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null,
+                EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
         if (!cubingJobs.isEmpty()) {
             throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
@@ -251,11 +258,15 @@ public class CubeService extends BasicService {
         if (!desc.isDraft()) {
             int cuboidCount = CuboidCLI.simulateCuboidGeneration(updatedCubeDesc, false);
             logger.info("Updated cube " + cube.getName() + " has " + cuboidCount + " cuboids");
+        }
+
+        ProjectManager projectManager = getProjectManager();
+        if (!isCubeInProject(newProjectName, cube)) {
+            String owner = SecurityContextHolder.getContext().getAuthentication().getName();
+            ProjectInstance newProject = projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(),
+                    newProjectName, owner);
 
-            ProjectManager projectManager = getProjectManager();
-            if (!isCubeInProject(newProjectName, cube)) {
-                String owner = SecurityContextHolder.getContext().getAuthentication().getName();
-                ProjectInstance newProject = projectManager.moveRealizationToProject(RealizationType.CUBE, cube.getName(), newProjectName, owner);
+            if (!desc.isDraft()) {
                 accessService.inherit(cube, newProject);
             }
         }
@@ -263,11 +274,13 @@ public class CubeService extends BasicService {
         return updatedCubeDesc;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'MANAGEMENT')")
     public void deleteCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
-        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR));
+        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null,
+                EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR));
         if (!cubingJobs.isEmpty()) {
             throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
@@ -292,7 +305,8 @@ public class CubeService extends BasicService {
      * @throws IOException
      * @throws JobException
      */
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance purgeCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -314,7 +328,8 @@ public class CubeService extends BasicService {
      * @throws IOException
      * @throws JobException
      */
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance disableCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -343,7 +358,8 @@ public class CubeService extends BasicService {
      * @return
      * @throws IOException
      */
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance enableCube(CubeInstance cube) throws IOException {
         Message msg = MsgPicker.getMsg();
 
@@ -358,12 +374,14 @@ public class CubeService extends BasicService {
             throw new BadRequestException(String.format(msg.getNO_READY_SEGMENT(), cubeName));
         }
 
-        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
+        final List<CubingJob> cubingJobs = jobService.listAllCubingJobs(cube.getName(), null,
+                EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
         if (!cubingJobs.isEmpty()) {
             throw new BadRequestException(msg.getENABLE_WITH_RUNNING_JOB());
         }
         if (!cube.getDescriptor().checkSignature()) {
-            throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+            throw new BadRequestException(
+                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
         }
 
         try {
@@ -393,7 +411,8 @@ public class CubeService extends BasicService {
             }
         }
 
-        metrics.increase("aveStorage", (metrics.get("totalCubes") == 0) ? 0 : metrics.get("totalStorage") / metrics.get("totalCubes"));
+        metrics.increase("aveStorage",
+                (metrics.get("totalCubes") == 0) ? 0 : metrics.get("totalStorage") / metrics.get("totalCubes"));
 
         return metrics;
     }
@@ -428,26 +447,31 @@ public class CubeService extends BasicService {
         return hr;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
     public void updateCubeNotifyList(CubeInstance cube, List<String> notifyList) throws IOException {
         CubeDesc desc = cube.getDescriptor();
         desc.setNotifyList(notifyList);
         getCubeDescManager().updateCubeDesc(desc);
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
-    public CubeInstance rebuildLookupSnapshot(CubeInstance cube, String segmentName, String lookupTable) throws IOException {
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    public CubeInstance rebuildLookupSnapshot(CubeInstance cube, String segmentName, String lookupTable)
+            throws IOException {
         CubeSegment seg = cube.getSegment(segmentName, SegmentStatusEnum.READY);
         getCubeManager().buildSnapshotTable(seg, lookupTable);
 
         return cube;
     }
 
-    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN
+            + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION')  or hasPermission(#cube, 'MANAGEMENT')")
     public CubeInstance deleteSegment(CubeInstance cube, String segmentName) throws IOException {
         Message msg = MsgPicker.getMsg();
 
-        if (!segmentName.equals(cube.getSegments().get(0).getName()) && !segmentName.equals(cube.getSegments().get(cube.getSegments().size() - 1).getName())) {
+        if (!segmentName.equals(cube.getSegments().get(0).getName())
+                && !segmentName.equals(cube.getSegments().get(cube.getSegments().size() - 1).getName())) {
             throw new BadRequestException(String.format(msg.getDELETE_NOT_FIRST_LAST_SEG(), segmentName));
         }
         CubeSegment toDelete = null;
@@ -497,7 +521,8 @@ public class CubeService extends BasicService {
     public void updateOnNewSegmentReady(String cubeName) {
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase())
+                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
             CubeInstance cube = getCubeManager().getCube(cubeName);
             if (cube != null) {
                 CubeSegment seg = cube.getLatestBuiltSegment();
@@ -555,7 +580,8 @@ public class CubeService extends BasicService {
                 cube = getCubeManager().getCube(cubeName);
                 Pair<Long, Long> offsets = getCubeManager().autoMergeCubeSegments(cube);
                 if (offsets != null) {
-                    CubeSegment newSeg = getCubeManager().mergeSegments(cube, 0, 0, offsets.getFirst(), offsets.getSecond(), true);
+                    CubeSegment newSeg = getCubeManager().mergeSegments(cube, 0, 0, offsets.getFirst(),
+                            offsets.getSecond(), true);
                     logger.debug("Will submit merge job on " + newSeg);
                     DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(newSeg, "SYSTEM");
                     getExecutableManager().addJob(job);
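
A behavioural detail that is easy to miss in the updateCubeAndDesc hunk earlier in this file's diff: before the change, moving the cube to the target project (and inheriting ACLs on it) only happened for non-draft descriptors; after the change, the project move always runs when the cube is not yet in the target project, and only the ACL inheritance remains gated on !desc.isDraft(). Below is a small runnable sketch of the new ordering; the Ops interface and its log-only implementation are placeholders standing in for Kylin's ProjectManager and AccessService, not real Kylin types.

public class UpdateCubeFlowSketch {

    // Placeholder stand-ins for Kylin's ProjectManager / AccessService; illustrative only.
    interface Ops {
        boolean isCubeInProject(String project, String cube);
        void moveRealizationToProject(String cube, String project, String owner);
        void inheritAcl(String cube, String project);
    }

    // New ordering from the hunk: the project move is no longer gated on draft status,
    // only the ACL inheritance is.
    static void afterDescUpdate(Ops ops, String cube, String newProject, String owner, boolean isDraft) {
        if (!ops.isCubeInProject(newProject, cube)) {
            ops.moveRealizationToProject(cube, newProject, owner);
            if (!isDraft) {
                ops.inheritAcl(cube, newProject);
            }
        }
    }

    public static void main(String[] args) {
        Ops logOnly = new Ops() {
            public boolean isCubeInProject(String project, String cube) { return false; }
            public void moveRealizationToProject(String cube, String project, String owner) {
                System.out.println("move " + cube + " to " + project + " as " + owner);
            }
            public void inheritAcl(String cube, String project) {
                System.out.println("inherit ACL for " + cube + " in " + project);
            }
        };
        // A draft cube is still moved to the new project, but ACLs are not inherited.
        afterDescUpdate(logOnly, "my_cube", "new_project", "ADMIN", true);
    }
}

The printed output shows that a draft cube is still moved while ACL inheritance is skipped, which is exactly the difference the hunk introduces.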


[35/67] [abbrv] kylin git commit: minor, fix job controller

Posted by li...@apache.org.
minor, fix job controller


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c341a627
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c341a627
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c341a627

Branch: refs/heads/master
Commit: c341a627de025dc4aa4c20a03c4337f685b476c1
Parents: e3a79c8
Author: Roger Shi <ro...@hotmail.com>
Authored: Sat May 27 18:34:08 2017 +0800
Committer: 成 <ch...@kyligence.io>
Committed: Sat May 27 18:36:33 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/rest/controller2/JobControllerV2.java  | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c341a627/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java b/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
index abc8621..2bcc11b 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller2/JobControllerV2.java
@@ -131,18 +131,21 @@ public class JobControllerV2 extends BasicController {
         if (sortby.equals("last_modify")) {
             if (reverse) {
                 Collections.sort(jobInstanceList, lastModifyComparatorReverse);
+            } else {
+                Collections.sort(jobInstanceList, lastModifyComparator);
             }
-            Collections.sort(jobInstanceList, lastModifyComparator);
         } else if (sortby.equals("job_name")) {
             if (reverse) {
                 Collections.sort(jobInstanceList, jobNameComparatorReverse);
+            } else {
+                Collections.sort(jobInstanceList, jobNameComparator);
             }
-            Collections.sort(jobInstanceList, jobNameComparator);
         } else if (sortby.equals("cube_name")) {
             if (reverse) {
                 Collections.sort(jobInstanceList, cubeNameComparatorReverse);
+            } else {
+                Collections.sort(jobInstanceList, cubeNameComparator);
             }
-            Collections.sort(jobInstanceList, cubeNameComparator);
         }
 
         int offset = pageOffset * pageSize;
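
For readers scanning the hunk above: the bug was that each branch sorted the list in reverse order and then an unconditional Collections.sort immediately re-sorted it forward, so the reverse flag never took effect; the fix moves the forward sort into an else branch. A minimal, self-contained sketch of the corrected shape follows; the class name and the String job list are illustrative stand-ins, not Kylin code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class SortOnceSketch {
    public static void main(String[] args) {
        List<String> jobNames = new ArrayList<>(Arrays.asList("b-job", "a-job", "c-job"));
        Comparator<String> byName = Comparator.naturalOrder();
        boolean reverse = true;

        // Buggy shape reverted by the fix: the reverse sort ran first and was then
        // overwritten by an unconditional forward sort.
        //
        //     if (reverse) {
        //         Collections.sort(jobNames, byName.reversed());
        //     }
        //     Collections.sort(jobNames, byName);   // always wins
        //
        // Fixed shape, as in the hunk above: exactly one sort runs.
        if (reverse) {
            Collections.sort(jobNames, byName.reversed());
        } else {
            Collections.sort(jobNames, byName);
        }

        System.out.println(jobNames); // [c-job, b-job, a-job] when reverse is true
    }
}

With reverse set to false, only the forward sort runs, which is the behaviour the original code accidentally forced for every request.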


[03/67] [abbrv] kylin git commit: Revert "reformat code"

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 9c29f38..fc52701 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -95,8 +95,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
         byte[][] splitKeys;
         if (statsEnabled) {
             final Map<Long, Double> cuboidSizeMap = new CubeStatsReader(cubeSegment, kylinConfig).getCuboidSizeMap();
-            splitKeys = getRegionSplitsFromCuboidStatistics(cuboidSizeMap, kylinConfig, cubeSegment,
-                    partitionFilePath.getParent());
+            splitKeys = getRegionSplitsFromCuboidStatistics(cuboidSizeMap, kylinConfig, cubeSegment, partitionFilePath.getParent());
         } else {
             splitKeys = getRegionSplits(conf, partitionFilePath);
         }
@@ -150,9 +149,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
         return result;
     }
 
-    public static byte[][] getRegionSplitsFromCuboidStatistics(final Map<Long, Double> cubeSizeMap,
-            final KylinConfig kylinConfig, final CubeSegment cubeSegment, final Path hfileSplitsOutputFolder)
-            throws IOException {
+    public static byte[][] getRegionSplitsFromCuboidStatistics(final Map<Long, Double> cubeSizeMap, final KylinConfig kylinConfig, final CubeSegment cubeSegment, final Path hfileSplitsOutputFolder) throws IOException {
 
         final CubeDesc cubeDesc = cubeSegment.getCubeDesc();
         float cut = cubeDesc.getConfig().getKylinHBaseRegionCut();
@@ -185,8 +182,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
             }
 
             if (nRegion != original) {
-                logger.info(
-                        "Region count is adjusted from " + original + " to " + nRegion + " to help random sharding");
+                logger.info("Region count is adjusted from " + original + " to " + nRegion + " to help random sharding");
             }
         }
 
@@ -217,13 +213,10 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 }
 
                 if (shardNum > nRegion) {
-                    logger.info(
-                            String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions, reduce to %d",
-                                    cuboidId, estimatedSize, shardNum, nRegion));
+                    logger.info(String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions, reduce to %d", cuboidId, estimatedSize, shardNum, nRegion));
                     shardNum = nRegion;
                 } else {
-                    logger.info(String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions", cuboidId,
-                            estimatedSize, shardNum));
+                    logger.info(String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions", cuboidId, estimatedSize, shardNum));
                 }
 
                 cuboidShards.put(cuboidId, (short) shardNum);
@@ -236,8 +229,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
             }
 
             for (int i = 0; i < nRegion; ++i) {
-                logger.info(String.format("Region %d's estimated size is %.2f MB, accounting for %.2f percent", i,
-                        regionSizes[i], 100.0 * regionSizes[i] / totalSizeInM));
+                logger.info(String.format("Region %d's estimated size is %.2f MB, accounting for %.2f percent", i, regionSizes[i], 100.0 * regionSizes[i] / totalSizeInM));
             }
 
             CuboidShardUtil.saveCuboidShards(cubeSegment, cuboidShards, nRegion);
@@ -255,8 +247,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 if (size >= mbPerRegion || (size + cubeSizeMap.get(cuboidId)) >= mbPerRegion * 1.2) {
                     // if the size already bigger than threshold, or it will exceed by 20%, cut for next region
                     regionSplit.add(cuboidId);
-                    logger.info("Region " + regionIndex + " will be " + size + " MB, contains cuboids < " + cuboidId
-                            + " (" + cuboidCount + ") cuboids");
+                    logger.info("Region " + regionIndex + " will be " + size + " MB, contains cuboids < " + cuboidId + " (" + cuboidCount + ") cuboids");
                     size = 0;
                     cuboidCount = 0;
                     regionIndex++;
@@ -274,8 +265,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
         }
     }
 
-    protected static void saveHFileSplits(final List<HashMap<Long, Double>> innerRegionSplits, int mbPerRegion,
-            final Path outputFolder, final KylinConfig kylinConfig) throws IOException {
+    protected static void saveHFileSplits(final List<HashMap<Long, Double>> innerRegionSplits, int mbPerRegion, final Path outputFolder, final KylinConfig kylinConfig) throws IOException {
 
         if (outputFolder == null) {
             logger.warn("outputFolder for hfile split file is null, skip inner region split");
@@ -334,8 +324,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
                     logger.info(String.format("Region %d's hfile %d size is %.2f mb", i, j, accumulatedSize));
                     byte[] split = new byte[RowConstants.ROWKEY_SHARD_AND_CUBOID_LEN];
                     BytesUtil.writeUnsigned(i, split, 0, RowConstants.ROWKEY_SHARDID_LEN);
-                    System.arraycopy(Bytes.toBytes(cuboid), 0, split, RowConstants.ROWKEY_SHARDID_LEN,
-                            RowConstants.ROWKEY_CUBOIDID_LEN);
+                    System.arraycopy(Bytes.toBytes(cuboid), 0, split, RowConstants.ROWKEY_SHARDID_LEN, RowConstants.ROWKEY_CUBOIDID_LEN);
                     splits.add(split);
                     accumulatedSize = 0;
                     j++;
@@ -345,10 +334,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
 
         }
 
-        SequenceFile.Writer hfilePartitionWriter = SequenceFile.createWriter(hbaseConf,
-                SequenceFile.Writer.file(hfilePartitionFile),
-                SequenceFile.Writer.keyClass(ImmutableBytesWritable.class),
-                SequenceFile.Writer.valueClass(NullWritable.class));
+        SequenceFile.Writer hfilePartitionWriter = SequenceFile.createWriter(hbaseConf, SequenceFile.Writer.file(hfilePartitionFile), SequenceFile.Writer.keyClass(ImmutableBytesWritable.class), SequenceFile.Writer.valueClass(NullWritable.class));
 
         for (int i = 0; i < splits.size(); i++) {
             hfilePartitionWriter.append(new ImmutableBytesWritable(splits.get(i)), NullWritable.get());
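
As background for the getRegionSplitsFromCuboidStatistics hunks above: the loop accumulates estimated cuboid sizes (taken from CubeStatsReader in the real code) and starts a new HBase region once the running size reaches the per-region budget, or would overshoot it by more than 20%. Below is a rough, self-contained sketch of just that accumulation rule; the sizes, the 400 MB budget, and the class name are made up for illustration.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class RegionCutSketch {
    public static void main(String[] args) {
        // Hypothetical cuboid id -> estimated size in MB; real values come from cube statistics.
        Map<Long, Double> cuboidSizeMB = new LinkedHashMap<>();
        cuboidSizeMB.put(255L, 120.0);
        cuboidSizeMB.put(127L, 300.0);
        cuboidSizeMB.put(63L, 80.0);
        cuboidSizeMB.put(31L, 250.0);
        cuboidSizeMB.put(15L, 60.0);

        double mbPerRegion = 400.0;                 // per-region budget, illustrative only
        double size = 0;
        List<Long> regionSplit = new ArrayList<>(); // cuboid ids where a new region begins

        for (Map.Entry<Long, Double> e : cuboidSizeMB.entrySet()) {
            // Cut when the budget is already reached, or when adding this cuboid
            // would overshoot the budget by more than 20%.
            if (size >= mbPerRegion || (size + e.getValue()) >= mbPerRegion * 1.2) {
                regionSplit.add(e.getKey());
                size = 0;
            }
            size += e.getValue();
        }
        System.out.println("New regions start before cuboids: " + regionSplit);
    }
}

Running it prints "New regions start before cuboids: [63]", i.e. the third cuboid opens a second region once the first two exceed the 400 MB budget.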

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index 7a5f195..feb4842 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -59,8 +59,7 @@ public class CubeHTableUtil {
         CubeDesc cubeDesc = cubeInstance.getDescriptor();
         KylinConfig kylinConfig = cubeDesc.getConfig();
 
-        HTableDescriptor tableDesc = new HTableDescriptor(
-                TableName.valueOf(cubeSegment.getStorageLocationIdentifier()));
+        HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(cubeSegment.getStorageLocationIdentifier()));
         tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
         tableDesc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
         tableDesc.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
@@ -104,8 +103,7 @@ public class CubeHTableUtil {
             DeployCoprocessorCLI.deployCoprocessor(tableDesc);
 
             admin.createTable(tableDesc, splitKeys);
-            Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)),
-                    "table " + tableName + " created, but is not available due to some reasons");
+            Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
             logger.info("create hbase table " + tableName + " done.");
         } finally {
             IOUtils.closeQuietly(admin);
@@ -146,8 +144,7 @@ public class CubeHTableUtil {
 
             logger.info("creating hbase table " + tableName);
             admin.createTable(tableDesc, null);
-            Preconditions.checkArgument(admin.isTableAvailable(tableName),
-                    "table " + tableName + " created, but is not available due to some reasons");
+            Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
             logger.info("create hbase table " + tableName + " done.");
         } finally {
             IOUtils.closeQuietly(admin);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index 42fb283..df3cf08 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -87,8 +87,7 @@ public class DeprecatedGCStep extends AbstractExecutable {
     private void dropHiveTable(ExecutableContext context) throws IOException {
         final String hiveTable = this.getOldHiveTable();
         if (StringUtils.isNotEmpty(hiveTable)) {
-            final String dropSQL = "USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";"
-                    + " DROP TABLE IF EXISTS  " + hiveTable + ";";
+            final String dropSQL = "USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";" + " DROP TABLE IF EXISTS  " + hiveTable + ";";
             final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
             hiveCmdBuilder.addStatement(dropSQL);
             context.getConfig().getCliCommandExecutor().execute(hiveCmdBuilder.build());
@@ -196,4 +195,4 @@ public class DeprecatedGCStep extends AbstractExecutable {
         return getParam(OLD_HIVE_TABLE);
     }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index bcea725..6587d4e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -101,8 +101,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
             final KeyValue keyValue = keyValueCreators.get(i).create(key, 0, key.length, values);
             final Put put = new Put(copy(key, 0, key.length));
             byte[] family = copy(keyValue.getFamilyArray(), keyValue.getFamilyOffset(), keyValue.getFamilyLength());
-            byte[] qualifier = copy(keyValue.getQualifierArray(), keyValue.getQualifierOffset(),
-                    keyValue.getQualifierLength());
+            byte[] qualifier = copy(keyValue.getQualifierArray(), keyValue.getQualifierOffset(), keyValue.getQualifierLength());
             byte[] value = copy(keyValue.getValueArray(), keyValue.getValueOffset(), keyValue.getValueLength());
             put.add(family, qualifier, value);
             puts.add(put);
@@ -119,8 +118,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
             if (hTable != null) {
                 hTable.put(puts);
             }
-            logger.info(
-                    "commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
+            logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
             puts.clear();
         }
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
index fe53290..31cb189 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
@@ -97,8 +97,7 @@ public class HBaseMROutput2Transition implements IMROutput2 {
             int reducerNum = 1;
             Class mapperClass = job.getMapperClass();
             if (mapperClass == HiveToBaseCuboidMapper.class || mapperClass == NDCuboidMapper.class) {
-                reducerNum = ReducerNumSizing.getLayeredCubingReduceTaskNum(segment,
-                        AbstractHadoopJob.getTotalMapInputMB(job), level);
+                reducerNum = ReducerNumSizing.getLayeredCubingReduceTaskNum(segment, AbstractHadoopJob.getTotalMapInputMB(job), level);
             } else if (mapperClass == InMemCuboidMapper.class) {
                 reducerNum = ReducerNumSizing.getInmemCubingReduceTaskNum(segment);
             }
@@ -121,10 +120,8 @@ public class HBaseMROutput2Transition implements IMROutput2 {
             }
 
             @Override
-            public void addStepPhase2_BuildCube(CubeSegment seg, List<CubeSegment> mergingSegments,
-                    DefaultChainedExecutable jobFlow) {
-                jobFlow.addTask(
-                        steps.createMergeCuboidDataStep(seg, mergingSegments, jobFlow.getId(), MergeCuboidJob.class));
+            public void addStepPhase2_BuildCube(CubeSegment seg, List<CubeSegment> mergingSegments, DefaultChainedExecutable jobFlow) {
+                jobFlow.addTask(steps.createMergeCuboidDataStep(seg, mergingSegments, jobFlow.getId(), MergeCuboidJob.class));
                 jobFlow.addTask(steps.createConvertCuboidToHfileStep(jobFlow.getId()));
                 jobFlow.addTask(steps.createBulkLoadStep(jobFlow.getId()));
             }
@@ -141,10 +138,9 @@ public class HBaseMROutput2Transition implements IMROutput2 {
         };
     }
 
-    public static class HBaseMergeMROutputFormat implements IMRMergeOutputFormat {
+    public static class HBaseMergeMROutputFormat implements IMRMergeOutputFormat{
 
-        private static final Pattern JOB_NAME_PATTERN = Pattern
-                .compile("kylin-([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})");
+        private static final Pattern JOB_NAME_PATTERN = Pattern.compile("kylin-([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})");
 
         @Override
         public void configureJobInput(Job job, String input) throws Exception {
@@ -153,8 +149,7 @@ public class HBaseMROutput2Transition implements IMROutput2 {
 
         @Override
         public void configureJobOutput(Job job, String output, CubeSegment segment) throws Exception {
-            int reducerNum = ReducerNumSizing.getLayeredCubingReduceTaskNum(segment,
-                    AbstractHadoopJob.getTotalMapInputMB(job), -1);
+            int reducerNum = ReducerNumSizing.getLayeredCubingReduceTaskNum(segment, AbstractHadoopJob.getTotalMapInputMB(job), -1);
             job.setNumReduceTasks(reducerNum);
 
             Path outputPath = new Path(output);
@@ -190,4 +185,4 @@ public class HBaseMROutput2Transition implements IMROutput2 {
             throw new IllegalStateException("No merging segment's last build job ID equals " + jobID);
         }
     }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index a121c9c..6f69e8c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -67,8 +67,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getRowkeyDistributionOutputPath(jobId));
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Region_Splits_Calculator_" + seg.getRealization().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Region_Splits_Calculator_" + seg.getRealization().getName() + "_Step");
 
         rowkeyDistributionStep.setMapReduceParams(cmd.toString());
         rowkeyDistributionStep.setMapReduceJobClass(RangeKeyDistributionJob.class);
@@ -89,8 +88,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         StringBuilder cmd = new StringBuilder();
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_PARTITION,
-                getRowkeyDistributionOutputPath(jobId) + "/part-r-00000");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_PARTITION, getRowkeyDistributionOutputPath(jobId) + "/part-r-00000");
         appendExecCmdParameters(cmd, BatchConstants.ARG_STATS_ENABLED, String.valueOf(withStats));
 
         createHtableStep.setJobParams(cmd.toString());
@@ -99,8 +97,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         return createHtableStep;
     }
 
-    public MapReduceExecutable createMergeCuboidDataStep(CubeSegment seg, List<CubeSegment> mergingSegments,
-            String jobID, Class<? extends AbstractHadoopJob> clazz) {
+    public MapReduceExecutable createMergeCuboidDataStep(CubeSegment seg, List<CubeSegment> mergingSegments, String jobID, Class<? extends AbstractHadoopJob> clazz) {
 
         final List<String> mergingCuboidPaths = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
@@ -118,8 +115,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, formattedPath);
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_Merge_Cuboid_" + seg.getCubeInstance().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Merge_Cuboid_" + seg.getCubeInstance().getName() + "_Step");
 
         mergeCuboidDataStep.setMapReduceParams(cmd.toString());
         mergeCuboidDataStep.setMapReduceJobClass(clazz);
@@ -136,13 +132,11 @@ public class HBaseMRSteps extends JobBuilderSupport {
 
         appendMapReduceParameters(cmd);
         appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_PARTITION,
-                getRowkeyDistributionOutputPath(jobId) + "/part-r-00000_hfile");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_PARTITION, getRowkeyDistributionOutputPath(jobId) + "/part-r-00000_hfile");
         appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
         appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getHFilePath(jobId));
         appendExecCmdParameters(cmd, BatchConstants.ARG_HTABLE_NAME, seg.getStorageLocationIdentifier());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME,
-                "Kylin_HFile_Generator_" + seg.getRealization().getName() + "_Step");
+        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_HFile_Generator_" + seg.getRealization().getName() + "_Step");
 
         createHFilesStep.setMapReduceParams(cmd.toString());
         createHFilesStep.setMapReduceJobClass(CubeHFileJob.class);
@@ -174,10 +168,8 @@ public class HBaseMRSteps extends JobBuilderSupport {
     }
 
     public List<String> getMergingHTables() {
-        final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization())
-                .getMergingSegments((CubeSegment) seg);
-        Preconditions.checkState(mergingSegments.size() > 1,
-                "there should be more than 2 segments to merge, target segment " + seg);
+        final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization()).getMergingSegments((CubeSegment) seg);
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge, target segment " + seg);
         final List<String> mergingHTables = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
             mergingHTables.add(merging.getStorageLocationIdentifier());
@@ -186,10 +178,8 @@ public class HBaseMRSteps extends JobBuilderSupport {
     }
 
     public List<String> getMergingHDFSPaths() {
-        final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization())
-                .getMergingSegments((CubeSegment) seg);
-        Preconditions.checkState(mergingSegments.size() > 1,
-                "there should be more than 2 segments to merge, target segment " + seg);
+        final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization()).getMergingSegments((CubeSegment) seg);
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge, target segment " + seg);
         final List<String> mergingHDFSPaths = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
             mergingHDFSPaths.add(getJobWorkingDir(merging.getLastBuildJobID()));
@@ -198,13 +188,11 @@ public class HBaseMRSteps extends JobBuilderSupport {
     }
 
     public String getHFilePath(String jobId) {
-        return HBaseConnection.makeQualifiedPathInHBaseCluster(
-                getJobWorkingDir(jobId) + "/" + seg.getRealization().getName() + "/hfile/");
+        return HBaseConnection.makeQualifiedPathInHBaseCluster(getJobWorkingDir(jobId) + "/" + seg.getRealization().getName() + "/hfile/");
     }
 
     public String getRowkeyDistributionOutputPath(String jobId) {
-        return HBaseConnection.makeQualifiedPathInHBaseCluster(
-                getJobWorkingDir(jobId) + "/" + seg.getRealization().getName() + "/rowkey_stats");
+        return HBaseConnection.makeQualifiedPathInHBaseCluster(getJobWorkingDir(jobId) + "/" + seg.getRealization().getName() + "/rowkey_stats");
     }
 
     public void addMergingGarbageCollectionSteps(DefaultChainedExecutable jobFlow) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
index 1bdf58a..2876e3e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
@@ -99,8 +99,7 @@ public class RangeKeyDistributionJob extends AbstractHadoopJob {
             float hfileSizeGB = kylinConfig.getHBaseHFileSizeGB();
             float regionSplitSize = kylinConfig.getKylinHBaseRegionCut();
 
-            int compactionThreshold = Integer.valueOf(
-                    HBaseConnection.getCurrentHBaseConfiguration().get("hbase.hstore.compactionThreshold", "3"));
+            int compactionThreshold = Integer.valueOf(HBaseConnection.getCurrentHBaseConfiguration().get("hbase.hstore.compactionThreshold", "3"));
             if (hfileSizeGB > 0 && hfileSizeGB * compactionThreshold < regionSplitSize) {
                 hfileSizeGB = regionSplitSize / compactionThreshold;
                 logger.info("Adjust hfile size' to " + hfileSizeGB);
@@ -113,8 +112,7 @@ public class RangeKeyDistributionJob extends AbstractHadoopJob {
             job.getConfiguration().set(BatchConstants.CFG_REGION_NUMBER_MAX, String.valueOf(maxRegionCount));
             job.getConfiguration().set(BatchConstants.CFG_REGION_NUMBER_MIN, String.valueOf(minRegionCount));
             // The partition file for hfile is sequenece file consists of ImmutableBytesWritable and NullWritable
-            TableMapReduceUtil.addDependencyJars(job.getConfiguration(), ImmutableBytesWritable.class,
-                    NullWritable.class);
+            TableMapReduceUtil.addDependencyJars(job.getConfiguration(), ImmutableBytesWritable.class, NullWritable.class);
 
             return waitForCompletion(job);
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
index 8f7096a..63433dd 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
@@ -77,16 +77,14 @@ public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable
             maxRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.CFG_REGION_NUMBER_MAX));
         }
 
-        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount + ", min region count="
-                + minRegionCount + ", hfile size=" + hfileSizeGB);
+        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount + ", min region count=" + minRegionCount + ", hfile size=" + hfileSizeGB);
 
         // add empty key at position 0
         gbPoints.add(new Text());
     }
 
     @Override
-    public void doReduce(Text key, Iterable<LongWritable> values, Context context)
-            throws IOException, InterruptedException {
+    public void doReduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
         for (LongWritable v : values) {
             bytesRead += v.get();
         }
@@ -117,9 +115,7 @@ public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable
         System.out.println(hfilePerRegion + " hfile per region");
 
         Path hfilePartitionFile = new Path(output + "/part-r-00000_hfile");
-        SequenceFile.Writer hfilePartitionWriter = new SequenceFile.Writer(
-                hfilePartitionFile.getFileSystem(context.getConfiguration()), context.getConfiguration(),
-                hfilePartitionFile, ImmutableBytesWritable.class, NullWritable.class);
+        SequenceFile.Writer hfilePartitionWriter = new SequenceFile.Writer(hfilePartitionFile.getFileSystem(context.getConfiguration()), context.getConfiguration(), hfilePartitionFile, ImmutableBytesWritable.class, NullWritable.class);
         int hfileCountInOneRegion = 0;
         for (int i = hfileSizeGB; i < gbPoints.size(); i += hfileSizeGB) {
             hfilePartitionWriter.append(new ImmutableBytesWritable(gbPoints.get(i).getBytes()), NullWritable.get());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
index 5a03985..5e6ad34 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
@@ -68,9 +68,7 @@ public class SequenceFileCuboidWriter extends KVGTRecordWriter {
 
             Path cuboidFile = new Path(cuboidPath, "data.seq");
             logger.debug("Cuboid is written to " + cuboidFile);
-            writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(),
-                    SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class),
-                    SequenceFile.Writer.valueClass(Text.class));
+            writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
         }
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 8d74eb3..2154ed1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -97,8 +97,7 @@ public class CubeMigrationCLI {
     private static final String ACL_INFO_FAMILY_PARENT_COLUMN = "p";
 
     public static void main(String[] args) throws IOException, InterruptedException {
-        logger.warn(
-                "org.apache.kylin.storage.hbase.util.CubeMigrationCLI is deprecated, use org.apache.kylin.tool.CubeMigrationCLI instead");
+        logger.warn("org.apache.kylin.storage.hbase.util.CubeMigrationCLI is deprecated, use org.apache.kylin.tool.CubeMigrationCLI instead");
 
         if (args.length != 8) {
             usage();
@@ -109,22 +108,12 @@ public class CubeMigrationCLI {
     }
 
     private static void usage() {
-        System.out.println(
-                "Usage: CubeMigrationCLI srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute");
-        System.out.println(" srcKylinConfigUri: The KylinConfig of the cube’s source \n"
-                + "dstKylinConfigUri: The KylinConfig of the cube’s new home \n"
-                + "cubeName: the name of cube to be migrated. \n"
-                + "projectName: The target project in the target environment.(Make sure it exist) \n"
-                + "copyAclOrNot: true or false: whether copy cube ACL to target environment. \n"
-                + "purgeOrNot: true or false: whether purge the cube from src server after the migration. \n"
-                + "overwriteIfExists: overwrite cube if it already exists in the target environment. \n"
-                + "realExecute: if false, just print the operations to take, if true, do the real migration. \n");
+        System.out.println("Usage: CubeMigrationCLI srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute");
+        System.out.println(" srcKylinConfigUri: The KylinConfig of the cube’s source \n" + "dstKylinConfigUri: The KylinConfig of the cube’s new home \n" + "cubeName: the name of cube to be migrated. \n" + "projectName: The target project in the target environment.(Make sure it exist) \n" + "copyAclOrNot: true or false: whether copy cube ACL to target environment. \n" + "purgeOrNot: true or false: whether purge the cube from src server after the migration. \n" + "overwriteIfExists: overwrite cube if it already exists in the target environment. \n" + "realExecute: if false, just print the operations to take, if true, do the real migration. \n");
 
     }
 
-    public static void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeName, String projectName,
-            String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute)
-            throws IOException, InterruptedException {
+    public static void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
 
         srcConfig = srcCfg;
         srcStore = ResourceStore.getStore(srcConfig);
@@ -174,16 +163,12 @@ public class CubeMigrationCLI {
         }
     }
 
-    public static void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl,
-            String purgeAndDisable, String overwriteIfExists, String realExecute)
-            throws IOException, InterruptedException {
+    public static void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
 
-        moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName,
-                projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
+        moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName, projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
     }
 
-    public static void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix)
-            throws IOException {
+    public static void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException {
         CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
         checkCLI.execute(cubeName);
     }
@@ -213,14 +198,12 @@ public class CubeMigrationCLI {
 
     private static void changeHtableHost(CubeInstance cube) {
         for (CubeSegment segment : cube.getSegments()) {
-            operations
-                    .add(new Opt(OptType.CHANGE_HTABLE_HOST, new Object[] { segment.getStorageLocationIdentifier() }));
+            operations.add(new Opt(OptType.CHANGE_HTABLE_HOST, new Object[] { segment.getStorageLocationIdentifier() }));
         }
     }
 
     private static void copyACL(CubeInstance cube, String projectName) {
-        operations.add(new Opt(OptType.COPY_ACL,
-                new Object[] { cube.getUuid(), cube.getDescriptor().getModel().getUuid(), projectName }));
+        operations.add(new Opt(OptType.COPY_ACL, new Object[] { cube.getUuid(), cube.getDescriptor().getModel().getUuid(), projectName }));
     }
 
     private static void copyFilesInMetaStore(CubeInstance cube, String overwriteIfExists) throws IOException {
@@ -230,8 +213,7 @@ public class CubeMigrationCLI {
         listCubeRelatedResources(cube, metaItems, dictAndSnapshot);
 
         if (dstStore.exists(cube.getResourcePath()) && !overwriteIfExists.equalsIgnoreCase("true"))
-            throw new IllegalStateException("The cube named " + cube.getName()
-                    + " already exists on target metadata store. Use overwriteIfExists to overwrite it");
+            throw new IllegalStateException("The cube named " + cube.getName() + " already exists on target metadata store. Use overwriteIfExists to overwrite it");
 
         for (String item : metaItems) {
             operations.add(new Opt(OptType.COPY_FILE_IN_META, new Object[] { item }));
@@ -242,8 +224,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName)
-            throws IOException {
+    private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
             throw new IllegalStateException("The target project " + projectName + "does not exist");
@@ -255,8 +236,7 @@ public class CubeMigrationCLI {
         operations.add(new Opt(OptType.PURGE_AND_DISABLE, new Object[] { cubeName }));
     }
 
-    private static void listCubeRelatedResources(CubeInstance cube, List<String> metaResource,
-            Set<String> dictAndSnapshot) throws IOException {
+    private static void listCubeRelatedResources(CubeInstance cube, List<String> metaResource, Set<String> dictAndSnapshot) throws IOException {
 
         CubeDesc cubeDesc = cube.getDescriptor();
         metaResource.add(cube.getResourcePath());
@@ -463,10 +443,8 @@ public class CubeMigrationCLI {
             Table srcAclHtable = null;
             Table destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -477,10 +455,8 @@ public class CubeMigrationCLI {
                         byte[] value = CellUtil.cloneValue(cell);
 
                         // use the target project uuid as the parent
-                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY)
-                                && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                            String valueString = "{\"id\":\"" + projUUID
-                                    + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                            String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
                             value = Bytes.toBytes(valueString);
                         }
                         Put put = new Put(Bytes.toBytes(cubeId));
@@ -555,8 +531,7 @@ public class CubeMigrationCLI {
             String modelId = (String) opt.params[1];
             Table destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
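
For reference, the usage message in this diff maps onto an invocation along the following lines. This is an illustrative sketch only: the config URIs, cube name and project name are placeholders rather than values from this commit, and per the deprecation warning the org.apache.kylin.tool.CubeMigrationCLI class is the one to prefer.

    # Hypothetical example; the src/dst config URIs, cube and project names are placeholders.
    # Argument order: srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute
    $KYLIN_HOME/bin/kylin.sh org.apache.kylin.tool.CubeMigrationCLI \
        src-kylin:7070 dst-kylin:7070 sample_cube target_project true false true false

Keeping realExecute as false means the tool only prints the operations it would perform, as described in the usage text above.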

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index ec3764b..20d0f7d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -54,14 +54,11 @@ public class CubeMigrationCheckCLI {
 
     private static final Logger logger = LoggerFactory.getLogger(CubeMigrationCheckCLI.class);
 
-    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false)
-            .withDescription("Fix the inconsistent cube segments' HOST").create("fix");
+    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false).withDescription("Fix the inconsistent cube segments' HOST").create("fix");
 
-    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false)
-            .withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
+    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false).withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
 
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("The name of cube migrated").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
     private Admin hbaseAdmin;
@@ -72,8 +69,7 @@ public class CubeMigrationCheckCLI {
     private boolean ifFix = false;
 
     public static void main(String[] args) throws ParseException, IOException {
-        logger.warn(
-                "org.apache.kylin.storage.hbase.util.CubeMigrationCheckCLI is deprecated, use org.apache.kylin.tool.CubeMigrationCheckCLI instead");
+        logger.warn("org.apache.kylin.storage.hbase.util.CubeMigrationCheckCLI is deprecated, use org.apache.kylin.tool.CubeMigrationCheckCLI instead");
 
         OptionsHelper optionsHelper = new OptionsHelper();
 
@@ -191,9 +187,7 @@ public class CubeMigrationCheckCLI {
             for (String segFullName : inconsistentHTables) {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
-                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1]
-                        + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to "
-                        + dstCfg.getMetadataUrlPrefix());
+                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
@@ -213,8 +207,7 @@ public class CubeMigrationCheckCLI {
         logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
         for (String segFullName : issueExistHTables) {
             String[] sepNameList = segFullName.split(",");
-            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1]
-                    + " has some issues and cannot be read successfully!!!");
+            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1] + " has some issues and cannot be read successfully!!!");
         }
         logger.info("----------------------------------------------------");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index f5fb304..c437e66 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -137,14 +137,10 @@ public class DeployCoprocessorCLI {
 
     private static void printUsageAndExit() {
         logger.info("Usage: ");
-        logger.info(
-                "$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar all");
-        logger.info(
-                "$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar -table tableName1 tableName2 ...");
-        logger.info(
-                "$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar -cube cubeName1 cubeName2 ... ");
-        logger.info(
-                "$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar -project projectName1 projectName2 ...");
+        logger.info("$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar all");
+        logger.info("$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar -table tableName1 tableName2 ...");
+        logger.info("$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar -cube cubeName1 cubeName2 ... ");
+        logger.info("$KYLIN_HOME/bin/kylin.sh  org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI $KYLIN_HOME/lib/kylin-coprocessor-*.jar -project projectName1 projectName2 ...");
         System.exit(0);
     }
 
@@ -161,7 +157,7 @@ public class DeployCoprocessorCLI {
 
             ProjectInstance projectInstance = projectManager.getProject(p);
             List<RealizationEntry> cubeList = projectInstance.getRealizationEntries(RealizationType.CUBE);
-            for (RealizationEntry cube : cubeList) {
+            for (RealizationEntry cube: cubeList) {
                 CubeInstance cubeInstance = cubeManager.getCube(cube.getRealization());
                 for (CubeSegment segment : cubeInstance.getSegments()) {
                     String tableName = segment.getStorageLocationIdentifier();
@@ -234,8 +230,7 @@ public class DeployCoprocessorCLI {
         desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
     }
 
-    public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar)
-            throws IOException {
+    public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
@@ -284,15 +279,13 @@ public class DeployCoprocessorCLI {
         return true;
     }
 
-    private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar,
-            List<String> tableNames) throws IOException {
+    private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
         List<String> processedTables = Collections.synchronizedList(new ArrayList<String>());
         ExecutorService coprocessorPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
         CountDownLatch countDownLatch = new CountDownLatch(tableNames.size());
 
         for (final String tableName : tableNames) {
-            coprocessorPool.execute(new ResetCoprocessorWorker(countDownLatch, hbaseAdmin, hdfsCoprocessorJar,
-                    tableName, processedTables));
+            coprocessorPool.execute(new ResetCoprocessorWorker(countDownLatch, hbaseAdmin, hdfsCoprocessorJar, tableName, processedTables));
         }
 
         try {
@@ -312,8 +305,7 @@ public class DeployCoprocessorCLI {
         private final String tableName;
         private final List<String> processedTables;
 
-        public ResetCoprocessorWorker(CountDownLatch countDownLatch, Admin hbaseAdmin, Path hdfsCoprocessorJar,
-                String tableName, List<String> processedTables) {
+        public ResetCoprocessorWorker(CountDownLatch countDownLatch, Admin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
             this.countDownLatch = countDownLatch;
             this.hbaseAdmin = hbaseAdmin;
             this.hdfsCoprocessorJar = hdfsCoprocessorJar;
@@ -358,8 +350,7 @@ public class DeployCoprocessorCLI {
         return path;
     }
 
-    public synchronized static Path uploadCoprocessorJar(String localCoprocessorJar, FileSystem fileSystem,
-            Set<String> oldJarPaths) throws IOException {
+    public synchronized static Path uploadCoprocessorJar(String localCoprocessorJar, FileSystem fileSystem, Set<String> oldJarPaths) throws IOException {
         Path uploadPath = null;
         File localCoprocessorFile = new File(localCoprocessorJar);
 
@@ -417,8 +408,7 @@ public class DeployCoprocessorCLI {
     }
 
     private static boolean isSame(File localCoprocessorFile, FileStatus fileStatus) {
-        return fileStatus.getLen() == localCoprocessorFile.length()
-                && fileStatus.getModificationTime() == localCoprocessorFile.lastModified();
+        return fileStatus.getLen() == localCoprocessorFile.length() && fileStatus.getModificationTime() == localCoprocessorFile.lastModified();
     }
 
     private static String getBaseFileName(String localCoprocessorJar) {
@@ -476,8 +466,7 @@ public class DeployCoprocessorCLI {
 
         ArrayList<String> result = new ArrayList<String>();
         for (CubeInstance cube : cubeMgr.listAllCubes()) {
-            if (cube.getStorageType() == IStorageAware.ID_HBASE
-                    || cube.getStorageType() == IStorageAware.ID_SHARDED_HBASE) {
+            if (cube.getStorageType() == IStorageAware.ID_HBASE || cube.getStorageType() == IStorageAware.ID_SHARDED_HBASE) {
                 for (CubeSegment seg : cube.getSegments(SegmentStatusEnum.READY)) {
                     String tableName = seg.getStorageLocationIdentifier();
                     if (StringUtils.isBlank(tableName) == false) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index 0c2b251..1cdb2f8 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -86,8 +86,7 @@ public class ExtendCubeToHybridCLI {
     }
 
     public static void main(String[] args) throws Exception {
-        logger.warn(
-                "org.apache.kylin.storage.hbase.util.ExtendCubeToHybridCLI is deprecated, use org.apache.kylin.tool.ExtendCubeToHybridCLI instead");
+        logger.warn("org.apache.kylin.storage.hbase.util.ExtendCubeToHybridCLI is deprecated, use org.apache.kylin.tool.ExtendCubeToHybridCLI instead");
 
         if (args.length != 2 && args.length != 3) {
             System.out.println("Usage: ExtendCubeToHybridCLI project cube [partition_date]");
@@ -123,8 +122,7 @@ public class ExtendCubeToHybridCLI {
     }
 
     public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
-        logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date["
-                + partitionDateStr + "].");
+        logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");
 
         CubeInstance cubeInstance = cubeManager.getCube(cubeName);
         if (!validateCubeInstance(cubeInstance)) {
@@ -158,8 +156,7 @@ public class ExtendCubeToHybridCLI {
         CubeSegment currentSeg = null;
         while (segmentIterator.hasNext()) {
             currentSeg = segmentIterator.next();
-            if (partitionDateStr != null && (currentSeg.getDateRangeStart() >= partitionDate
-                    || currentSeg.getDateRangeEnd() > partitionDate)) {
+            if (partitionDateStr != null && (currentSeg.getDateRangeStart() >= partitionDate || currentSeg.getDateRangeEnd() > partitionDate)) {
                 segmentIterator.remove();
                 logger.info("CubeSegment[" + currentSeg + "] was removed.");
             }
@@ -202,11 +199,9 @@ public class ExtendCubeToHybridCLI {
         List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
         realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
         realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
-        HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()),
-                realizationEntries);
+        HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
         store.putResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
-        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID,
-                hybridInstance.getName(), projectName, owner);
+        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
         logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());
 
         // copy Acl from old cube to new cube
@@ -243,8 +238,7 @@ public class ExtendCubeToHybridCLI {
         String projUUID = project.getUuid();
         Table aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl())
-                    .getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -255,10 +249,8 @@ public class ExtendCubeToHybridCLI {
                     byte[] value = CellUtil.cloneValue(cell);
 
                     // use the target project uuid as the parent
-                    if (Bytes.toString(family).equals(ACL_INFO_FAMILY)
-                            && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                        String valueString = "{\"id\":\"" + projUUID
-                                + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                    if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                        String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
                         value = Bytes.toBytes(valueString);
                     }
                     Put put = new Put(Bytes.toBytes(newCubeId));

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index 56ac814..a317110 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -83,8 +83,7 @@ public class GridTableHBaseBenchmark {
         Hits hits = new Hits(N_ROWS, hitRatio, indexRatio);
 
         for (int i = 0; i < ROUND; i++) {
-            System.out.println("==================================== ROUND " + (i + 1)
-                    + " ========================================");
+            System.out.println("==================================== ROUND " + (i + 1) + " ========================================");
             testRowScanWithIndex(conn, hits.getHitsForRowScanWithIndex());
             testRowScanNoIndexFullScan(conn, hits.getHitsForRowScanNoIndex());
             testRowScanNoIndexSkipScan(conn, hits.getHitsForRowScanNoIndex());
@@ -387,8 +386,7 @@ public class GridTableHBaseBenchmark {
         public void markEnd() {
             endTime = System.currentTimeMillis();
             System.out.println();
-            System.out.println(name + " ends, " + (endTime - startTime) + " ms, " + rowsRead + " rows read, "
-                    + bytesRead + " bytes read");
+            System.out.println(name + " ends, " + (endTime - startTime) + " ms, " + rowsRead + " rows read, " + bytesRead + " bytes read");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 34c5fa5..940d64a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -44,12 +44,10 @@ import com.google.common.collect.Lists;
 public class HBaseClean extends AbstractApplication {
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(true)
-            .withDescription("actually delete or not").create("delete");
+    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(true).withDescription("actually delete or not").create("delete");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_TAG = OptionBuilder.withArgName("tag").hasArg().isRequired(true)
-            .withDescription("the tag of HTable").create("tag");
+    private static final Option OPTION_TAG = OptionBuilder.withArgName("tag").hasArg().isRequired(true).withDescription("the tag of HTable").create("tag");
 
     protected static final Logger logger = LoggerFactory.getLogger(HBaseClean.class);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 693be33..8dd2164 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -197,8 +197,7 @@ public class HbaseStreamingInput {
                         logger.error("value size invalid!!!!!");
                     }
 
-                    hash += Arrays.hashCode(Arrays.copyOfRange(value, cell.getValueOffset(),
-                            cell.getValueLength() + cell.getValueOffset()));
+                    hash += Arrays.hashCode(Arrays.copyOfRange(value, cell.getValueOffset(), cell.getValueLength() + cell.getValueOffset()));
                     rowCount++;
                 }
                 scanner.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index 11aaf9c..ea05ab2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -40,12 +40,9 @@ import org.slf4j.LoggerFactory;
 @SuppressWarnings("static-access")
 public class HtableAlterMetadataCLI extends AbstractApplication {
 
-    private static final Option OPTION_METADATA_KEY = OptionBuilder.withArgName("key").hasArg().isRequired(true)
-            .withDescription("The metadata key").create("key");
-    private static final Option OPTION_METADATA_VALUE = OptionBuilder.withArgName("value").hasArg().isRequired(true)
-            .withDescription("The metadata value").create("value");
-    protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_HTABLE_NAME)
-            .hasArg().isRequired(true).withDescription("HTable name").create(BatchConstants.ARG_HTABLE_NAME);
+    private static final Option OPTION_METADATA_KEY = OptionBuilder.withArgName("key").hasArg().isRequired(true).withDescription("The metadata key").create("key");
+    private static final Option OPTION_METADATA_VALUE = OptionBuilder.withArgName("value").hasArg().isRequired(true).withDescription("The metadata value").create("value");
+    protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_HTABLE_NAME).hasArg().isRequired(true).withDescription("HTable name").create(BatchConstants.ARG_HTABLE_NAME);
 
     protected static final Logger logger = LoggerFactory.getLogger(HtableAlterMetadataCLI.class);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index 0a524f8..df4e912 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -46,11 +46,9 @@ import org.slf4j.LoggerFactory;
 public class OrphanHBaseCleanJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false)
-            .withDescription("Delete the unused storage").create("delete");
+    private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused storage").create("delete");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_WHITELIST = OptionBuilder.withArgName("whitelist").hasArg().isRequired(true)
-            .withDescription("metadata store whitelist, separated with comma").create("whitelist");
+    private static final Option OPTION_WHITELIST = OptionBuilder.withArgName("whitelist").hasArg().isRequired(true).withDescription("metadata store whitelist, separated with comma").create("whitelist");
 
     protected static final Logger logger = LoggerFactory.getLogger(OrphanHBaseCleanJob.class);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index 1377dd9..bba6745 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -49,13 +49,11 @@ public class PingHBaseCLI {
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
         if (User.isHBaseSecurityEnabled(hconf)) {
             try {
-                System.out.println("--------------Getting kerberos credential for user "
-                        + UserGroupInformation.getCurrentUser().getUserName());
+                System.out.println("--------------Getting kerberos credential for user " + UserGroupInformation.getCurrentUser().getUserName());
                 TokenUtil.obtainAndCacheToken(hconf, UserGroupInformation.getCurrentUser());
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
-                System.out.println("--------------Error while getting kerberos credential for user "
-                        + UserGroupInformation.getCurrentUser().getUserName());
+                System.out.println("--------------Error while getting kerberos credential for user " + UserGroupInformation.getCurrentUser().getUserName());
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PrintHBaseConfig.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PrintHBaseConfig.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PrintHBaseConfig.java
index bd965dc..f9b7daf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PrintHBaseConfig.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PrintHBaseConfig.java
@@ -31,7 +31,7 @@ public class PrintHBaseConfig {
 
     public static void main(String[] args) throws IOException {
         MyConfig config = new MyConfig(HBaseConfiguration.create());
-
+        
         if (args.length == 0) {
             for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
                 System.out.println(item.getKey() + "=" + item.getValue());
@@ -43,18 +43,18 @@ public class PrintHBaseConfig {
             System.out.println(config.get(args[0]));
             System.exit(0);
         }
-
+        
         for (String arg : args) {
             System.out.println(arg + "=" + config.get(arg));
         }
         System.exit(0);
     }
-
+    
     private static class MyConfig extends Configuration {
         MyConfig(Configuration other) {
             super(other);
         }
-
+        
         protected synchronized Properties getProps() {
             return super.getProps();
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/Results.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/Results.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/Results.java
index 59bebc9..f619007 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/Results.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/Results.java
@@ -37,10 +37,8 @@ public class Results {
             return null;
         } else {
             for (Cell c : cells) {
-                if (Bytes.compareTo(cf, 0, cf.length, c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength()) == 0
-                        && //
-                        Bytes.compareTo(cq, 0, cq.length, c.getQualifierArray(), c.getQualifierOffset(),
-                                c.getQualifierLength()) == 0) {
+                if (Bytes.compareTo(cf, 0, cf.length, c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength()) == 0 && //
+                        Bytes.compareTo(cq, 0, cq.length, c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength()) == 0) {
                     return ByteBuffer.wrap(c.getValueArray(), c.getValueOffset(), c.getValueLength());
                 }
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index 6d58c3c..db516bb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -42,8 +42,7 @@ public class RowCounterCLI {
     public static void main(String[] args) throws IOException {
 
         if (args == null || args.length != 3) {
-            System.out.println(
-                    "Usage: hbase org.apache.hadoop.util.RunJar kylin-job-latest.jar org.apache.kylin.job.tools.RowCounterCLI [HTABLE_NAME] [STARTKEY] [ENDKEY]");
+            System.out.println("Usage: hbase org.apache.hadoop.util.RunJar kylin-job-latest.jar org.apache.kylin.job.tools.RowCounterCLI [HTABLE_NAME] [STARTKEY] [ENDKEY]");
         }
 
         System.out.println(args[0]);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index 7eef298..f6b65ab 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -67,18 +67,15 @@ import org.slf4j.LoggerFactory;
 public class StorageCleanupJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
-    protected static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false)
-            .withDescription("Delete the unused storage").create("delete");
-    protected static final Option OPTION_FORCE = OptionBuilder.withArgName("force").hasArg().isRequired(false)
-            .withDescription("Warning: will delete all kylin intermediate hive tables").create("force");
+    protected static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused storage").create("delete");
+    protected static final Option OPTION_FORCE = OptionBuilder.withArgName("force").hasArg().isRequired(false).withDescription("Warning: will delete all kylin intermediate hive tables").create("force");
 
     protected static final Logger logger = LoggerFactory.getLogger(StorageCleanupJob.class);
     public static final int deleteTimeout = 10; // Unit minute
 
     protected boolean delete = false;
     protected boolean force = false;
-    protected static ExecutableManager executableManager = ExecutableManager
-            .getInstance(KylinConfig.getInstanceFromEnv());
+    protected static ExecutableManager executableManager = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
@@ -102,8 +99,7 @@ public class StorageCleanupJob extends AbstractApplication {
                 String tablename = seg.getStorageLocationIdentifier();
                 if (allTablesNeedToBeDropped.contains(tablename)) {
                     allTablesNeedToBeDropped.remove(tablename);
-                    logger.info("Exclude table " + tablename + " from drop list, as the table belongs to cube "
-                            + cube.getName() + " with status " + cube.getStatus());
+                    logger.info("Exclude table " + tablename + " from drop list, as the table belongs to cube " + cube.getName() + " with status " + cube.getStatus());
                 }
             }
         }
@@ -117,8 +113,7 @@ public class StorageCleanupJob extends AbstractApplication {
                 try {
                     futureTask.get(deleteTimeout, TimeUnit.MINUTES);
                 } catch (TimeoutException e) {
-                    logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + deleteTimeout
-                            + " minutes!");
+                    logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + deleteTimeout + " minutes!");
                     futureTask.cancel(true);
                 } catch (Exception e) {
                     e.printStackTrace();
@@ -213,8 +208,7 @@ public class StorageCleanupJob extends AbstractApplication {
             if (!state.isFinalState()) {
                 String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(), jobId);
                 allHdfsPathsNeedToBeDeleted.remove(path);
-                logger.info("Skip " + path + " from deletion list, as the path belongs to job " + jobId
-                        + " with status " + state);
+                logger.info("Skip " + path + " from deletion list, as the path belongs to job " + jobId + " with status " + state);
             }
         }
 
@@ -225,8 +219,7 @@ public class StorageCleanupJob extends AbstractApplication {
                 if (jobUuid != null && jobUuid.equals("") == false) {
                     String path = JobBuilderSupport.getJobWorkingDir(engineConfig.getHdfsWorkingDirectory(), jobUuid);
                     allHdfsPathsNeedToBeDeleted.remove(path);
-                    logger.info("Skip " + path + " from deletion list, as the path belongs to segment " + seg
-                            + " of cube " + cube.getName());
+                    logger.info("Skip " + path + " from deletion list, as the path belongs to segment " + seg + " of cube " + cube.getName());
                 }
             }
         }
@@ -363,8 +356,7 @@ public class StorageCleanupJob extends AbstractApplication {
     }
 
     public static void main(String[] args) throws Exception {
-        logger.warn(
-                "org.apache.kylin.storage.hbase.util.StorageCleanupJob is deprecated, use org.apache.kylin.tool.StorageCleanupJob instead");
+        logger.warn("org.apache.kylin.storage.hbase.util.StorageCleanupJob is deprecated, use org.apache.kylin.tool.StorageCleanupJob instead");
 
         StorageCleanupJob cli = new StorageCleanupJob();
         cli.execute(args);
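
A usage note, not part of the diff: the -delete option declared above is what turns a listing run into an actual cleanup, so the job is normally run twice. The flag syntax below is assumed from the Apache Commons CLI option definitions in this class; the deprecation warning points to org.apache.kylin.tool.StorageCleanupJob as the class to invoke.

    # Assumed invocation; review the dry-run output before passing --delete true.
    $KYLIN_HOME/bin/kylin.sh org.apache.kylin.tool.StorageCleanupJob --delete false
    # Actually drop unused HTables, HDFS job dirs and intermediate Hive tables.
    $KYLIN_HOME/bin/kylin.sh org.apache.kylin.tool.StorageCleanupJob --delete true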

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/TarGZUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/TarGZUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/TarGZUtil.java
index f749247..f0c4c5b 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/TarGZUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/TarGZUtil.java
@@ -35,8 +35,7 @@ public class TarGZUtil {
         dest.mkdir();
         TarArchiveInputStream tarIn = null;
 
-        tarIn = new TarArchiveInputStream(
-                new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tarFile))));
+        tarIn = new TarArchiveInputStream(new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tarFile))));
 
         TarArchiveEntry tarEntry = tarIn.getNextTarEntry();
         // tarIn is a TarArchiveInputStream

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperDistributedLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperDistributedLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperDistributedLock.java
index 6edb970..63ffda0 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperDistributedLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperDistributedLock.java
@@ -182,8 +182,7 @@ public class ZookeeperDistributedLock implements DistributedLock, JobLock {
             }
 
             if (lock(lockPath)) {
-                logger.debug(client + " waited " + (System.currentTimeMillis() - waitStart) + " ms for lock path "
-                        + lockPath);
+                logger.debug(client + " waited " + (System.currentTimeMillis() - waitStart) + " ms for lock path " + lockPath);
                 return true;
             }
         }
@@ -224,11 +223,9 @@ public class ZookeeperDistributedLock implements DistributedLock, JobLock {
 
         String owner = peekLock(lockPath);
         if (owner == null)
-            throw new IllegalStateException(
-                    client + " cannot unlock path " + lockPath + " which is not locked currently");
+            throw new IllegalStateException(client + " cannot unlock path " + lockPath + " which is not locked currently");
         if (client.equals(owner) == false)
-            throw new IllegalStateException(
-                    client + " cannot unlock path " + lockPath + " which is locked by " + owner);
+            throw new IllegalStateException(client + " cannot unlock path " + lockPath + " which is locked by " + owner);
 
         try {
             curator.delete().guaranteed().deletingChildrenIfNeeded().forPath(lockPath);
@@ -266,12 +263,10 @@ public class ZookeeperDistributedLock implements DistributedLock, JobLock {
                 public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
                     switch (event.getType()) {
                     case CHILD_ADDED:
-                        watcher.onLock(event.getData().getPath(),
-                                new String(event.getData().getData(), Charset.forName("UTF-8")));
+                        watcher.onLock(event.getData().getPath(), new String(event.getData().getData(), Charset.forName("UTF-8")));
                         break;
                     case CHILD_REMOVED:
-                        watcher.onUnlock(event.getData().getPath(),
-                                new String(event.getData().getData(), Charset.forName("UTF-8")));
+                        watcher.onUnlock(event.getData().getPath(), new String(event.getData().getData(), Charset.forName("UTF-8")));
                         break;
                     default:
                         break;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
index 3c2d497..991a750 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
@@ -29,9 +29,8 @@ import org.apache.kylin.job.lock.JobLock;
  */
 public class ZookeeperJobLock implements DistributedLock, JobLock {
 
-    private ZookeeperDistributedLock lock = (ZookeeperDistributedLock) new ZookeeperDistributedLock.Factory()
-            .lockForCurrentProcess();
-
+    private ZookeeperDistributedLock lock = (ZookeeperDistributedLock) new ZookeeperDistributedLock.Factory().lockForCurrentProcess();
+    
     @Override
     public String getClient() {
         return lock.getClient();
@@ -61,7 +60,7 @@ public class ZookeeperJobLock implements DistributedLock, JobLock {
     public boolean isLockedByMe(String lockPath) {
         return lock.isLockedByMe(lockPath);
     }
-
+    
     @Override
     public void unlock(String lockPath) {
         lock.unlock(lockPath);
@@ -71,7 +70,6 @@ public class ZookeeperJobLock implements DistributedLock, JobLock {
     public void purgeLocks(String lockPathRoot) {
         lock.purgeLocks(lockPathRoot);
     }
-
     @Override
     public Closeable watchLocks(String lockPathRoot, Executor executor, Watcher watcher) {
         return lock.watchLocks(lockPathRoot, executor, watcher);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
index b884b44..b5ebe89 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
@@ -41,13 +41,12 @@ public class ZookeeperUtil {
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
         final String serverList = conf.get(HConstants.ZOOKEEPER_QUORUM);
         final String port = conf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
-        return StringUtils
-                .join(Iterables.transform(Arrays.asList(serverList.split(",")), new Function<String, String>() {
-                    @Nullable
-                    @Override
-                    public String apply(String input) {
-                        return input + ":" + port;
-                    }
-                }), ",");
+        return StringUtils.join(Iterables.transform(Arrays.asList(serverList.split(",")), new Function<String, String>() {
+            @Nullable
+            @Override
+            public String apply(String input) {
+                return input + ":" + port;
+            }
+        }), ",");
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
index aa235ae..fe1ad4e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
@@ -53,7 +53,7 @@ public class HDFSResourceStore extends ResourceStore {
     public HDFSResourceStore(KylinConfig kylinConfig) throws Exception {
         super(kylinConfig);
         StorageURL metadataUrl = kylinConfig.getMetadataUrl();
-
+        
         if (!metadataUrl.getScheme().equals("hdfs"))
             throw new IOException("kylin.metadata.url not recognized for HDFSResourceStore:" + metadataUrl);
 
@@ -101,8 +101,7 @@ public class HDFSResourceStore extends ResourceStore {
     }
 
     @Override
-    protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive)
-            throws IOException {
+    protected List<RawResource> getAllResourcesImpl(String folderPath, long timeStart, long timeEndExclusive) throws IOException {
         NavigableSet<String> resources = listResources(folderPath);
         if (resources == null)
             return Collections.emptyList();
@@ -178,20 +177,17 @@ public class HDFSResourceStore extends ResourceStore {
     }
 
     @Override
-    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
-            throws IOException, IllegalStateException {
+    protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
         Path p = getRealHDFSPath(resPath);
         if (!fs.exists(p)) {
             if (oldTS != 0) {
-                throw new IllegalStateException(
-                        "For not exist file. OldTS have to be 0. but Actual oldTS is : " + oldTS);
+                throw new IllegalStateException("For not exist file. OldTS have to be 0. but Actual oldTS is : " + oldTS);
             }
 
         } else {
             long realLastModify = getResourceTimestamp(resPath);
             if (realLastModify != oldTS) {
-                throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS
-                        + ", but found " + realLastModify);
+                throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but found " + realLastModify);
             }
         }
         putResourceImpl(resPath, new ByteArrayInputStream(content), newTS);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/LockManager.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/LockManager.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/LockManager.java
index 7f8df78..96ec653 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/LockManager.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/LockManager.java
@@ -83,6 +83,7 @@ public class LockManager {
 
     }
 
+
     public String getLockPath(String resourceName) {
         if (!resourceName.startsWith("/"))
             resourceName = "/" + resourceName;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/ResourceLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/ResourceLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/ResourceLock.java
index 0b553d8..ee5a415 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/ResourceLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/ResourceLock.java
@@ -17,9 +17,10 @@
 */
 package org.apache.kylin.storage.hdfs;
 
+import org.apache.curator.framework.recipes.locks.InterProcessMutex;
+
 import java.util.concurrent.TimeUnit;
 
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
 
 public class ResourceLock {
 
@@ -34,13 +35,13 @@ public class ResourceLock {
 
     public void acquire(long time, TimeUnit unit) throws Exception {
         boolean success = lock.acquire(time, unit);
-        if (!success) {
+        if(!success){
             throw new IllegalStateException("Fail to get Zookeeper lock");
         }
     }
 
-    public void acquire() throws Exception {
-        lock.acquire();
+    public void acquire() throws Exception{
+       lock.acquire();
     }
 
     protected void release() throws Exception {


[61/67] [abbrv] kylin git commit: minor refactor of scripts

Posted by li...@apache.org.
minor refactor of scripts


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0a95de00
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0a95de00
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0a95de00

Branch: refs/heads/master
Commit: 0a95de0049e5bc2c0707618266141116e6f9a139
Parents: 58a6307
Author: Yang Li <li...@apache.org>
Authored: Sat Jun 3 15:59:08 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Sat Jun 3 21:35:01 2017 +0800

----------------------------------------------------------------------
 build/bin/check-acl-migration.sh                |  42 +++++++
 build/bin/check-env.sh                          |   0
 build/bin/check_acl_migration.sh                |  42 -------
 build/bin/diag.sh                               |   0
 build/bin/find-hadoop-conf-dir.sh               |   4 +-
 build/bin/find-hbase-dependency.sh              |   2 +-
 build/bin/find-hive-dependency.sh               |   2 +-
 build/bin/find-kafka-dependency.sh              |   4 +-
 build/bin/find-spark-dependency.sh              |   0
 build/bin/header.sh                             |   0
 build/bin/health-check.sh                       |   0
 build/bin/kylin-port-replace-util.sh            | 120 +++++++++++++++++++
 build/bin/kylin.sh                              |   2 +-
 build/bin/kylin_port_replace_util.sh            | 120 -------------------
 build/bin/load-hive-conf.sh                     |   0
 build/bin/sample.sh                             |   0
 build/bin/setenv.sh                             |   2 +-
 build/script/prepare-libs.sh                    |  42 +++++++
 build/script/prepare.sh                         |   2 +-
 build/script/prepare_libs.sh                    |  42 -------
 .../engine/mr/common/AbstractHadoopJob.java     |   8 --
 .../apache/kylin/tool/AclTableMigrationCLI.java |   4 +-
 22 files changed, 215 insertions(+), 223 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/check-acl-migration.sh
----------------------------------------------------------------------
diff --git a/build/bin/check-acl-migration.sh b/build/bin/check-acl-migration.sh
new file mode 100755
index 0000000..1259f09
--- /dev/null
+++ b/build/bin/check-acl-migration.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -o pipefail  # trace ERR through pipes
+set -o errtrace  # trace ERR through 'time command' and other functions
+function error() {
+   SCRIPT="$0"           # script name
+   LASTLINE="$1"         # line of error occurrence
+   LASTERR="$2"          # error code
+   echo "ERROR exit from ${SCRIPT} : line ${LASTLINE} with exit code ${LASTERR}"
+   exit 1
+}
+trap 'error ${LINENO} ${?}' ERR
+
+
+#check kylin home
+if [ -z "$KYLIN_HOME" ]
+then
+    echo 'Please make sure KYLIN_HOME has been set'
+    exit 1
+else
+    echo "KYLIN_HOME is set to ${KYLIN_HOME}"
+fi
+
+echo "Start to check whether we need to migrate acl tables"
+${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI CHECK
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/check-env.sh
----------------------------------------------------------------------
diff --git a/build/bin/check-env.sh b/build/bin/check-env.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/check_acl_migration.sh
----------------------------------------------------------------------
diff --git a/build/bin/check_acl_migration.sh b/build/bin/check_acl_migration.sh
deleted file mode 100644
index 1259f09..0000000
--- a/build/bin/check_acl_migration.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-set -o pipefail  # trace ERR through pipes
-set -o errtrace  # trace ERR through 'time command' and other functions
-function error() {
-   SCRIPT="$0"           # script name
-   LASTLINE="$1"         # line of error occurrence
-   LASTERR="$2"          # error code
-   echo "ERROR exit from ${SCRIPT} : line ${LASTLINE} with exit code ${LASTERR}"
-   exit 1
-}
-trap 'error ${LINENO} ${?}' ERR
-
-
-#check kylin home
-if [ -z "$KYLIN_HOME" ]
-then
-    echo 'Please make sure KYLIN_HOME has been set'
-    exit 1
-else
-    echo "KYLIN_HOME is set to ${KYLIN_HOME}"
-fi
-
-echo "Start to check whether we need to migrate acl tables"
-${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI CHECK
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/diag.sh
----------------------------------------------------------------------
diff --git a/build/bin/diag.sh b/build/bin/diag.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/find-hadoop-conf-dir.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hadoop-conf-dir.sh b/build/bin/find-hadoop-conf-dir.sh
old mode 100644
new mode 100755
index 5334b8a..403bc97
--- a/build/bin/find-hadoop-conf-dir.sh
+++ b/build/bin/find-hadoop-conf-dir.sh
@@ -24,7 +24,7 @@ echo Retrieving hadoop conf dir...
 override_hadoop_conf_dir=`bash ${KYLIN_HOME}/bin/get-properties.sh kylin.env.hadoop-conf-dir`
 
 if [ -n "$override_hadoop_conf_dir" ]; then
-    echo "$override_hadoop_conf_dir is override as the kylin_hadoop_conf_dir"
+    verbose "kylin_hadoop_conf_dir is override as $override_hadoop_conf_dir"
     export kylin_hadoop_conf_dir=${override_hadoop_conf_dir}
     return
 fi
@@ -69,7 +69,7 @@ do
             continue
         fi
         
-        verbose "$result is chosen as the kylin_hadoop_conf_dir"
+        verbose "kylin_hadoop_conf_dir is $result"
         export kylin_hadoop_conf_dir=$result
         return
     fi

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/find-hbase-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hbase-dependency.sh b/build/bin/find-hbase-dependency.sh
old mode 100644
new mode 100755
index 14dde3b..c98b5ac
--- a/build/bin/find-hbase-dependency.sh
+++ b/build/bin/find-hbase-dependency.sh
@@ -25,7 +25,7 @@ hbase_classpath=`hbase classpath`
 
 # special handling for Amazon EMR, to prevent re-init of hbase-setenv
 is_aws=`uname -r | grep amzn`
-if [ -n is_aws ] && [ -d "/usr/lib/oozie/lib" ]; then
+if [ -n "$is_aws" ] && [ -d "/usr/lib/oozie/lib" ]; then
     export HBASE_ENV_INIT="true"
 fi
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
old mode 100644
new mode 100755
index 8841687..0830015
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -155,6 +155,6 @@ checkFileExist ${hive_lib}
 checkFileExist ${hcatalog}
 
 hive_dependency=${hive_conf_path}:${hive_lib}:${hcatalog}
-verbose "hive dependency: $hive_dependency"
+verbose "hive dependency is $hive_dependency"
 export hive_dependency
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/find-kafka-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-kafka-dependency.sh b/build/bin/find-kafka-dependency.sh
old mode 100644
new mode 100755
index 999face..500d0cd
--- a/build/bin/find-kafka-dependency.sh
+++ b/build/bin/find-kafka-dependency.sh
@@ -45,10 +45,10 @@ then
     then
         quit "kafka client lib not found"
     else
-        verbose "kafka dependency: $kafka_dependency"
+        verbose "kafka dependency is $kafka_dependency"
         export kafka_dependency
     fi
 else
-    verbose "kafka dependency: $kafka_dependency"
+    verbose "kafka dependency is $kafka_dependency"
     export kafka_dependency
 fi

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/find-spark-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-spark-dependency.sh b/build/bin/find-spark-dependency.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/header.sh
----------------------------------------------------------------------
diff --git a/build/bin/header.sh b/build/bin/header.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/health-check.sh
----------------------------------------------------------------------
diff --git a/build/bin/health-check.sh b/build/bin/health-check.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/kylin-port-replace-util.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin-port-replace-util.sh b/build/bin/kylin-port-replace-util.sh
new file mode 100755
index 0000000..47b0d74
--- /dev/null
+++ b/build/bin/kylin-port-replace-util.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#exit if find error
+# ============================================================================
+
+set -o pipefail  # trace ERR through pipes
+set -o errtrace  # trace ERR through 'time command' and other functions
+function error() {
+   SCRIPT="$0"           # script name
+   LASTLINE="$1"         # line of error occurrence
+   LASTERR="$2"          # error code
+   echo "ERROR exit from ${SCRIPT} : line ${LASTLINE} with exit code ${LASTERR}"
+   exit 1
+}
+trap 'error ${LINENO} ${?}' ERR
+
+
+#check input parameters
+if [ $# -eq 0 ]; then
+  echo "Usage : port_offset_util.sh set PORT_OFFSET --> Modify all conflict ports base on a offset"
+  echo "Usage : port_offset_util.sh reset --> Recover to original setting"
+  exit 0
+fi
+
+#check kylin home
+if [ -z "$KYLIN_HOME" ]
+then
+    echo 'Please make sure KYLIN_HOME has been set'
+    exit 1
+else
+    echo "KYLIN_HOME is set to ${KYLIN_HOME}"
+fi
+
+#variables
+TOMCAT_INIT_FILE="${KYLIN_HOME}/tomcat/conf/server.xml.init"
+TOMCAT_BACKUP_FILE="${KYLIN_HOME}/tomcat/conf/server.xml.backup"
+TOMCAT_CONFIG_FILE="${KYLIN_HOME}/tomcat/conf/server.xml"
+KYLIN_CONFIG_FILE="${KYLIN_HOME}/conf/kylin.properties"
+KYLIN_BACKUP_FILE="${KYLIN_HOME}/conf/kylin.properties.backup"
+TOMCAT_PORT_LIST=(9005 7070 9443 7443 9009)
+KYLIN_DEFAULT_PORT=7070
+
+if [ "$1" == "set" ] 
+then
+    OFFSET=$2
+    echo "Port offset is : ${OFFSET}"
+
+    #check config file exist
+    if [ ! -f ${KYLIN_CONFIG_FILE} ] || [ ! -f ${TOMCAT_CONFIG_FILE} ]; then
+        echo "Some of the config file not exist"
+        exit 1
+    fi
+
+
+    #backup tomccat file
+    if [ ! -f ${TOMCAT_BACKUP_FILE} ]; then
+        cp -f ${TOMCAT_CONFIG_FILE} ${TOMCAT_BACKUP_FILE}
+    fi
+
+    #force reset
+    cp -f ${TOMCAT_INIT_FILE} ${TOMCAT_CONFIG_FILE} #reset if exist
+
+    #back or reset
+    if [ ! -f ${KYLIN_BACKUP_FILE} ]; then  #backup if not exist
+        cp -f ${KYLIN_CONFIG_FILE} ${KYLIN_BACKUP_FILE}
+    else
+        cp -r ${KYLIN_BACKUP_FILE} ${KYLIN_CONFIG_FILE} #reset if exist
+    fi
+
+
+    #replace ports in kylin.properties
+    new_kylin_port=`expr ${KYLIN_DEFAULT_PORT} + ${OFFSET}`
+
+    sed -i "s/kylin.server.cluster-servers=\(.*\).*:\(.*\)/kylin.server.cluster-servers=\1:${new_kylin_port}/g" ${KYLIN_CONFIG_FILE}
+
+    echo "New kylin port is : ${new_kylin_port}"
+
+    #replace ports in server.xml
+
+    for port in ${TOMCAT_PORT_LIST[@]}
+    do
+      new_port=`expr ${port} + ${OFFSET} `
+      #echo "Replace old port : ${port} to new port : ${new_port}"
+      sed -i "s/$port/${new_port}/g" ${TOMCAT_CONFIG_FILE}
+
+    done
+    echo "Files below modified:"
+    echo ${KYLIN_CONFIG_FILE}
+    echo ${TOMCAT_CONFIG_FILE}
+elif [ "$1" == "reset" ]
+then
+    #reset kylin.properties
+    cp  -f ${KYLIN_BACKUP_FILE} ${KYLIN_CONFIG_FILE}
+    cp  -f ${TOMCAT_BACKUP_FILE} ${TOMCAT_CONFIG_FILE}
+    rm  -f ${KYLIN_BACKUP_FILE}
+    rm  -f ${TOMCAT_BACKUP_FILE}
+    echo "Files below reset to original:"
+    echo ${KYLIN_CONFIG_FILE}
+    echo ${TOMCAT_CONFIG_FILE}
+else
+    echo "Unrecognized command"
+    exit 1
+fi

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
old mode 100644
new mode 100755
index d9e932e..3efa43e
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -95,7 +95,7 @@ then
         quit "Port ${kylin_rest_address} is not available, could not start Kylin."
     fi
 
-    ${KYLIN_HOME}/bin/check_acl_migration.sh || { exit 1; }
+    ${KYLIN_HOME}/bin/check-acl-migration.sh || { exit 1; }
     #debug if encounter NoClassDefError
     verbose "kylin classpath is: $(hbase classpath)"
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/kylin_port_replace_util.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin_port_replace_util.sh b/build/bin/kylin_port_replace_util.sh
deleted file mode 100755
index 47b0d74..0000000
--- a/build/bin/kylin_port_replace_util.sh
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#exit if find error
-# ============================================================================
-
-set -o pipefail  # trace ERR through pipes
-set -o errtrace  # trace ERR through 'time command' and other functions
-function error() {
-   SCRIPT="$0"           # script name
-   LASTLINE="$1"         # line of error occurrence
-   LASTERR="$2"          # error code
-   echo "ERROR exit from ${SCRIPT} : line ${LASTLINE} with exit code ${LASTERR}"
-   exit 1
-}
-trap 'error ${LINENO} ${?}' ERR
-
-
-#check input parameters
-if [ $# -eq 0 ]; then
-  echo "Usage : port_offset_util.sh set PORT_OFFSET --> Modify all conflict ports base on a offset"
-  echo "Usage : port_offset_util.sh reset --> Recover to original setting"
-  exit 0
-fi
-
-#check kylin home
-if [ -z "$KYLIN_HOME" ]
-then
-    echo 'Please make sure KYLIN_HOME has been set'
-    exit 1
-else
-    echo "KYLIN_HOME is set to ${KYLIN_HOME}"
-fi
-
-#variables
-TOMCAT_INIT_FILE="${KYLIN_HOME}/tomcat/conf/server.xml.init"
-TOMCAT_BACKUP_FILE="${KYLIN_HOME}/tomcat/conf/server.xml.backup"
-TOMCAT_CONFIG_FILE="${KYLIN_HOME}/tomcat/conf/server.xml"
-KYLIN_CONFIG_FILE="${KYLIN_HOME}/conf/kylin.properties"
-KYLIN_BACKUP_FILE="${KYLIN_HOME}/conf/kylin.properties.backup"
-TOMCAT_PORT_LIST=(9005 7070 9443 7443 9009)
-KYLIN_DEFAULT_PORT=7070
-
-if [ "$1" == "set" ] 
-then
-    OFFSET=$2
-    echo "Port offset is : ${OFFSET}"
-
-    #check config file exist
-    if [ ! -f ${KYLIN_CONFIG_FILE} ] || [ ! -f ${TOMCAT_CONFIG_FILE} ]; then
-        echo "Some of the config file not exist"
-        exit 1
-    fi
-
-
-    #backup tomccat file
-    if [ ! -f ${TOMCAT_BACKUP_FILE} ]; then
-        cp -f ${TOMCAT_CONFIG_FILE} ${TOMCAT_BACKUP_FILE}
-    fi
-
-    #force reset
-    cp -f ${TOMCAT_INIT_FILE} ${TOMCAT_CONFIG_FILE} #reset if exist
-
-    #back or reset
-    if [ ! -f ${KYLIN_BACKUP_FILE} ]; then  #backup if not exist
-        cp -f ${KYLIN_CONFIG_FILE} ${KYLIN_BACKUP_FILE}
-    else
-        cp -r ${KYLIN_BACKUP_FILE} ${KYLIN_CONFIG_FILE} #reset if exist
-    fi
-
-
-    #replace ports in kylin.properties
-    new_kylin_port=`expr ${KYLIN_DEFAULT_PORT} + ${OFFSET}`
-
-    sed -i "s/kylin.server.cluster-servers=\(.*\).*:\(.*\)/kylin.server.cluster-servers=\1:${new_kylin_port}/g" ${KYLIN_CONFIG_FILE}
-
-    echo "New kylin port is : ${new_kylin_port}"
-
-    #replace ports in server.xml
-
-    for port in ${TOMCAT_PORT_LIST[@]}
-    do
-      new_port=`expr ${port} + ${OFFSET} `
-      #echo "Replace old port : ${port} to new port : ${new_port}"
-      sed -i "s/$port/${new_port}/g" ${TOMCAT_CONFIG_FILE}
-
-    done
-    echo "Files below modified:"
-    echo ${KYLIN_CONFIG_FILE}
-    echo ${TOMCAT_CONFIG_FILE}
-elif [ "$1" == "reset" ]
-then
-    #reset kylin.properties
-    cp  -f ${KYLIN_BACKUP_FILE} ${KYLIN_CONFIG_FILE}
-    cp  -f ${TOMCAT_BACKUP_FILE} ${TOMCAT_CONFIG_FILE}
-    rm  -f ${KYLIN_BACKUP_FILE}
-    rm  -f ${TOMCAT_BACKUP_FILE}
-    echo "Files below reset to original:"
-    echo ${KYLIN_CONFIG_FILE}
-    echo ${TOMCAT_CONFIG_FILE}
-else
-    echo "Unrecognized command"
-    exit 1
-fi

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/load-hive-conf.sh
----------------------------------------------------------------------
diff --git a/build/bin/load-hive-conf.sh b/build/bin/load-hive-conf.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/sample.sh
----------------------------------------------------------------------
diff --git a/build/bin/sample.sh b/build/bin/sample.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/bin/setenv.sh
----------------------------------------------------------------------
diff --git a/build/bin/setenv.sh b/build/bin/setenv.sh
index d838362..0e9b185 100755
--- a/build/bin/setenv.sh
+++ b/build/bin/setenv.sh
@@ -34,7 +34,7 @@ export KYLIN_EXTRA_START_OPTS=""
 
 if [ ! -z "${KYLIN_JVM_SETTINGS}" ]
 then
-    echo "KYLIN_JVM_SETTINGS is ${KYLIN_JVM_SETTINGS}"
+    verbose "KYLIN_JVM_SETTINGS is ${KYLIN_JVM_SETTINGS}"
     KYLIN_EXTRA_START_OPTS="${KYLIN_JVM_SETTINGS} ${KYLIN_EXTRA_START_OPTS}"
 else
     verbose "KYLIN_JVM_SETTINGS is not set, using default jvm settings: ${KYLIN_JVM_SETTINGS}"

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/script/prepare-libs.sh
----------------------------------------------------------------------
diff --git a/build/script/prepare-libs.sh b/build/script/prepare-libs.sh
new file mode 100644
index 0000000..ae5c5e4
--- /dev/null
+++ b/build/script/prepare-libs.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+dir=$(dirname ${0})
+cd ${dir}/../..
+
+if [ -z "$version" ]
+then
+    echo 'version not set'
+    version=`mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version |  grep -E '^[0-9]+\.[0-9]+\.[0-9]+' `
+fi
+echo "version ${version}"
+
+echo "copy lib file"
+rm -rf build/lib build/tool
+mkdir build/lib build/tool
+cp assembly/target/kylin-assembly-${version}-job.jar build/lib/kylin-job-${version}.jar
+cp storage-hbase/target/kylin-storage-hbase-${version}-coprocessor.jar build/lib/kylin-coprocessor-${version}.jar
+cp jdbc/target/kylin-jdbc-${version}.jar build/lib/kylin-jdbc-${version}.jar
+cp tool-assembly/target/kylin-tool-assembly-${version}-assembly.jar build/tool/kylin-tool-${version}.jar
+
+# Copied file becomes 000 for some env (e.g. my Cygwin)
+chmod 644 build/lib/kylin-job-${version}.jar
+chmod 644 build/lib/kylin-coprocessor-${version}.jar
+chmod 644 build/lib/kylin-jdbc-${version}.jar
+chmod 644 build/tool/kylin-tool-${version}.jar

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/script/prepare.sh
----------------------------------------------------------------------
diff --git a/build/script/prepare.sh b/build/script/prepare.sh
index a8dde10..deaf58d 100755
--- a/build/script/prepare.sh
+++ b/build/script/prepare.sh
@@ -28,7 +28,7 @@ fi
 echo "version ${version}"
 export version
 
-sh build/script/prepare_libs.sh || { exit 1; }
+sh build/script/prepare-libs.sh || { exit 1; }
 
 cp server/target/kylin-server-${version}.war build/tomcat/webapps/kylin.war
 chmod 644 build/tomcat/webapps/kylin.war

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/build/script/prepare_libs.sh
----------------------------------------------------------------------
diff --git a/build/script/prepare_libs.sh b/build/script/prepare_libs.sh
deleted file mode 100755
index ae5c5e4..0000000
--- a/build/script/prepare_libs.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-dir=$(dirname ${0})
-cd ${dir}/../..
-
-if [ -z "$version" ]
-then
-    echo 'version not set'
-    version=`mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version |  grep -E '^[0-9]+\.[0-9]+\.[0-9]+' `
-fi
-echo "version ${version}"
-
-echo "copy lib file"
-rm -rf build/lib build/tool
-mkdir build/lib build/tool
-cp assembly/target/kylin-assembly-${version}-job.jar build/lib/kylin-job-${version}.jar
-cp storage-hbase/target/kylin-storage-hbase-${version}-coprocessor.jar build/lib/kylin-coprocessor-${version}.jar
-cp jdbc/target/kylin-jdbc-${version}.jar build/lib/kylin-jdbc-${version}.jar
-cp tool-assembly/target/kylin-tool-assembly-${version}-assembly.jar build/tool/kylin-tool-${version}.jar
-
-# Copied file becomes 000 for some env (e.g. my Cygwin)
-chmod 644 build/lib/kylin-job-${version}.jar
-chmod 644 build/lib/kylin-coprocessor-${version}.jar
-chmod 644 build/lib/kylin-jdbc-${version}.jar
-chmod 644 build/tool/kylin-tool-${version}.jar

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index abfa224..f9d9808 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -166,9 +166,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
         }
 
         String kylinHiveDependency = System.getProperty("kylin.hive.dependency");
-        String kylinHBaseDependency = System.getProperty("kylin.hbase.dependency");
         String kylinKafkaDependency = System.getProperty("kylin.kafka.dependency");
-        logger.trace("append kylin.hbase.dependency: " + kylinHBaseDependency + " to " + MAP_REDUCE_CLASSPATH);
 
         Configuration jobConf = job.getConfiguration();
         String classpath = jobConf.get(MAP_REDUCE_CLASSPATH);
@@ -178,12 +176,6 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
             logger.info("The default mapred classpath is: " + classpath);
         }
 
-        if (kylinHBaseDependency != null) {
-            // yarn classpath is comma separated
-            kylinHBaseDependency = kylinHBaseDependency.replace(":", ",");
-            classpath = classpath + "," + kylinHBaseDependency;
-        }
-
         jobConf.set(MAP_REDUCE_CLASSPATH, classpath);
         logger.trace("Hadoop job classpath is: " + job.getConfiguration().get(MAP_REDUCE_CLASSPATH));
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/0a95de00/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
index 6c8a6b0..c42254b 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
@@ -40,8 +40,8 @@ public class AclTableMigrationCLI {
         case CHECK:
             boolean needMigrate = tool.checkIfNeedMigrate(KylinConfig.getInstanceFromEnv());
             if (needMigrate) {
-                System.out.println("Found acl tables that need to migrate. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
-                System.exit(1);
+                System.out.println("Found ACL metadata in legacy format. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
+                System.exit(2);
             }
             break;
         default:


[56/67] [abbrv] kylin git commit: KYLIN-2593 fix hard coding in TopNMeasureType

Posted by li...@apache.org.
KYLIN-2593 fix hard coding in TopNMeasureType


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/38308bc4
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/38308bc4
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/38308bc4

Branch: refs/heads/master
Commit: 38308bc4c0383517ace73ad213a9f265ae74756b
Parents: 84408d5
Author: shaofengshi <sh...@apache.org>
Authored: Wed May 31 21:24:38 2017 +0800
Committer: hongbin ma <ma...@kyligence.io>
Committed: Thu Jun 1 13:11:34 2017 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/measure/topn/TopNMeasureType.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/38308bc4/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index f936cb8..89ed8ae 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -40,6 +40,7 @@ import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.datatype.DataTypeSerializer;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
+import org.apache.kylin.metadata.model.ParameterDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluence;
 import org.apache.kylin.metadata.realization.SQLDigest;
@@ -371,7 +372,8 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
         if (numericCol != null) {
             numericTupleIdx = tupleInfo.hasColumn(numericCol) ? tupleInfo.getColumnIndex(numericCol) : -1;
         } else {
-            numericTupleIdx = tupleInfo.getFieldIndex("COUNT__");
+            FunctionDesc countFunction = FunctionDesc.newInstance(FunctionDesc.FUNC_COUNT, ParameterDesc.newInstance("1"), "bigint");
+            numericTupleIdx = tupleInfo.getFieldIndex(countFunction.getRewriteFieldName());
         }
         return new IAdvMeasureFiller() {
             private TopNCounter<ByteArray> topNCounter;
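
A hedged restatement of the core of this fix, pulled out of the anonymous IAdvMeasureFiller above for readability (tupleInfo and numericTupleIdx are the surrounding locals from the class; the snippet is illustrative only, not an additional change):

    // Before: the count column was looked up by the hard-coded literal "COUNT__".
    //     numericTupleIdx = tupleInfo.getFieldIndex("COUNT__");
    // After: the field name is derived from the count(1) function descriptor, so the
    // rewrite-field naming convention stays in one place (FunctionDesc).
    FunctionDesc countFunction = FunctionDesc.newInstance(
            FunctionDesc.FUNC_COUNT, ParameterDesc.newInstance("1"), "bigint");
    numericTupleIdx = tupleInfo.getFieldIndex(countFunction.getRewriteFieldName());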


[49/67] [abbrv] kylin git commit: KYLIN-2535 Use ResourceStore to manage ACL and saved queries

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 61ddbb0..5130e55 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -20,6 +20,8 @@ package org.apache.kylin.rest.service;
 
 import static org.apache.kylin.common.util.CheckUtil.checkCondition;
 
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -31,7 +33,6 @@ import java.sql.Statement;
 import java.sql.Time;
 import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -46,21 +47,17 @@ import javax.annotation.PostConstruct;
 import javax.sql.DataSource;
 
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
-import org.apache.kylin.common.StorageURL;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.exceptions.ResourceLimitExceededException;
-import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.RootPersistentEntity;
+import org.apache.kylin.common.persistence.Serializer;
 import org.apache.kylin.common.util.DBUtils;
+import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.SetThreadName;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
@@ -92,9 +89,7 @@ import org.apache.kylin.rest.request.SQLRequest;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.rest.util.AclUtil;
 import org.apache.kylin.rest.util.AdHocUtil;
-import org.apache.kylin.rest.util.Serializer;
 import org.apache.kylin.rest.util.TableauInterceptor;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hybrid.HybridInstance;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -105,6 +100,7 @@ import org.springframework.security.core.GrantedAuthority;
 import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Component;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.CharMatcher;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Splitter;
@@ -123,18 +119,12 @@ public class QueryService extends BasicService {
 
     private static final Logger logger = LoggerFactory.getLogger(QueryService.class);
 
-    public static final String USER_QUERY_FAMILY = "q";
-    private static final String USER_TABLE_NAME = "_user";
-    private static final String USER_QUERY_COLUMN = "c";
-
     public static final String SUCCESS_QUERY_CACHE = "StorageCache";
     public static final String EXCEPTION_QUERY_CACHE = "ExceptionQueryCache";
+    public static final String QUERY_STORE_PATH_PREFIX = "/query/";
 
-    private final Serializer<Query[]> querySerializer = new Serializer<Query[]>(Query[].class);
-    protected final BadQueryDetector badQueryDetector = new BadQueryDetector();
-
-    private final StorageURL hbaseUrl;
-    private final String userTableName;
+    final BadQueryDetector badQueryDetector = new BadQueryDetector();
+    final ResourceStore queryStore;
 
     @Autowired
     protected CacheManager cacheManager;
@@ -156,10 +146,7 @@ public class QueryService extends BasicService {
     }
 
     public QueryService() {
-        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        hbaseUrl = kylinConfig.getMetadataUrl();
-        userTableName = hbaseUrl.getIdentifier() + USER_TABLE_NAME;
-
+        queryStore = ResourceStore.getStore(getConfig());
         badQueryDetector.start();
     }
 
@@ -183,18 +170,10 @@ public class QueryService extends BasicService {
         List<Query> queries = getQueries(creator);
         queries.add(query);
         Query[] queryArray = new Query[queries.size()];
-
-        byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
-        try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
-            Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
-
-            htable.put(put);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
+        QueryRecord record = new QueryRecord(queries.toArray(queryArray));
+        queryStore.deleteResource(getQueryKeyById(creator));
+        queryStore.putResource(getQueryKeyById(creator), record, 0, QueryRecordSerializer.getInstance());
+        return;
     }
 
     public void removeQuery(final String creator, final String id) throws IOException {
@@ -214,45 +193,24 @@ public class QueryService extends BasicService {
         if (!changed) {
             return;
         }
-
         Query[] queryArray = new Query[queries.size()];
-        byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
-        try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
-            Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
-
-            htable.put(put);
-        } finally {
-            IOUtils.closeQuietly(htable);
-        }
+        QueryRecord record = new QueryRecord(queries.toArray(queryArray));
+        queryStore.deleteResource(getQueryKeyById(creator));
+        queryStore.putResource(getQueryKeyById(creator), record, 0, QueryRecordSerializer.getInstance());
+        return;
     }
 
     public List<Query> getQueries(final String creator) throws IOException {
         if (null == creator) {
             return null;
         }
-
         List<Query> queries = new ArrayList<Query>();
-        Table htable = null;
-        try {
-            org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
-            HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
-
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
-            Get get = new Get(Bytes.toBytes(creator));
-            get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
-            Result result = htable.get(get);
-            Query[] query = querySerializer.deserialize(result.getValue(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN)));
-
-            if (null != query) {
-                queries.addAll(Arrays.asList(query));
+        QueryRecord record = queryStore.getResource(getQueryKeyById(creator), QueryRecord.class, QueryRecordSerializer.getInstance());
+        if (record != null) {
+            for (Query query : record.getQueries()) {
+                queries.add(query);
             }
-        } finally {
-            IOUtils.closeQuietly(htable);
         }
-
         return queries;
     }
 
@@ -892,4 +850,58 @@ public class QueryService extends BasicService {
     public void setCacheManager(CacheManager cacheManager) {
         this.cacheManager = cacheManager;
     }
+
+    private static String getQueryKeyById(String creator) {
+        return QUERY_STORE_PATH_PREFIX + creator;
+    }
+
+    private static class QueryRecordSerializer implements Serializer<QueryRecord> {
+
+        private static final QueryRecordSerializer serializer = new QueryRecordSerializer();
+
+        QueryRecordSerializer() {
+
+        }
+
+        public static QueryRecordSerializer getInstance() {
+            return serializer;
+        }
+
+        @Override
+        public void serialize(QueryRecord record, DataOutputStream out) throws IOException {
+            String jsonStr = JsonUtil.writeValueAsString(record);
+            out.writeUTF(jsonStr);
+        }
+
+        @Override
+        public QueryRecord deserialize(DataInputStream in) throws IOException {
+            String jsonStr = in.readUTF();
+            return JsonUtil.readValue(jsonStr, QueryRecord.class);
+        }
+    }
+
+}
+
+@SuppressWarnings("serial")
+class QueryRecord extends RootPersistentEntity {
+
+    @JsonProperty()
+    private Query[] queries;
+
+    public QueryRecord() {
+
+    }
+
+    public QueryRecord(Query[] queries) {
+        this.queries = queries;
+    }
+
+    public Query[] getQueries() {
+        return queries;
+    }
+
+    public void setQueries(Query[] queries) {
+        this.queries = queries;
+    }
+
 }
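
For orientation, a rough sketch of the new saved-query persistence path as it would look from inside QueryService (the creator value and the added query are assumptions; QUERY_STORE_PATH_PREFIX, putResource/getResource and QueryRecordSerializer come from the diff above):

    // Illustrative only -- not part of the commit.
    void saveAndReloadSketch(Query query, String creator) throws IOException {
        List<Query> queries = getQueries(creator);              // existing saved queries, possibly empty
        queries.add(query);
        QueryRecord record = new QueryRecord(queries.toArray(new Query[queries.size()]));

        String path = QUERY_STORE_PATH_PREFIX + creator;        // e.g. "/query/ADMIN"
        queryStore.deleteResource(path);                        // drop the previous record, if any
        queryStore.putResource(path, record, 0, QueryRecordSerializer.getInstance());

        // Reading it back goes through the same JSON-based serializer.
        QueryRecord loaded = queryStore.getResource(path, QueryRecord.class, QueryRecordSerializer.getInstance());
    }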

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/server/src/main/resources/applicationContext.xml
----------------------------------------------------------------------
diff --git a/server/src/main/resources/applicationContext.xml b/server/src/main/resources/applicationContext.xml
index 100b202..8416f25 100644
--- a/server/src/main/resources/applicationContext.xml
+++ b/server/src/main/resources/applicationContext.xml
@@ -112,11 +112,4 @@
               p:configLocation="classpath:ehcache-test.xml" p:shared="true"/>
     </beans>
 
-    <!-- hbase storage/global lock Config -->
-    <beans profile="ldap,saml">
-        <bean id="aclHBaseStorage" class="org.apache.kylin.rest.security.RealAclHBaseStorage"/>
-    </beans>
-    <beans profile="testing">
-        <bean id="aclHBaseStorage" class="org.apache.kylin.rest.security.MockAclHBaseStorage"/>
-    </beans>
 </beans>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 81349ef..615c845 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -67,10 +67,15 @@ public class HBaseResourceStore extends ResourceStore {
     private static final Logger logger = LoggerFactory.getLogger(HBaseResourceStore.class);
 
     private static final String FAMILY = "f";
+
     private static final byte[] B_FAMILY = Bytes.toBytes(FAMILY);
+
     private static final String COLUMN = "c";
+
     private static final byte[] B_COLUMN = Bytes.toBytes(COLUMN);
+
     private static final String COLUMN_TS = "t";
+
     private static final byte[] B_COLUMN_TS = Bytes.toBytes(COLUMN_TS);
 
     final String tableName;
@@ -82,10 +87,9 @@ public class HBaseResourceStore extends ResourceStore {
 
     public HBaseResourceStore(KylinConfig kylinConfig) throws IOException {
         super(kylinConfig);
-
         metadataUrl = buildMetadataUrl(kylinConfig);
         tableName = metadataUrl.getIdentifier();
-        createHTableIfNeeded(getAllInOneTableName());
+        createHTableIfNeeded(tableName);
     }
 
     private StorageURL buildMetadataUrl(KylinConfig kylinConfig) throws IOException {
@@ -107,10 +111,6 @@ public class HBaseResourceStore extends ResourceStore {
         HBaseConnection.createHTableIfNeeded(getConnection(), tableName, FAMILY);
     }
 
-    private String getAllInOneTableName() {
-        return tableName;
-    }
-
     @Override
     protected boolean existsImpl(String resPath) throws IOException {
         Result r = getFromHTable(resPath, false, false);
@@ -164,7 +164,7 @@ public class HBaseResourceStore extends ResourceStore {
         byte[] endRow = Bytes.toBytes(lookForPrefix);
         endRow[endRow.length - 1]++;
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        Table table = getConnection().getTable(TableName.valueOf(tableName));
         Scan scan = new Scan(startRow, endRow);
         if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
             scan.addColumn(B_FAMILY, B_COLUMN_TS);
@@ -288,7 +288,7 @@ public class HBaseResourceStore extends ResourceStore {
         IOUtils.copy(content, bout);
         bout.close();
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        Table table = getConnection().getTable(TableName.valueOf(tableName));
         try {
             byte[] row = Bytes.toBytes(resPath);
             Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
@@ -302,7 +302,7 @@ public class HBaseResourceStore extends ResourceStore {
     @Override
     protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS)
             throws IOException, IllegalStateException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        Table table = getConnection().getTable(TableName.valueOf(tableName));
         try {
             byte[] row = Bytes.toBytes(resPath);
             byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -325,7 +325,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected void deleteResourceImpl(String resPath) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        Table table = getConnection().getTable(TableName.valueOf(tableName));
         try {
             boolean hdfsResourceExist = false;
             Result result = internalGetFromHTable(table, resPath, true, false);
@@ -354,11 +354,11 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected String getReadableResourcePathImpl(String resPath) {
-        return getAllInOneTableName() + "(key='" + resPath + "')@" + kylinConfig.getMetadataUrl();
+        return tableName + "(key='" + resPath + "')@" + kylinConfig.getMetadataUrl();
     }
 
     private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        Table table = getConnection().getTable(TableName.valueOf(tableName));
         try {
             return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
         } finally {
@@ -429,6 +429,6 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     public String toString() {
-        return getAllInOneTableName() + "@hbase";
+        return tableName + "@hbase";
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
index 991a750..6e7890b 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
@@ -30,7 +30,7 @@ import org.apache.kylin.job.lock.JobLock;
 public class ZookeeperJobLock implements DistributedLock, JobLock {
 
     private ZookeeperDistributedLock lock = (ZookeeperDistributedLock) new ZookeeperDistributedLock.Factory().lockForCurrentProcess();
-    
+
     @Override
     public String getClient() {
         return lock.getClient();
@@ -60,7 +60,7 @@ public class ZookeeperJobLock implements DistributedLock, JobLock {
     public boolean isLockedByMe(String lockPath) {
         return lock.isLockedByMe(lockPath);
     }
-    
+
     @Override
     public void unlock(String lockPath) {
         lock.unlock(lockPath);
@@ -70,6 +70,7 @@ public class ZookeeperJobLock implements DistributedLock, JobLock {
     public void purgeLocks(String lockPathRoot) {
         lock.purgeLocks(lockPathRoot);
     }
+
     @Override
     public Closeable watchLocks(String lockPathRoot, Executor executor, Watcher watcher) {
         return lock.watchLocks(lockPathRoot, executor, watcher);

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
index b5ebe89..20569d3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperUtil.java
@@ -18,35 +18,17 @@
 
 package org.apache.kylin.storage.hbase.util;
 
-import java.util.Arrays;
-
-import javax.annotation.Nullable;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
+import org.apache.kylin.common.KylinConfig;
 
 public class ZookeeperUtil {
 
+    public static String ZOOKEEPER_UTIL_HBASE_CLASSNAME = "org.apache.kylin.storage.hbase.util.ZooKeeperUtilHbase";
+
     /**
-     * Get zookeeper connection string from HBase Configuration
-     *
-     * @return Zookeeper Connection string
+     * Get zookeeper connection string from HBase Configuration or from kylin.properties
      */
     public static String getZKConnectString() {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        final String serverList = conf.get(HConstants.ZOOKEEPER_QUORUM);
-        final String port = conf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
-        return StringUtils.join(Iterables.transform(Arrays.asList(serverList.split(",")), new Function<String, String>() {
-            @Nullable
-            @Override
-            public String apply(String input) {
-                return input + ":" + port;
-            }
-        }), ",");
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        return config.getZookeeperConnectString();
     }
-}
+}
\ No newline at end of file
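
For reference, a minimal sketch of what callers now get from the simplified helper (it assumes the ZooKeeper quorum is configured in kylin.properties; KylinConfig.getZookeeperConnectString and ZookeeperUtil.getZKConnectString come from the diff above):

    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.storage.hbase.util.ZookeeperUtil;

    public class ZkConnectSketch {
        public static void main(String[] args) {
            // Both calls now read kylin.properties instead of the HBase Configuration.
            KylinConfig config = KylinConfig.getInstanceFromEnv();
            System.out.println(config.getZookeeperConnectString());   // e.g. "host1:2181,host2:2181"
            System.out.println(ZookeeperUtil.getZKConnectString());   // same value, via the static helper
        }
    }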

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
index fe1ad4e..d185f4e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hdfs/HDFSResourceStore.java
@@ -40,6 +40,7 @@ import org.apache.kylin.common.util.HadoopUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
 public class HDFSResourceStore extends ResourceStore {
@@ -50,13 +51,13 @@ public class HDFSResourceStore extends ResourceStore {
 
     private FileSystem fs;
 
+    private static final String HDFS_SCHEME = "hdfs";
+
     public HDFSResourceStore(KylinConfig kylinConfig) throws Exception {
         super(kylinConfig);
         StorageURL metadataUrl = kylinConfig.getMetadataUrl();
+        Preconditions.checkState(HDFS_SCHEME.equals(metadataUrl.getScheme()));
         
-        if (!metadataUrl.getScheme().equals("hdfs"))
-            throw new IOException("kylin.metadata.url not recognized for HDFSResourceStore:" + metadataUrl);
-
         String path = metadataUrl.getIdentifier();
         fs = HadoopUtil.getFileSystem(path);
         Path metadataPath = new Path(path);

http://git-wip-us.apache.org/repos/asf/kylin/blob/afaa95a0/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
index e1f994f..0fdc740 100644
--- a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
@@ -20,6 +20,7 @@ package org.apache.kylin.tool;
 
 import java.io.File;
 import java.io.IOException;
+import java.lang.reflect.Method;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
@@ -31,6 +32,7 @@ import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.project.ProjectManager;
@@ -159,11 +161,13 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 public void run() {
                     logger.info("Start to extract HBase usage.");
                     try {
+                        // use reflection to isolate NoClassDef errors when HBase is not available
                         String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-submodule", "true" };
-                        HBaseUsageExtractor hBaseUsageExtractor = new HBaseUsageExtractor();
                         logger.info("HBaseUsageExtractor args: " + Arrays.toString(hbaseArgs));
-                        hBaseUsageExtractor.execute(hbaseArgs);
-                    } catch (Exception e) {
+                        Object extractor = ClassUtil.newInstance("org.apache.kylin.tool.HBaseUsageExtractor");
+                        Method execute = extractor.getClass().getDeclaredMethod("execute", String[].class);
+                        execute.invoke(extractor, (Object) hbaseArgs);
+                    } catch (Throwable e) {
                         logger.error("Error in export HBase usage.", e);
                     }
                 }