You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@kylin.apache.org by GitBox <gi...@apache.org> on 2018/11/30 11:48:47 UTC

[GitHub] shaofengshi closed pull request #368: KYLIN-3187 Forbid calls to JDK APIs that use the default locale

shaofengshi closed pull request #368: KYLIN-3187 Forbid calls to JDK APIs that use the default locale
URL: https://github.com/apache/kylin/pull/368
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as GitHub does not otherwise display diffs from forks after merge):

diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
index 524c2e428e..cda850fab4 100644
--- a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
@@ -22,12 +22,14 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
-import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
@@ -76,7 +78,7 @@ public static void deployMetadata(String localMetaData) throws IOException {
             CubeDescManager.getInstance(config()).updateCubeDesc(cube.getDescriptor());//enforce signature updating
         }
     }
-    
+
     public static void deployMetadata() throws IOException {
         deployMetadata(LocalFileMetadataTestCase.LOCALMETA_TEST_DATA);
     }
@@ -92,7 +94,8 @@ public static void overrideJobJarLocations() {
     private static String getPomVersion() {
         try {
             MavenXpp3Reader pomReader = new MavenXpp3Reader();
-            Model model = pomReader.read(new FileReader("../pom.xml"));
+            Model model = pomReader
+                    .read(new InputStreamReader(new FileInputStream("../pom.xml"), StandardCharsets.UTF_8));
             return model.getVersion();
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
@@ -138,9 +141,11 @@ public static void prepareTestDataForNormalCubes(String modelName) throws Except
         deployTables(modelName);
     }
 
-    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords, String cubeName, StreamDataLoader streamDataLoader) throws IOException {
+    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords,
+            String cubeName, StreamDataLoader streamDataLoader) throws IOException {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
-        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime, cubeInstance.getRootFactTable(), cubeInstance.getProject());
+        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime,
+                cubeInstance.getRootFactTable(), cubeInstance.getProject());
         //load into kafka
         streamDataLoader.loadIntoKafka(data);
         logger.info("Write {} messages into {}", data.size(), streamDataLoader.toString());
@@ -151,7 +156,8 @@ public static void prepareTestDataForStreamingCube(long startTime, long endTime,
         TimedJsonStreamParser timedJsonStreamParser = new TimedJsonStreamParser(tableColumns, null);
         StringBuilder sb = new StringBuilder();
         for (String json : data) {
-            List<String> rowColumns = timedJsonStreamParser.parse(ByteBuffer.wrap(json.getBytes())).get(0).getData();
+            List<String> rowColumns = timedJsonStreamParser
+                    .parse(ByteBuffer.wrap(json.getBytes(StandardCharsets.UTF_8))).get(0).getData();
             sb.append(StringUtils.join(rowColumns, ","));
             sb.append(System.getProperty("line.separator"));
         }
@@ -200,26 +206,26 @@ private static void deployTables(String modelName) throws Exception {
 
         Set<TableRef> tables = model.getAllTables();
         Set<String> TABLE_NAMES = new HashSet<String>();
-        for (TableRef tr:tables){
-            if (!tr.getTableDesc().isView()){
+        for (TableRef tr : tables) {
+            if (!tr.getTableDesc().isView()) {
                 String tableName = tr.getTableName();
                 String schema = tr.getTableDesc().getDatabase();
-                String identity = String.format("%s.%s", schema, tableName);
+                String identity = String.format(Locale.ROOT, "%s.%s", schema, tableName);
                 TABLE_NAMES.add(identity);
             }
         }
         TABLE_NAMES.add(TABLE_SELLER_TYPE_DIM_TABLE); // the wrapper view VIEW_SELLER_TYPE_DIM need this table
-        
+
         // scp data files, use the data from hbase, instead of local files
         File tempDir = Files.createTempDir();
         String tempDirAbsPath = tempDir.getAbsolutePath();
         for (String tablename : TABLE_NAMES) {
-            tablename = tablename.toUpperCase();
+            tablename = tablename.toUpperCase(Locale.ROOT);
 
             File localBufferFile = new File(tempDirAbsPath + "/" + tablename + ".csv");
             localBufferFile.createNewFile();
 
-            logger.info(String.format("get resource from hbase:/data/%s.csv", tablename));
+            logger.info(String.format(Locale.ROOT, "get resource from hbase:/data/%s.csv", tablename));
             InputStream hbaseDataStream = metaMgr.getStore().getResource("/data/" + tablename + ".csv").inputStream;
             FileOutputStream localFileStream = new FileOutputStream(localBufferFile);
             IOUtils.copy(hbaseDataStream, localFileStream);
@@ -233,21 +239,21 @@ private static void deployTables(String modelName) throws Exception {
 
         ISampleDataDeployer sampleDataDeployer = SourceManager.getSource(model.getRootFactTable().getTableDesc())
                 .getSampleDataDeployer();
-        
+
         // create hive tables
         sampleDataDeployer.createSampleDatabase("EDW");
         for (String tablename : TABLE_NAMES) {
-            logger.info(String.format("get table desc %s", tablename));
+            logger.info(String.format(Locale.ROOT, "get table desc %s", tablename));
             sampleDataDeployer.createSampleTable(metaMgr.getTableDesc(tablename, model.getProject()));
         }
 
         // load data to hive tables
         // LOAD DATA LOCAL INPATH 'filepath' [OVERWRITE] INTO TABLE tablename
         for (String tablename : TABLE_NAMES) {
-            logger.info(String.format("load data into %s", tablename));
+            logger.info(String.format(Locale.ROOT, "load data into %s", tablename));
             sampleDataDeployer.loadSampleData(tablename, tempDirAbsPath);
         }
-        
+
         // create the view automatically here
         sampleDataDeployer.createWrapperView(TABLE_SELLER_TYPE_DIM_TABLE, VIEW_SELLER_TYPE_DIM);
     }
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
index 1fef07e888..59b6ee6366 100644
--- a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
@@ -20,6 +20,7 @@
 
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 
 import org.apache.kylin.common.KylinConfig;
@@ -66,7 +67,7 @@
             kvs.clear();
             kvs.put("timestamp", String.valueOf(time));
             for (ColumnDesc columnDesc : tableDesc.getColumns()) {
-                String lowerCaseColumnName = columnDesc.getName().toLowerCase();
+                String lowerCaseColumnName = columnDesc.getName().toLowerCase(Locale.ROOT);
                 DataType dataType = columnDesc.getType();
                 if (dataType.isDateTimeFamily()) {
                     //TimedJsonStreamParser will derived minute_start,hour_start,day_start from timestamp
@@ -78,7 +79,7 @@
                     int v = r.nextInt(10000);
                     kvs.put(lowerCaseColumnName, String.valueOf(v));
                 } else if (dataType.isNumberFamily()) {
-                    String v = String.format("%.4f", r.nextDouble() * 100);
+                    String v = String.format(Locale.ROOT, "%.4f", r.nextDouble() * 100);
                     kvs.put(lowerCaseColumnName, v);
                 }
             }
diff --git a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
index 426ebb9b26..cbe2e4c0c5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
@@ -73,13 +73,14 @@ private void init(InputStream is) {
         for (Entry<Object, Object> kv : props.entrySet()) {
             String key = (String) kv.getKey();
             String value = (String) kv.getValue();
-            
+
             if (key.equals(value))
                 continue; // no change
-            
+
             if (value.contains(key))
-                throw new IllegalStateException("New key '" + value + "' contains old key '" + key + "' causes trouble to repeated find & replace");
-            
+                throw new IllegalStateException("New key '" + value + "' contains old key '" + key
+                        + "' causes trouble to repeated find & replace");
+
             if (value.endsWith("."))
                 old2newPrefix.put(key, value);
             else
@@ -122,7 +123,7 @@ public Properties check(Properties props) {
         return result;
     }
 
-    public OrderedProperties check(OrderedProperties props){
+    public OrderedProperties check(OrderedProperties props) {
         OrderedProperties result = new OrderedProperties();
         for (Entry<String, String> kv : props.entrySet()) {
             result.setProperty(check(kv.getKey()), kv.getValue());
@@ -147,7 +148,7 @@ private static void generateFindAndReplaceScript(String kylinRepoPath, String ou
         // generate sed file
         File sedFile = new File(outputDir, "upgrade-old-config.sed");
         try {
-            out = new PrintWriter(sedFile);
+            out = new PrintWriter(sedFile, "UTF-8");
             for (Entry<String, String> e : bcc.old2new.entrySet()) {
                 out.println("s/" + quote(e.getKey()) + "/" + e.getValue() + "/g");
             }
@@ -161,7 +162,7 @@ private static void generateFindAndReplaceScript(String kylinRepoPath, String ou
         // generate sh file
         File shFile = new File(outputDir, "upgrade-old-config.sh");
         try {
-            out = new PrintWriter(shFile);
+            out = new PrintWriter(shFile, "UTF-8");
             out.println("#!/bin/bash");
             Stack<File> stack = new Stack<>();
             stack.push(repoDir);
@@ -180,7 +181,7 @@ private static void generateFindAndReplaceScript(String kylinRepoPath, String ou
         } finally {
             IOUtils.closeQuietly(out);
         }
-        
+
         System.out.println("Files generated:");
         System.out.println(shFile);
         System.out.println(sedFile);
@@ -211,6 +212,7 @@ else if (name.equals("KylinConfigTest.java"))
         else if (name.endsWith("-site.xml"))
             return false;
         else
-            return name.endsWith(".java") || name.endsWith(".js") || name.endsWith(".sh") || name.endsWith(".properties") || name.endsWith(".xml");
+            return name.endsWith(".java") || name.endsWith(".js") || name.endsWith(".sh")
+                    || name.endsWith(".properties") || name.endsWith(".xml");
     }
 }
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 0041402d80..a43377dfaf 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -23,6 +23,7 @@
 import java.io.Serializable;
 import java.util.Collection;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -773,7 +774,7 @@ public String getHivePassword() {
     }
 
     public String getOverrideHiveTableLocation(String table) {
-        return getOptional("kylin.source.hive.table-location." + table.toUpperCase());
+        return getOptional("kylin.source.hive.table-location." + table.toUpperCase(Locale.ROOT));
     }
 
     public boolean isHiveKeepFlatTable() {
@@ -1173,7 +1174,6 @@ public String getKylinJobMRLibDir() {
         return getPropertiesByPrefix("kylin.engine.spark-conf-" + configName + ".");
     }
 
-
     public double getDefaultHadoopJobReducerInputMB() {
         return Double.parseDouble(getOptional("kylin.engine.mr.reduce-input-mb", "500"));
     }
@@ -1708,7 +1708,7 @@ public boolean isKylinMetricsReporterForJobEnabled() {
     }
 
     public String getKylinMetricsPrefix() {
-        return getOptional("kylin.metrics.prefix", "KYLIN").toUpperCase();
+        return getOptional("kylin.metrics.prefix", "KYLIN").toUpperCase(Locale.ROOT);
     }
 
     public String getKylinMetricsActiveReservoirDefaultClass() {
diff --git a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
index cd1c2b15d6..e03d0e860c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
+++ b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.lock;
 
 import java.lang.management.ManagementFactory;
+import java.nio.charset.StandardCharsets;
 
 public abstract class DistributedLockFactory {
 
@@ -35,9 +36,9 @@ public DistributedLock lockForCurrentProcess() {
     private static String threadProcessAndHost() {
         return Thread.currentThread().getId() + "-" + processAndHost();
     }
-    
+
     private static String processAndHost() {
-        byte[] bytes = ManagementFactory.getRuntimeMXBean().getName().getBytes();
-        return new String(bytes);
+        byte[] bytes = ManagementFactory.getRuntimeMXBean().getName().getBytes(StandardCharsets.UTF_8);
+        return new String(bytes, StandardCharsets.UTF_8);
     }
 }
diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java b/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java
index 656a478488..8e3315209f 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java
@@ -18,21 +18,23 @@
 
 package org.apache.kylin.common.metrics.common;
 
+import java.util.Locale;
+
 public final class MetricsNameBuilder {
     public final static String METRICS = "metrics:";
     public final static String PROJECT_TEMPLATE = METRICS + "project=%s";
     public final static String CUBE_TEMPLATE = METRICS + "project=%s,cube=%s";
 
     public static String buildMetricName(String prefix, String name) {
-        return String.format(prefix + ",name=%s", name);
+        return String.format(Locale.ROOT, prefix + ",name=%s", name);
     }
 
     public static String buildCubeMetricPrefix(String project) {
-        return String.format(PROJECT_TEMPLATE, project);
+        return String.format(Locale.ROOT, PROJECT_TEMPLATE, project);
     }
 
     public static String buildCubeMetricPrefix(String project, String cube) {
-        return String.format(CUBE_TEMPLATE, project, cube);
+        return String.format(Locale.ROOT, CUBE_TEMPLATE, project, cube);
     }
 
 }
diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
index e728759c1c..7487930bd0 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
@@ -24,6 +24,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -450,7 +451,7 @@ private boolean initMetricsReporter() {
         MetricsReporting reporter = null;
         for (String metricsReportingName : metricsReporterNames) {
             try {
-                reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase());
+                reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase(Locale.ROOT));
             } catch (IllegalArgumentException e) {
                 LOGGER.error("Invalid reporter name " + metricsReportingName, e);
                 throw e;
diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java
index 7bacaf106b..95c51162c6 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -100,7 +101,8 @@ public void run() {
                     BufferedWriter bw = null;
                     try {
                         fs.delete(tmpPath, true);
-                        bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, true)));
+                        bw = new BufferedWriter(
+                                new OutputStreamWriter(fs.create(tmpPath, true), StandardCharsets.UTF_8));
                         bw.write(json);
                         fs.setPermission(tmpPath, FsPermission.createImmutable((short) 0644));
                     } catch (IOException e) {
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
index 450eb57c9a..ca3809114f 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.NavigableSet;
 import java.util.Set;
@@ -141,7 +142,7 @@ public static String cat(KylinConfig config, String path) throws IOException {
         StringBuffer sb = new StringBuffer();
         String line;
         try {
-            br = new BufferedReader(new InputStreamReader(is));
+            br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
             while ((line = br.readLine()) != null) {
                 System.out.println(line);
                 sb.append(line).append('\n');
@@ -202,8 +203,8 @@ public static void copy(KylinConfig srcConfig, KylinConfig dstConfig, boolean co
         copy(srcConfig, dstConfig, "/", copyImmutableResource);
     }
 
-    public static void copyR(ResourceStore src, ResourceStore dst, String path, TreeSet<String> pathsSkipChildrenCheck, boolean copyImmutableResource)
-            throws IOException {
+    public static void copyR(ResourceStore src, ResourceStore dst, String path, TreeSet<String> pathsSkipChildrenCheck,
+            boolean copyImmutableResource) throws IOException {
 
         if (!copyImmutableResource && IMMUTABLE_PREFIX.contains(path)) {
             return;
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java b/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
index a4877f2e28..a8c61423e6 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Locale;
 
 import org.apache.commons.lang.time.FastDateFormat;
 import org.apache.kylin.common.KylinVersion;
@@ -45,7 +46,7 @@
 
     static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss z";
     static FastDateFormat format = FastDateFormat.getInstance(DATE_PATTERN);
-    static DateFormat df = new SimpleDateFormat(DATE_PATTERN);
+    static DateFormat df = new SimpleDateFormat(DATE_PATTERN, Locale.ROOT);
 
     public static String formatTime(long millis) {
         return format.format(millis);
@@ -58,7 +59,7 @@ public static String formatTime(long millis) {
 
     @JsonProperty("last_modified")
     protected long lastModified;
-    
+
     // if cached and shared, the object MUST NOT be modified (call setXXX() for example)
     protected boolean isCachedAndShared = false;
 
@@ -101,7 +102,7 @@ public void setLastModified(long lastModified) {
     public void updateRandomUuid() {
         setUuid(RandomUtil.randomUUID().toString());
     }
-    
+
     public boolean isCachedAndShared() {
         return isCachedAndShared;
     }
diff --git a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
index 11284f6401..9f95a7c316 100644
--- a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -283,7 +284,8 @@ public HttpResponse query(String sql, String project) throws IOException {
         return response;
     }
 
-    public void clearCacheForCubeMigration(String cube, String project, String model, Map<String, String> tableToProjects) throws IOException{
+    public void clearCacheForCubeMigration(String cube, String project, String model,
+            Map<String, String> tableToProjects) throws IOException {
         String url = baseUrl + "/cache/migration";
         HttpPost post = new HttpPost(url);
 
@@ -309,7 +311,8 @@ public void buildLookupSnapshotCache(String project, String lookupTableName, Str
         HttpResponse response = client.execute(put);
         getContent(response);
         if (response.getStatusLine().getStatusCode() != 200) {
-            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
+            throw new IOException(
+                    "Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
         }
     }
 
@@ -319,7 +322,8 @@ public String getLookupSnapshotCacheState(String lookupTableName, String snapsho
         HttpResponse response = client.execute(get);
         String content = getContent(response);
         if (response.getStatusLine().getStatusCode() != 200) {
-            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
+            throw new IOException(
+                    "Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
         }
         return content;
     }
@@ -336,7 +340,8 @@ private HashMap dealResponse(HttpResponse response) throws IOException {
     private void addHttpHeaders(HttpRequestBase method) {
         method.addHeader("Accept", "application/json, text/plain, */*");
         method.addHeader("Content-Type", "application/json");
-        String basicAuth = DatatypeConverter.printBase64Binary((this.userName + ":" + this.password).getBytes());
+        String basicAuth = DatatypeConverter
+                .printBase64Binary((this.userName + ":" + this.password).getBytes(StandardCharsets.UTF_8));
         method.addHeader("Authorization", "Basic " + basicAuth);
     }
 
@@ -384,7 +389,7 @@ private String getContent(HttpResponse response) throws IOException {
         BufferedReader rd = null;
         StringBuffer result = new StringBuffer();
         try {
-            reader = new InputStreamReader(response.getEntity().getContent());
+            reader = new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8);
             rd = new BufferedReader(reader);
             String line = null;
             while ((line = rd.readLine()) != null) {
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java b/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
index fc501ea9e1..33fc31a987 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
@@ -37,6 +37,7 @@
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -284,10 +285,11 @@ public static String toStringBinary(final byte[] b, int off, int len) {
             len = b.length - off;
         for (int i = off; i < off + len; ++i) {
             int ch = b[i] & 0xFF;
-            if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z') || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
+            if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z')
+                    || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
                 result.append((char) ch);
             } else {
-                result.append(String.format("\\x%02X", ch));
+                result.append(String.format(Locale.ROOT, "\\x%02X", ch));
             }
         }
         return result.toString();
@@ -441,12 +443,14 @@ public static long toLong(byte[] bytes, int offset, final int length) {
         }
     }
 
-    private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes, final int offset, final int length, final int expectedLength) {
+    private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes, final int offset,
+            final int length, final int expectedLength) {
         String reason;
         if (length != expectedLength) {
             reason = "Wrong length: " + length + ", expected " + expectedLength;
         } else {
-            reason = "offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: " + bytes.length;
+            reason = "offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: "
+                    + bytes.length;
         }
         return new IllegalArgumentException(reason);
     }
@@ -463,7 +467,8 @@ private static IllegalArgumentException explainWrongLengthOrOffset(final byte[]
      */
     public static int putLong(byte[] bytes, int offset, long val) {
         if (bytes.length - offset < SIZEOF_LONG) {
-            throw new IllegalArgumentException("Not enough room to put a long at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException(
+                    "Not enough room to put a long at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putLongUnsafe(bytes, offset, val);
@@ -489,7 +494,10 @@ public static int putLongUnsafe(byte[] bytes, int offset, long val) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Long.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putLong(bytes,
+                (long) offset
+                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
+                val);
         return offset + SIZEOF_LONG;
     }
 
@@ -645,9 +653,14 @@ public static int toInt(byte[] bytes, int offset, final int length) {
      */
     public static int toIntUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Integer.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Integer.reverseBytes(
+                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                            .getInt(bytes, (long) offset
+                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                    .getInt(bytes, (long) offset
+                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -660,9 +673,14 @@ public static int toIntUnsafe(byte[] bytes, int offset) {
      */
     public static short toShortUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Short.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Short.reverseBytes(
+                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                            .getShort(bytes, (long) offset
+                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                    .getShort(bytes, (long) offset
+                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -675,9 +693,14 @@ public static short toShortUnsafe(byte[] bytes, int offset) {
      */
     public static long toLongUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Long.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Long.reverseBytes(
+                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                            .getLong(bytes, (long) offset
+                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                    .getLong(bytes, (long) offset
+                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -693,7 +716,8 @@ public static long toLongUnsafe(byte[] bytes, int offset) {
      */
     public static int readAsInt(byte[] bytes, int offset, final int length) {
         if (offset + length > bytes.length) {
-            throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: " + bytes.length);
+            throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + ") exceed the"
+                    + " capacity of the array: " + bytes.length);
         }
         int n = 0;
         for (int i = offset; i < (offset + length); i++) {
@@ -715,7 +739,8 @@ public static int readAsInt(byte[] bytes, int offset, final int length) {
      */
     public static int putInt(byte[] bytes, int offset, int val) {
         if (bytes.length - offset < SIZEOF_INT) {
-            throw new IllegalArgumentException("Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException(
+                    "Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putIntUnsafe(bytes, offset, val);
@@ -741,7 +766,10 @@ public static int putIntUnsafe(byte[] bytes, int offset, int val) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Integer.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putInt(bytes,
+                (long) offset
+                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
+                val);
         return offset + SIZEOF_INT;
     }
 
@@ -830,7 +858,8 @@ public static short toShort(byte[] bytes, int offset, final int length) {
      */
     public static int putShort(byte[] bytes, int offset, short val) {
         if (bytes.length - offset < SIZEOF_SHORT) {
-            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a "
+                    + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putShortUnsafe(bytes, offset, val);
@@ -854,7 +883,10 @@ public static int putShortUnsafe(byte[] bytes, int offset, short val) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Short.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putShort(bytes,
+                (long) offset
+                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
+                val);
         return offset + SIZEOF_SHORT;
     }
 
@@ -873,7 +905,8 @@ public static int putShortUnsafe(byte[] bytes, int offset, short val) {
      */
     public static int putAsShort(byte[] bytes, int offset, int val) {
         if (bytes.length - offset < SIZEOF_SHORT) {
-            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a "
+                    + bytes.length + " byte array");
         }
         bytes[offset + 1] = (byte) val;
         val >>= 8;
@@ -964,7 +997,8 @@ public static int compareTo(final byte[] left, final byte[] right) {
      * @return 0 if equal, < 0 if left is less than right, etc.
      */
     public static int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2, int length2) {
-        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(buffer1, offset1, length1, buffer2, offset2, length2);
+        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(buffer1, offset1, length1, buffer2, offset2,
+                length2);
     }
 
     interface Comparer<T> {
@@ -1118,8 +1152,8 @@ public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, i
                 }
                 final int minLength = Math.min(length1, length2);
                 final int minWords = minLength / SIZEOF_LONG;
-                final long offset1Adj = offset1 + (long)BYTE_ARRAY_BASE_OFFSET;
-                final long offset2Adj = offset2 + (long)BYTE_ARRAY_BASE_OFFSET;
+                final long offset1Adj = offset1 + (long) BYTE_ARRAY_BASE_OFFSET;
+                final long offset2Adj = offset2 + (long) BYTE_ARRAY_BASE_OFFSET;
 
                 /*
                  * Compare 8 bytes at a time. Benchmarking shows comparing 8 bytes at a
@@ -1202,7 +1236,8 @@ public static boolean equals(final byte[] left, final byte[] right) {
         return compareTo(left, right) == 0;
     }
 
-    public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset, int rightLen) {
+    public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset,
+            int rightLen) {
         // short circuit case
         if (left == right && leftOffset == rightOffset && leftLen == rightLen) {
             return true;
@@ -1221,7 +1256,8 @@ public static boolean equals(final byte[] left, int leftOffset, int leftLen, fin
         if (left[leftOffset + leftLen - 1] != right[rightOffset + rightLen - 1])
             return false;
 
-        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(left, leftOffset, leftLen, right, rightOffset, rightLen) == 0;
+        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(left, leftOffset, leftLen, right, rightOffset,
+                rightLen) == 0;
     }
 
     /**
@@ -1252,7 +1288,9 @@ public static boolean equals(byte[] a, ByteBuffer buf) {
      * array on the left.
      */
     public static boolean startsWith(byte[] bytes, byte[] prefix) {
-        return bytes != null && prefix != null && bytes.length >= prefix.length && LexicographicalComparerHolder.BEST_COMPARER.compareTo(bytes, 0, prefix.length, prefix, 0, prefix.length) == 0;
+        return bytes != null && prefix != null && bytes.length >= prefix.length
+                && LexicographicalComparerHolder.BEST_COMPARER.compareTo(bytes, 0, prefix.length, prefix, 0,
+                        prefix.length) == 0;
     }
 
     /**
@@ -1640,7 +1678,8 @@ public static int hashCode(byte[] bytes, int offset, int length) {
     public static void writeStringFixedSize(final DataOutput out, String s, int size) throws IOException {
         byte[] b = toBytes(s);
         if (b.length > size) {
-            throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b) + ") into a field of length " + size);
+            throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b)
+                    + ") into a field of length " + size);
         }
 
         out.writeBytes(s);
@@ -1926,7 +1965,7 @@ public static void random(byte[] b, int offset, int length) {
      */
     public static String toHex(byte[] b) {
         checkArgument(b.length > 0, "length must be greater than 0");
-        return String.format("%x", new BigInteger(1, b));
+        return String.format(Locale.ROOT, "%x", new BigInteger(1, b));
     }
 
     /**
@@ -1939,10 +1978,11 @@ public static String toHex(byte[] b) {
         checkArgument(hex.length() > 0, "length must be greater than 0");
         checkArgument(hex.length() % 2 == 0, "length must be a multiple of 2");
         // Make sure letters are upper case
-        hex = hex.toUpperCase();
+        hex = hex.toUpperCase(Locale.ROOT);
         byte[] b = new byte[hex.length() / 2];
         for (int i = 0; i < b.length; i++) {
-            b[i] = (byte) ((toBinaryFromHex((byte) hex.charAt(2 * i)) << 4) + (toBinaryFromHex((byte) hex.charAt((2 * i + 1))) & 0xff));
+            b[i] = (byte) ((toBinaryFromHex((byte) hex.charAt(2 * i)) << 4)
+                    + (toBinaryFromHex((byte) hex.charAt((2 * i + 1))) & 0xff));
         }
         return b;
     }
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
index 4a54c480c3..faabbd1f10 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
@@ -20,6 +20,7 @@
 
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
+import java.util.Locale;
 
 import com.google.common.primitives.Shorts;
 
@@ -452,7 +453,7 @@ public static String toHex(byte[] array, int offset, int length) {
         StringBuilder sb = new StringBuilder(length * 4);
         for (int i = 0; i < length; i++) {
             int b = array[offset + i];
-            sb.append(String.format("\\x%02X", b & 0xFF));
+            sb.append(String.format(Locale.ROOT, "\\x%02X", b & 0xFF));
         }
         return sb.toString();
     }
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java b/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java
index 4389aaba38..3e87226688 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java
@@ -17,6 +17,8 @@
  */
 package org.apache.kylin.common.util;
 
+import java.util.Locale;
+
 /**
  * A string wrapper that makes .equals a caseInsensitive match
  * <p>
@@ -53,7 +55,7 @@ public boolean equals(Object o) {
 
     @Override
     public int hashCode() {
-        return (str != null) ? str.toUpperCase().hashCode() : 0;
+        return (str != null) ? str.toUpperCase(Locale.ROOT).hashCode() : 0;
     }
 
     @Override
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java b/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java
index 38b32d59a6..5fef77b25c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java
@@ -22,6 +22,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.FileUtils;
 import org.slf4j.LoggerFactory;
@@ -131,7 +132,8 @@ private void copyRemote(String localFile, String destDir) throws IOException {
         builder.redirectErrorStream(true);
         Process proc = builder.start();
 
-        BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()));
+        BufferedReader reader = new BufferedReader(
+                new InputStreamReader(proc.getInputStream(), StandardCharsets.UTF_8));
         String line;
         StringBuilder result = new StringBuilder();
         while ((line = reader.readLine()) != null && !Thread.currentThread().isInterrupted()) {
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java
index 482b949580..deb54d4749 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.common.util;
 
+import java.nio.charset.StandardCharsets;
 import org.apache.commons.codec.binary.Base64;
 
 import javax.crypto.Cipher;
@@ -35,7 +36,8 @@ public static String encrypt(String strToEncrypt) {
             Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
             final SecretKeySpec secretKey = new SecretKeySpec(key, "AES");
             cipher.init(Cipher.ENCRYPT_MODE, secretKey);
-            final String encryptedString = Base64.encodeBase64String(cipher.doFinal(strToEncrypt.getBytes()));
+            final String encryptedString = Base64.encodeBase64String(cipher.doFinal(strToEncrypt.getBytes(
+                StandardCharsets.UTF_8)));
             return encryptedString;
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
@@ -47,7 +49,7 @@ public static String decrypt(String strToDecrypt) {
             Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5PADDING");
             final SecretKeySpec secretKey = new SecretKeySpec(key, "AES");
             cipher.init(Cipher.DECRYPT_MODE, secretKey);
-            final String decryptedString = new String(cipher.doFinal(Base64.decodeBase64(strToDecrypt)));
+            final String decryptedString = new String(cipher.doFinal(Base64.decodeBase64(strToDecrypt)), StandardCharsets.UTF_8);
             return decryptedString;
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java b/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java
index 1c023aa8aa..c04952d5b7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.util;
 
 import java.util.ArrayList;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -47,7 +48,7 @@ public HiveCmdBuilder() {
     }
 
     public String build() {
-        HiveClientMode clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+        HiveClientMode clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase(Locale.ROOT));
         String beelineShell = kylinConfig.getHiveBeelineShell();
         String beelineParams = kylinConfig.getHiveBeelineParams();
         if (kylinConfig.getEnableSparkSqlForTableOps()) {
@@ -80,7 +81,7 @@ public String build() {
                     hql.append(statement);
                     hql.append("\n");
                 }
-                String createFileCmd = String.format(CREATE_HQL_TMP_FILE_TEMPLATE, tmpHqlPath, hql);
+                String createFileCmd = String.format(Locale.ROOT, CREATE_HQL_TMP_FILE_TEMPLATE, tmpHqlPath, hql);
                 buf.append(createFileCmd);
                 buf.append("\n");
                 buf.append(beelineShell);
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java b/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java
index 5e27d9d8e3..26729207bb 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java
@@ -29,6 +29,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.IOUtils;
 import org.slf4j.LoggerFactory;
@@ -93,7 +94,7 @@ public void scpFileToRemote(String localFile, String remoteTargetDirectory) thro
                 // The access time should be sent here,
                 // but it is not accessible with JavaAPI ;-<
                 command += (" " + (_lfile.lastModified() / 1000) + " 0\n");
-                out.write(command.getBytes());
+                out.write(command.getBytes(StandardCharsets.UTF_8));
                 out.flush();
                 if (checkAck(in) != 0) {
                     throw new Exception("Error in checkAck()");
@@ -111,7 +112,7 @@ public void scpFileToRemote(String localFile, String remoteTargetDirectory) thro
                 command += localFile;
             }
             command += "\n";
-            out.write(command.getBytes());
+            out.write(command.getBytes(StandardCharsets.UTF_8));
             out.flush();
             if (checkAck(in) != 0) {
                 throw new Exception("Error in checkAck()");
@@ -200,7 +201,7 @@ public void scpFileToLocal(String rfile, String lfile) throws Exception {
                 for (int i = 0;; i++) {
                     in.read(buf, i, 1);
                     if (buf[i] == (byte) 0x0a) {
-                        file = new String(buf, 0, i);
+                        file = new String(buf, 0, i, StandardCharsets.UTF_8);
                         break;
                     }
                 }
@@ -288,7 +289,7 @@ public SSHClientOutput execCommand(String command, int timeoutSeconds, Logger lo
                     if (i < 0)
                         break;
 
-                    String line = new String(tmp, 0, i);
+                    String line = new String(tmp, 0, i, StandardCharsets.UTF_8);
                     text.append(line);
                     if (logAppender != null) {
                         logAppender.log(line);
@@ -299,7 +300,7 @@ public SSHClientOutput execCommand(String command, int timeoutSeconds, Logger lo
                     if (i < 0)
                         break;
 
-                    String line = new String(tmp, 0, i);
+                    String line = new String(tmp, 0, i, StandardCharsets.UTF_8);
                     text.append(line);
                     if (logAppender != null) {
                         logAppender.log(line);
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java b/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java
index 9e5197ed24..699445a888 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.util;
 
 import java.io.Closeable;
+import java.util.Locale;
 
 /**
  *
@@ -39,7 +40,7 @@
 
     public SetThreadName(String format, Object... args) {
         originThreadName = Thread.currentThread().getName();
-        Thread.currentThread().setName(String.format(format, args) + "-" + Thread.currentThread().getId());
+        Thread.currentThread().setName(String.format(Locale.ROOT, format, args) + "-" + Thread.currentThread().getId());
     }
 
     @Override
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
index e67d756938..0b94d9c8d7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.Collection;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 
 /**
@@ -79,7 +80,7 @@ public static void toUpperCaseArray(String[] source, String[] target) {
         if (source != null) {
             for (int i = 0; i < source.length; i++) {
                 if (source[i] != null) {
-                    target[i] = source[i].toUpperCase();
+                    target[i] = source[i].toUpperCase(Locale.ROOT);
                 }
             }
         }
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java
index 7a24c100de..73209f5dfd 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.util;
 
 import java.util.Calendar;
+import java.util.Locale;
 import java.util.TimeZone;
 
 /**
@@ -43,14 +44,14 @@ public static long getDayStart(long ts) {
     }
 
     public static long getWeekStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(getDayStart(ts));
         calendar.add(Calendar.DAY_OF_WEEK, calendar.getFirstDayOfWeek() - calendar.get(Calendar.DAY_OF_WEEK));
         return calendar.getTimeInMillis();
     }
 
     public static long getMonthStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(ts);
         int year = calendar.get(Calendar.YEAR);
         int month = calendar.get(Calendar.MONTH);
@@ -60,7 +61,7 @@ public static long getMonthStart(long ts) {
     }
 
     public static long getQuarterStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(ts);
         int year = calendar.get(Calendar.YEAR);
         int month = calendar.get(Calendar.MONTH);
@@ -70,7 +71,7 @@ public static long getQuarterStart(long ts) {
     }
 
     public static long getYearStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(ts);
         int year = calendar.get(Calendar.YEAR);
         calendar.clear();
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
index e53ad1697b..f67b2a27eb 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
@@ -20,6 +20,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.commons.compress.archivers.ArchiveException;
 import org.apache.commons.compress.archivers.ArchiveStreamFactory;
@@ -53,6 +54,6 @@ public static void decompressZipfileToDirectory(String zipFileName, File outputF
     }
 
     private static boolean validateZipFilename(String filename) {
-        return !StringUtils.isEmpty(filename) && filename.trim().toLowerCase().endsWith(".zip");
+        return !StringUtils.isEmpty(filename) && filename.trim().toLowerCase(Locale.ROOT).endsWith(".zip");
     }
 }
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java b/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
index 4b81daf9c8..e4c65fbc18 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
@@ -28,6 +28,8 @@
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -55,19 +57,6 @@
 public class BasicTest {
     protected static final org.slf4j.Logger logger = LoggerFactory.getLogger(BasicTest.class);
 
-    private void log(ByteBuffer a) {
-        Integer x = 4;
-        foo(x);
-    }
-
-    private void foo(Long a) {
-        System.out.printf("a");
-    }
-
-    private void foo(Integer b) {
-        System.out.printf("b");
-    }
-
     private enum MetricType {
         Count, DimensionAsMetric, DistinctCount, Normal
     }
@@ -185,9 +174,9 @@ public void test1() throws Exception {
         long current = System.currentTimeMillis();
         System.out.println(time(current));
 
-        Calendar a = Calendar.getInstance();
-        Calendar b = Calendar.getInstance();
-        Calendar c = Calendar.getInstance();
+        Calendar a = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
+        Calendar b = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
+        Calendar c = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         b.clear();
         c.clear();
 
@@ -244,8 +233,8 @@ public void testStringSplit() throws Exception {
     }
 
     private static String time(long t) {
-        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
-        Calendar cal = Calendar.getInstance();
+        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.ROOT);
+        Calendar cal = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         cal.setTimeInMillis(t);
         return dateFormat.format(cal.getTime());
     }
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java b/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
index 6dcdaf5fcb..f0326b59f1 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
@@ -19,8 +19,12 @@
 package org.apache.kylin.common.util;
 
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -35,7 +39,8 @@ public void testInstall() throws IOException {
         File folder = new File("/export/home/b_kylin/tmp");
         File out = new File("/export/home/b_kylin/tmp/out.sh");
         out.createNewFile();
-        FileWriter fw = new FileWriter(out);
+
+        Writer fw = new OutputStreamWriter(new FileOutputStream(out), StandardCharsets.UTF_8);
 
         for (File file : folder.listFiles()) {
             String name = file.getName();
@@ -53,7 +58,8 @@ public void testInstall() throws IOException {
             String artifactId = name.substring(0, match.start());
             String version = name.substring(match.start() + 1, lastDot);
 
-            fw.write(String.format("mvn install:install-file -Dfile=%s -DgroupId=%s -DartifactId=%s -Dversion=%s -Dpackaging=jar", name, "org.apache." + groupId, artifactId, version));
+            fw.write(String.format(Locale.ROOT, "mvn install:install-file -Dfile=%s -DgroupId=%s -DartifactId=%s "
+                    + "-Dversion=%s " + "-Dpackaging=jar", name, "org.apache." + groupId, artifactId, version));
             fw.write("\n");
         }
         fw.close();
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java b/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
index 15f54f9efe..ced71258a5 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
@@ -21,6 +21,7 @@
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
+import java.util.Locale;
 import java.util.TimeZone;
 
 import org.junit.Assert;
@@ -35,8 +36,8 @@
     }
 
     public static long normalizeTime(long timeMillis, NormalizedTimeUnit unit) {
-        Calendar a = Calendar.getInstance();
-        Calendar b = Calendar.getInstance();
+        Calendar a = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
+        Calendar b = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         b.clear();
 
         a.setTimeInMillis(timeMillis);
@@ -50,7 +51,7 @@ public static long normalizeTime(long timeMillis, NormalizedTimeUnit unit) {
 
     @Test
     public void basicTest() throws ParseException {
-        java.text.DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+        java.text.DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.ROOT);
         dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
 
         long t1 = dateFormat.parse("2012/01/01 00:00:01").getTime();
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
index a2127a8f96..90cce14d04 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
@@ -23,6 +23,7 @@
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.TimeZone;
@@ -150,7 +151,7 @@ public static String makeSegmentName(TSRange tsRange, SegmentRange segRange, Dat
         }
 
         // using time
-        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
+        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT);
         dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
         return dateFormat.format(tsRange.start.v) + "_" + dateFormat.format(tsRange.end.v);
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java
index f55c9dbb46..bcd4cf0d10 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.cube.cuboid;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum CuboidModeEnum {
@@ -39,7 +41,7 @@ public static CuboidModeEnum getByModeName(String modeName) {
             return null;
         }
         for (CuboidModeEnum mode : CuboidModeEnum.values()) {
-            if (mode.modeName.equals(modeName.toUpperCase())) {
+            if (mode.modeName.equals(modeName.toUpperCase(Locale.ROOT))) {
                 return mode;
             }
         }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java
index e29332585f..ea91c6cb81 100755
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java
@@ -18,14 +18,16 @@
 
 package org.apache.kylin.cube.cuboid.algorithm;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Map;
-import java.util.Set;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 /**
  * Calculate the benefit based on Benefit Per Unit Space.
@@ -132,8 +134,8 @@ private long getCuboidAggregationCost(long cuboid) {
     @Override
     public boolean ifEfficient(CuboidBenefitModel best) {
         if (best.getBenefit() < getMinBenefitRatio()) {
-            logger.info(String.format("The recommended cuboid %s doesn't meet minimum benifit ratio %f", best,
-                    getMinBenefitRatio()));
+            logger.info(String.format(Locale.ROOT, "The recommended cuboid %s doesn't meet minimum benifit ratio %f",
+                    best, getMinBenefitRatio()));
             return false;
         }
         return true;
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java
index 27d59fa88f..a512a5c00a 100755
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java
@@ -18,7 +18,10 @@
 
 package org.apache.kylin.cube.cuboid.algorithm.generic;
 
-import com.google.common.collect.Lists;
+import java.util.BitSet;
+import java.util.List;
+import java.util.Locale;
+
 import org.apache.commons.math3.genetics.Chromosome;
 import org.apache.commons.math3.genetics.ElitisticListPopulation;
 import org.apache.commons.math3.genetics.FixedGenerationCount;
@@ -30,8 +33,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.BitSet;
-import java.util.List;
+import com.google.common.collect.Lists;
 
 /**
  * Implementation of a genetic algorithm to recommend a list of cuboids.
@@ -101,10 +103,10 @@ public GeneticAlgorithm(final long timeout, BenefitPolicy benefitPolicy, CuboidS
             for (Long cuboid : finalList) {
                 Double unitSpace = cuboidStats.getCuboidSize(cuboid);
                 if (unitSpace != null) {
-                    logger.trace(String.format("cuboidId %d and Space: %f", cuboid, unitSpace));
+                    logger.trace(String.format(Locale.ROOT, "cuboidId %d and Space: %f", cuboid, unitSpace));
                     totalSpace += unitSpace;
                 } else {
-                    logger.trace(String.format("mandatory cuboidId %d", cuboid));
+                    logger.trace(String.format(Locale.ROOT, "mandatory cuboidId %d", cuboid));
                 }
             }
             logger.trace("Total Space:" + totalSpace);
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java
index 0f2dcc39fc..7f415de0bc 100755
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java
@@ -18,10 +18,14 @@
 
 package org.apache.kylin.cube.cuboid.algorithm.greedy;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.kylin.cube.cuboid.algorithm.AbstractRecommendAlgorithm;
 import org.apache.kylin.cube.cuboid.algorithm.BenefitPolicy;
 import org.apache.kylin.cube.cuboid.algorithm.CuboidBenefitModel;
@@ -29,12 +33,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicReference;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
  * A simple implementation of the Greedy Algorithm , it chooses the cuboids which give
@@ -100,7 +102,7 @@ public GreedyAlgorithm(final long timeout, BenefitPolicy benefitPolicy, CuboidSt
             benefitPolicy.propagateAggregationCost(best.getCuboidId(), selected);
             round++;
             if (logger.isTraceEnabled()) {
-                logger.trace(String.format("Recommend in round %d : %s", round, best.toString()));
+                logger.trace(String.format(Locale.ROOT, "Recommend in round %d : %s", round, best.toString()));
             }
         }
 
@@ -116,7 +118,7 @@ public GreedyAlgorithm(final long timeout, BenefitPolicy benefitPolicy, CuboidSt
             logger.trace("Excluded cuboidId size:" + excluded.size());
             logger.trace("Excluded cuboidId detail:");
             for (Long cuboid : excluded) {
-                logger.trace(String.format("cuboidId %d and Cost: %d and Space: %f", cuboid,
+                logger.trace(String.format(Locale.ROOT, "cuboidId %d and Cost: %d and Space: %f", cuboid,
                         cuboidStats.getCuboidQueryCost(cuboid), cuboidStats.getCuboidSize(cuboid)));
             }
             logger.trace("Total Space:" + (spaceLimit - remainingSpace));
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index f8889facda..45756fd73e 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -18,16 +18,30 @@
 
 package org.apache.kylin.cube.model;
 
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeSet;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.ArrayUtils;
@@ -65,27 +79,16 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.reflect.Method;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeSet;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 /**
  */
@@ -131,7 +134,7 @@ public String toString() {
         }
 
     }
-    
+
     // ============================================================================
 
     private KylinConfigExt config;
@@ -222,7 +225,7 @@ public String toString() {
     public String resourceName() {
         return name;
     }
-    
+
     public boolean isEnableSharding() {
         //in the future may extend to other storage that is shard-able
         return storageType != IStorageAware.ID_HBASE && storageType != IStorageAware.ID_HYBRID;
@@ -285,7 +288,7 @@ public TblColRef findColumnRef(String table, String column) {
     }
 
     public DimensionDesc findDimensionByTable(String lookupTableName) {
-        lookupTableName = lookupTableName.toUpperCase();
+        lookupTableName = lookupTableName.toUpperCase(Locale.ROOT);
         for (DimensionDesc dim : dimensions)
             if (dim.getTableRef() != null && dim.getTableRef().getTableIdentity().equals(lookupTableName))
                 return dim;
@@ -577,10 +580,10 @@ public String calculateSignature() {
                 }
             }
 
-            String signatureInput = sigString.toString().replaceAll("\\s+", "").toLowerCase();
+            String signatureInput = sigString.toString().replaceAll("\\s+", "").toLowerCase(Locale.ROOT);
 
-            byte[] signature = md.digest(signatureInput.getBytes());
-            String ret = new String(Base64.encodeBase64(signature));
+            byte[] signature = md.digest(signatureInput.getBytes(StandardCharsets.UTF_8));
+            String ret = new String(Base64.encodeBase64(signature), StandardCharsets.UTF_8);
             return ret;
         } catch (NoSuchAlgorithmException | JsonProcessingException e) {
             throw new RuntimeException("Failed to calculate signature");
@@ -652,7 +655,8 @@ public void init(KylinConfig config) {
                 Class<?> hbaseMappingAdapterClass = Class.forName(hbaseMappingAdapterName);
                 Method initMethod = hbaseMappingAdapterClass.getMethod("initHBaseMapping", CubeDesc.class);
                 initMethod.invoke(null, this);
-                Method initMeasureReferenceToColumnFamilyMethod = hbaseMappingAdapterClass.getMethod("initMeasureReferenceToColumnFamilyWithChecking", CubeDesc.class);
+                Method initMeasureReferenceToColumnFamilyMethod = hbaseMappingAdapterClass
+                        .getMethod("initMeasureReferenceToColumnFamilyWithChecking", CubeDesc.class);
                 initMeasureReferenceToColumnFamilyMethod.invoke(null, this);
             } catch (Exception e) {
                 throw new RuntimeException("Error during adapting hbase mapping", e);
@@ -844,7 +848,7 @@ public void validateNotifyList() {
         List<String> notifyList = getNotifyList();
         if (notifyList != null && !notifyList.isEmpty()) {
             EmailValidator emailValidator = EmailValidator.getInstance();
-            for (String email: notifyList) {
+            for (String email : notifyList) {
                 if (!emailValidator.isValid(email)) {
                     throw new IllegalArgumentException("Email [" + email + "] is not validation.");
                 }
@@ -1064,10 +1068,10 @@ private void initMeasureColumns() {
         }
 
         for (MeasureDesc m : measures) {
-            m.setName(m.getName().toUpperCase());
+            m.setName(m.getName().toUpperCase(Locale.ROOT));
 
             if (m.getDependentMeasureRef() != null) {
-                m.setDependentMeasureRef(m.getDependentMeasureRef().toUpperCase());
+                m.setDependentMeasureRef(m.getDependentMeasureRef().toUpperCase(Locale.ROOT));
             }
 
             FunctionDesc func = m.getFunction();
@@ -1215,7 +1219,7 @@ public void setRetentionRange(long retentionRange) {
     public void setAutoMergeTimeRanges(long[] autoMergeTimeRanges) {
         this.autoMergeTimeRanges = autoMergeTimeRanges;
     }
-    
+
     public boolean isBroken() {
         return !errors.isEmpty();
     }
@@ -1416,7 +1420,7 @@ public boolean isExtSnapshotTable(String tableName) {
         }
         return desc.isExtSnapshotTable();
     }
-    
+
     public List<String> getAllExtLookupSnapshotTypes() {
         List<String> result = Lists.newArrayList();
         for (SnapshotTableDesc snapshotTableDesc : snapshotTableDescList) {
@@ -1463,7 +1467,7 @@ public String getDictionaryBuilderClass(TblColRef col) {
         }
         return null;
     }
-    
+
     public List<TblColRef> getAllGlobalDictColumns() {
         List<TblColRef> globalDictCols = new ArrayList<TblColRef>();
         List<DictionaryDesc> dictionaryDescList = getDictionaries();
@@ -1474,7 +1478,8 @@ public String getDictionaryBuilderClass(TblColRef col) {
 
         for (DictionaryDesc dictionaryDesc : dictionaryDescList) {
             String cls = dictionaryDesc.getBuilderClass();
-            if (GlobalDictionaryBuilder.class.getName().equals(cls) || SegmentAppendTrieDictBuilder.class.getName().equals(cls))
+            if (GlobalDictionaryBuilder.class.getName().equals(cls)
+                    || SegmentAppendTrieDictBuilder.class.getName().equals(cls))
                 globalDictCols.add(dictionaryDesc.getColumnRef());
         }
         return globalDictCols;
@@ -1483,7 +1488,7 @@ public String getDictionaryBuilderClass(TblColRef col) {
     public boolean isShrunkenDictFromGlobalEnabled() {
         return config.isShrunkenDictFromGlobalEnabled() && !getAllGlobalDictColumns().isEmpty();
     }
-    
+
     // UHC (ultra high cardinality column): contain the ShardByColumns and the GlobalDictionaryColumns
     public List<TblColRef> getAllUHCColumns() {
         List<TblColRef> uhcColumns = new ArrayList<TblColRef>();
@@ -1492,7 +1497,6 @@ public boolean isShrunkenDictFromGlobalEnabled() {
         return uhcColumns;
     }
 
-
     public String getProject() {
         return getModel().getProject();
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
index 467a294d58..93def8b4c7 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
@@ -20,6 +20,7 @@
 
 import java.io.Serializable;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.cube.CubeSegment;
@@ -78,9 +79,9 @@ private CubeJoinedFlatTableDesc(CubeDesc cubeDesc, CubeSegment cubeSegment /* ca
 
     protected String makeTableName(CubeDesc cubeDesc, CubeSegment cubeSegment) {
         if (cubeSegment == null) {
-            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase();
+            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase(Locale.ROOT);
         } else {
-            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase() + "_"
+            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase(Locale.ROOT) + "_"
                     + cubeSegment.getUuid().replaceAll("-", "_");
         }
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java
index ca2183a8ce..a700e1001a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.cube.model;
 
+import java.util.Locale;
+
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
@@ -45,11 +47,11 @@
     void init(CubeDesc cubeDesc) {
         DataModelDesc model = cubeDesc.getModel();
 
-        column = column.toUpperCase();
+        column = column.toUpperCase(Locale.ROOT);
         colRef = model.findColumn(column);
 
         if (reuseColumn != null) {
-            reuseColumn = reuseColumn.toUpperCase();
+            reuseColumn = reuseColumn.toUpperCase(Locale.ROOT);
             reuseColRef = model.findColumn(reuseColumn);
         }
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
index c8fff26928..dbbd4e8e7d 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.cube.model;
 
 import java.util.Arrays;
+import java.util.Locale;
 
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.JoinDesc;
@@ -56,7 +57,7 @@ public void init(CubeDesc cubeDesc) {
         DataModelDesc model = cubeDesc.getModel();
 
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
 
         tableRef = model.findTable(table);
         table = tableRef.getAlias();
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
index 77b4b14cdb..be8ed6a301 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
@@ -21,14 +21,16 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedList;
+import java.util.Locale;
 
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
 
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 /**
  */
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
@@ -78,10 +80,10 @@ public void init(CubeDesc cubeDesc) {
         cubeRef = cubeDesc;
 
         for (HBaseColumnFamilyDesc cf : columnFamily) {
-            cf.setName(cf.getName().toUpperCase());
+            cf.setName(cf.getName().toUpperCase(Locale.ROOT));
 
             for (HBaseColumnDesc c : cf.getColumns()) {
-                c.setQualifier(c.getQualifier().toUpperCase());
+                c.setQualifier(c.getQualifier().toUpperCase(Locale.ROOT));
                 StringUtil.toUpperCaseArray(c.getMeasureRefs(), c.getMeasureRefs());
             }
         }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java b/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
index 65719a588d..f78e92b530 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -53,7 +54,8 @@
 
     private static Logger logger = LoggerFactory.getLogger(CubingUtils.class);
 
-    public static Map<Long, HLLCounter> sampling(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDescIn, Iterable<List<String>> streams) {
+    public static Map<Long, HLLCounter> sampling(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDescIn,
+            Iterable<List<String>> streams) {
         final CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(flatDescIn, cubeDesc);
         final int rowkeyLength = cubeDesc.getRowkey().getRowKeyColumns().length;
         final Set<Long> allCuboidIds = cubeDesc.getInitialCuboidScheduler().getAllCuboidIds();
@@ -105,8 +107,10 @@
         return result;
     }
 
-    public static Map<TblColRef, Dictionary<String>> buildDictionary(final CubeInstance cubeInstance, Iterable<List<String>> recordList) throws IOException {
-        final List<TblColRef> columnsNeedToBuildDictionary = cubeInstance.getDescriptor().listDimensionColumnsExcludingDerived(true);
+    public static Map<TblColRef, Dictionary<String>> buildDictionary(final CubeInstance cubeInstance,
+            Iterable<List<String>> recordList) throws IOException {
+        final List<TblColRef> columnsNeedToBuildDictionary = cubeInstance.getDescriptor()
+                .listDimensionColumnsExcludingDerived(true);
         final HashMap<Integer, TblColRef> tblColRefMap = Maps.newHashMap();
         int index = 0;
         for (TblColRef column : columnsNeedToBuildDictionary) {
@@ -126,14 +130,16 @@
         }
         for (TblColRef tblColRef : valueMap.keySet()) {
             Set<String> values = valueMap.get(tblColRef);
-            Dictionary<String> dict = DictionaryGenerator.buildDictionary(tblColRef.getType(), new IterableDictionaryValueEnumerator(values));
+            Dictionary<String> dict = DictionaryGenerator.buildDictionary(tblColRef.getType(),
+                    new IterableDictionaryValueEnumerator(values));
             result.put(tblColRef, dict);
         }
         return result;
     }
 
     @SuppressWarnings("unchecked")
-    public static Map<TblColRef, Dictionary<String>> writeDictionary(CubeSegment cubeSegment, Map<TblColRef, Dictionary<String>> dictionaryMap, long startOffset, long endOffset) {
+    public static Map<TblColRef, Dictionary<String>> writeDictionary(CubeSegment cubeSegment,
+            Map<TblColRef, Dictionary<String>> dictionaryMap, long startOffset, long endOffset) {
         Map<TblColRef, Dictionary<String>> realDictMap = Maps.newHashMap();
 
         for (Map.Entry<TblColRef, Dictionary<String>> entry : dictionaryMap.entrySet()) {
@@ -141,7 +147,7 @@
             final Dictionary<String> dictionary = entry.getValue();
             IReadableTable.TableSignature signature = new IReadableTable.TableSignature();
             signature.setLastModifiedTime(System.currentTimeMillis());
-            signature.setPath(String.format("streaming_%s_%s", startOffset, endOffset));
+            signature.setPath(String.format(Locale.ROOT, "streaming_%s_%s", startOffset, endOffset));
             signature.setSize(endOffset - startOffset);
             DictionaryInfo dictInfo = new DictionaryInfo(tblColRef.getColumnDesc(), tblColRef.getDatatype(), signature);
             logger.info("writing dictionary for TblColRef:" + tblColRef.toString());
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
index 5872dd8097..eec3d70458 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
@@ -29,12 +29,12 @@
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
 
-import com.google.common.collect.Lists;
 import org.apache.kylin.common.util.Array;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -56,6 +56,7 @@
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 /**
@@ -95,7 +96,7 @@ public void setUp() throws Exception {
 
     private String getColInAggrGroup(AggregationGroup g, String name) {
         for (String c : g.getIncludes()) {
-            if (c.toLowerCase().contains(name.toLowerCase()))
+            if (c.toLowerCase(Locale.ROOT).contains(name.toLowerCase(Locale.ROOT)))
                 return c;
         }
         throw new IllegalStateException();
@@ -158,7 +159,7 @@ public void testCiCube() {
     private List<MeasureDesc> dropPercentile(List<MeasureDesc> measures) {
         ArrayList<MeasureDesc> result = new ArrayList<>();
         for (MeasureDesc m : measures) {
-            if (!m.getFunction().getExpression().toUpperCase().contains("PERCENTILE"))
+            if (!m.getFunction().getExpression().toUpperCase(Locale.ROOT).contains("PERCENTILE"))
                 result.add(m);
         }
         return result;
@@ -377,14 +378,15 @@ public void testTooManyRowkeys() throws Exception {
         metaFile.renameTo(new File(path.substring(0, path.length() - 4)));
 
         thrown.expect(IllegalArgumentException.class);
-        thrown.expectMessage("Too many rowkeys (78) in CubeDesc, please try to reduce dimension number or adopt derived dimensions");
+        thrown.expectMessage(
+                "Too many rowkeys (78) in CubeDesc, please try to reduce dimension number or adopt derived dimensions");
         getTestConfig().clearManagers();
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ut_78_rowkeys");
         cubeDesc.init(getTestConfig());
     }
 
     @Test
-    public void testValidateNotifyList() throws Exception{
+    public void testValidateNotifyList() throws Exception {
         thrown.expect(IllegalArgumentException.class);
         thrown.expectMessage("Email [test] is not validation.");
 
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java b/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
index 074d97327b..540933f830 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
@@ -20,6 +20,8 @@
 
 import static org.junit.Assert.assertEquals;
 
+import java.nio.charset.StandardCharsets;
+
 import org.apache.kylin.common.util.BytesSplitter;
 import org.junit.Test;
 
@@ -32,23 +34,32 @@
     @Test
     public void test() {
         BytesSplitter bytesSplitter = new BytesSplitter(10, 15);
-        byte[] input = "2013-02-17Collectibles".getBytes();
+        byte[] input = "2013-02-17Collectibles".getBytes(StandardCharsets.UTF_8);
         bytesSplitter.split(input, input.length, (byte) 127);
 
         assertEquals(2, bytesSplitter.getBufferSize());
-        assertEquals("2013-02-17", new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
-        assertEquals("Collectibles", new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
+        assertEquals("2013-02-17",
+                new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length, StandardCharsets.UTF_8));
+        assertEquals("Collectibles",
+                new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length, StandardCharsets.UTF_8)
+                );
     }
 
     @Test
     public void testNullValue() {
         BytesSplitter bytesSplitter = new BytesSplitter(10, 15);
-        byte[] input = "2013-02-17Collectibles".getBytes();
+        byte[] input = "2013-02-17Collectibles".getBytes(StandardCharsets.UTF_8);
         bytesSplitter.split(input, input.length, (byte) 127);
 
         assertEquals(3, bytesSplitter.getBufferSize());
-        assertEquals("2013-02-17", new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
-        assertEquals("", new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
-        assertEquals("Collectibles", new String(bytesSplitter.getSplitBuffers()[2].value, 0, bytesSplitter.getSplitBuffers()[2].length));
+        assertEquals("2013-02-17",
+                new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length, StandardCharsets.UTF_8)
+                );
+        assertEquals("",
+                new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length,
+                    StandardCharsets.UTF_8));
+        assertEquals("Collectibles",
+                new String(bytesSplitter.getSplitBuffers()[2].value, 0, bytesSplitter.getSplitBuffers()[2].length,
+                    StandardCharsets.UTF_8));
     }
 }
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java
index 41fa8070fd..79c9426655 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java
@@ -20,7 +20,9 @@
 
 import static org.junit.Assert.assertEquals;
 
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -39,7 +41,7 @@ public void testCreateCuboidTree() {
         long basicCuboid = getBaseCuboid(10);
         List<Long> cuboids = genRandomCuboids(basicCuboid, 200);
         CuboidTree cuboidTree = CuboidTree.createFromCuboids(cuboids);
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
         cuboidTree.print(out);
         out.flush();
     }
@@ -51,7 +53,7 @@ public void testSpanningChild() {
         long testCuboid = cuboids.get(10);
         System.out.println(cuboids);
         CuboidTree cuboidTree = CuboidTree.createFromCuboids(cuboids);
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
         cuboidTree.print(out);
         out.flush();
 
@@ -62,7 +64,7 @@ public void testSpanningChild() {
     @Test
     public void testFindBestMatchCuboid() {
         CuboidTree cuboidTree = createCuboidTree1();
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
         cuboidTree.print(out);
         out.flush();
 
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
index f749fb4e67..57ea26c9a4 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.SortedMap;
 import java.util.TreeMap;
@@ -93,10 +94,10 @@
     }
 
     enum Settings {
-        WITHOUT_MEM_HUNGRY,     // only test basic aggrs
-        WITH_HLLC,              // basic aggrs + hllc
-        WITH_LOW_CARD_BITMAP,   // basic aggrs + bitmap
-        WITH_HIGH_CARD_BITMAP   // basic aggrs + bitmap
+        WITHOUT_MEM_HUNGRY, // only test basic aggrs
+        WITH_HLLC, // basic aggrs + hllc
+        WITH_LOW_CARD_BITMAP, // basic aggrs + bitmap
+        WITH_HIGH_CARD_BITMAP // basic aggrs + bitmap
     }
 
     private MeasureAggregator<?>[] createNoMemHungryAggrs() {
@@ -132,19 +133,19 @@ private BitmapAggregator createBitmapAggr(boolean lowCardinality) {
         aggregators.addAll(Arrays.asList(createNoMemHungryAggrs()));
 
         switch (settings) {
-            case WITHOUT_MEM_HUNGRY:
-                break;
-            case WITH_HLLC:
-                aggregators.add(createHLLCAggr());
-                break;
-            case WITH_LOW_CARD_BITMAP:
-                aggregators.add(createBitmapAggr(true));
-                break;
-            case WITH_HIGH_CARD_BITMAP:
-                aggregators.add(createBitmapAggr(false));
-                break;
-            default:
-                break;
+        case WITHOUT_MEM_HUNGRY:
+            break;
+        case WITH_HLLC:
+            aggregators.add(createHLLCAggr());
+            break;
+        case WITH_LOW_CARD_BITMAP:
+            aggregators.add(createBitmapAggr(true));
+            break;
+        case WITH_HIGH_CARD_BITMAP:
+            aggregators.add(createBitmapAggr(false));
+            break;
+        default:
+            break;
         }
 
         return aggregators.toArray(new MeasureAggregator[aggregators.size()]);
@@ -158,10 +159,10 @@ public void testEstimateBitmapMemSize() {
             bitmapAggrs[i].aggregate(bitmaps[i]);
         }
 
-        System.out.printf("%-15s %-10s %-10s\n", "cardinality", "estimate", "actual");
+        System.out.printf(Locale.ROOT, "%-15s %-10s %-10s\n", "cardinality", "estimate", "actual");
         for (BitmapAggregator aggr : bitmapAggrs) {
-            System.out.printf("%-15d %-10d %-10d\n",
-                    aggr.getState().getCount(), aggr.getMemBytesEstimate(), meter.measureDeep(aggr));
+            System.out.printf(Locale.ROOT, "%-15d %-10d %-10d\n", aggr.getState().getCount(),
+                    aggr.getMemBytesEstimate(), meter.measureDeep(aggr));
         }
     }
 
@@ -190,8 +191,8 @@ public int compare(byte[] o1, byte[] o2) {
         long actualMillis = 0;
 
         System.out.println("Settings: " + settings);
-        System.out.printf("%15s %15s %15s %15s %15s\n",
-                "Size", "Estimate(bytes)", "Actual(bytes)", "Estimate(ms)", "Actual(ms)");
+        System.out.printf(Locale.ROOT, "%15s %15s %15s %15s %15s\n", "Size", "Estimate(bytes)", "Actual(bytes)",
+                "Estimate(ms)", "Actual(ms)");
 
         for (int i = 0; i < inputCount; i++) {
             byte[] key = new byte[10];
@@ -199,7 +200,7 @@ public int compare(byte[] o1, byte[] o2) {
             MeasureAggregator[] values = createAggrs(settings);
             map.put(key, values);
 
-            if ((i+1) % reportInterval == 0) {
+            if ((i + 1) % reportInterval == 0) {
                 stopwatch.start();
                 long estimateBytes = GTAggregateScanner.estimateSizeOfAggrCache(key, values, map.size());
                 estimateMillis += stopwatch.elapsedMillis();
@@ -210,8 +211,8 @@ public int compare(byte[] o1, byte[] o2) {
                 actualMillis += stopwatch.elapsedMillis();
                 stopwatch.reset();
 
-                System.out.printf("%,15d %,15d %,15d %,15d %,15d\n",
-                        map.size(), estimateBytes, actualBytes, estimateMillis, actualMillis);
+                System.out.printf(Locale.ROOT, "%,15d %,15d %,15d %,15d %,15d\n", map.size(), estimateBytes,
+                        actualBytes, estimateMillis, actualMillis);
             }
         }
         System.out.println("---------------------------------------\n");
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
index 9e68eb4d6d..bcead85217 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.io.PrintStream;
 import java.util.Arrays;
+import java.util.Locale;
 import java.util.Objects;
 import java.util.TreeMap;
 import java.util.concurrent.ExecutionException;
@@ -60,9 +61,10 @@
  *
  * @author sunyerui
  */
-@SuppressWarnings({"rawtypes", "unchecked", "serial"})
+@SuppressWarnings({ "rawtypes", "unchecked", "serial" })
 public class AppendTrieDictionary<T> extends CacheDictionary<T> {
-    public static final byte[] HEAD_MAGIC = new byte[]{0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74}; // "AppendTrieDict"
+    public static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65,
+            0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
     public static final int HEAD_SIZE_I = HEAD_MAGIC.length;
     private static final Logger logger = LoggerFactory.getLogger(AppendTrieDictionary.class);
 
@@ -85,19 +87,23 @@ public void init(String baseDir) throws IOException {
         final Path latestVersionPath = globalDictStore.getVersionDir(latestVersion);
         this.metadata = globalDictStore.getMetadata(latestVersion);
         this.bytesConvert = metadata.bytesConverter;
-        this.dictCache = CacheBuilder.newBuilder().softValues().removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
-            @Override
-            public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
-                logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}", notification.getKey(), notification.getValue(), notification.getCause(), dictCache.size(), metadata.sliceFileMap.size());
-            }
-        }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
-            @Override
-            public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
-                AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(), metadata.sliceFileMap.get(key));
-                logger.trace("Load slice with key {} and value {}", key, slice);
-                return slice;
-            }
-        });
+        this.dictCache = CacheBuilder.newBuilder().softValues()
+                .removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
+                    @Override
+                    public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
+                        logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}",
+                                notification.getKey(), notification.getValue(), notification.getCause(),
+                                dictCache.size(), metadata.sliceFileMap.size());
+                    }
+                }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
+                    @Override
+                    public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
+                        AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(),
+                                metadata.sliceFileMap.get(key));
+                        logger.trace("Load slice with key {} and value {}", key, slice);
+                        return slice;
+                    }
+                });
     }
 
     @Override
@@ -162,7 +168,8 @@ public void readFields(DataInput in) throws IOException {
 
     @Override
     public void dump(PrintStream out) {
-        out.println(String.format("Total %d values and %d slices", metadata.nValues, metadata.sliceFileMap.size()));
+        out.println(String.format(Locale.ROOT, "Total %d values and %d slices", metadata.nValues,
+                metadata.sliceFileMap.size()));
     }
 
     @Override
@@ -184,7 +191,7 @@ public boolean equals(Object o) {
 
     @Override
     public String toString() {
-        return String.format("AppendTrieDictionary(%s)", baseDir);
+        return String.format(Locale.ROOT, "AppendTrieDictionary(%s)", baseDir);
     }
 
     @Override
@@ -221,7 +228,8 @@ private String convertToAbsolutePath(String path) {
         if (paths.length == 2) {
             return kylinConfig.getHdfsWorkingDirectory() + "/resources/SegmentDict/" + paths[1];
         } else {
-            throw new RuntimeException("the basic directory of global dictionary only support the format which contains '/resources/GlobalDict/' or '/resources/SegmentDict/'");
+            throw new RuntimeException(
+                    "the basic directory of global dictionary only support the format which contains '/resources/GlobalDict/' or '/resources/SegmentDict/'");
         }
     }
 
@@ -230,7 +238,7 @@ private String convertToAbsolutePath(String path) {
      *
      * @param flag
      */
-   void setSaveAbsolutePath(Boolean flag) {
+    void setSaveAbsolutePath(Boolean flag) {
         this.isSaveAbsolutePath = flag;
     }
 }
\ No newline at end of file
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
index 00410e7f4e..c86bcb84a6 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
@@ -19,6 +19,8 @@
 package org.apache.kylin.dict;
 
 import java.io.IOException;
+
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.lock.DistributedLock;
 import org.apache.kylin.common.util.Dictionary;
@@ -58,7 +60,8 @@ public void init(DictionaryInfo dictInfo, int baseId, String hdfsDir) throws IOE
             this.builder = new AppendTrieDictionaryBuilder(baseDir, maxEntriesPerSlice, true);
         } catch (Throwable e) {
             lock.unlock(getLockPath(sourceColumn));
-            throw new RuntimeException(String.format("Failed to create global dictionary on %s ", sourceColumn), e);
+            throw new RuntimeException(
+                    String.format(Locale.ROOT, "Failed to create global dictionary on %s ", sourceColumn), e);
         }
         this.baseId = baseId;
     }
@@ -69,7 +72,8 @@ public boolean addValue(String value) {
             if (lock.lock(getLockPath(sourceColumn))) {
                 logger.info("processed {} values for {}", counter, sourceColumn);
             } else {
-                throw new RuntimeException("Failed to create global dictionary on " + sourceColumn + " This client doesn't keep the lock");
+                throw new RuntimeException(
+                        "Failed to create global dictionary on " + sourceColumn + " This client doesn't keep the lock");
             }
         }
 
@@ -81,7 +85,8 @@ public boolean addValue(String value) {
             builder.addValue(value);
         } catch (Throwable e) {
             lock.unlock(getLockPath(sourceColumn));
-            throw new RuntimeException(String.format("Failed to create global dictionary on %s ", sourceColumn), e);
+            throw new RuntimeException(
+                    String.format(Locale.ROOT, "Failed to create global dictionary on %s ", sourceColumn), e);
         }
 
         return true;
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java
index 35c995eee6..584d58e865 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java
@@ -22,6 +22,7 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.util.Dictionary;
@@ -107,7 +108,7 @@ protected T getValueFromIdImpl(int id) {
     }
 
     public void dump(PrintStream out) {
-        out.println(String.format("Total %d values for ShrunkenDictionary", valueToIdMap.size()));
+        out.println(String.format(Locale.ROOT, "Total %d values for ShrunkenDictionary", valueToIdMap.size()));
     }
 
     public void write(DataOutput out) throws IOException {
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
index ee3a2c2389..c25e9b0d73 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
@@ -18,11 +18,6 @@
 
 package org.apache.kylin.dict.global;
 
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.BytesUtil;
-import org.apache.kylin.dict.AppendTrieDictionary;
-import org.apache.kylin.dict.TrieDictionary;
-
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -32,6 +27,12 @@
 import java.util.IdentityHashMap;
 import java.util.LinkedList;
 
+import java.util.Locale;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.BytesUtil;
+import org.apache.kylin.dict.AppendTrieDictionary;
+import org.apache.kylin.dict.TrieDictionary;
+
 public class AppendDictNode {
     public byte[] part;
     public int id = -1;
@@ -195,12 +196,14 @@ public static AppendDictNode splitNodeTree(final AppendDictNode splitNode) {
     }
 
     private void build_overwriteChildOffset(int parentOffset, int childOffset, int sizeChildOffset, byte[] trieBytes) {
-        int flags = (int) trieBytes[parentOffset] & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
+        int flags = (int) trieBytes[parentOffset]
+                & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
         BytesUtil.writeUnsigned(childOffset, trieBytes, parentOffset, sizeChildOffset);
         trieBytes[parentOffset] |= flags;
     }
 
-    private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, int sizeChildOffset, int sizeId, byte[] trieBytes) {
+    private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, int sizeChildOffset, int sizeId,
+            byte[] trieBytes) {
         int o = offset;
 
         // childOffset
@@ -212,7 +215,8 @@ private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, i
 
         // nValueBytes
         if (n.part.length > 255)
-            throw new RuntimeException("Value length is " + n.part.length + " and larger than 255: " + Bytes.toStringBinary(n.part));
+            throw new RuntimeException(
+                    "Value length is " + n.part.length + " and larger than 255: " + Bytes.toStringBinary(n.part));
         BytesUtil.writeUnsigned(n.part.length, trieBytes, o, 1);
         o++;
 
@@ -238,7 +242,8 @@ private void checkValidId(int id) {
 
     @Override
     public String toString() {
-        return String.format("DictNode[root=%s, nodes=%d, firstValue=%s]", Bytes.toStringBinary(part), childrenCount, Bytes.toStringBinary(firstValue()));
+        return String.format(Locale.ROOT, "DictNode[root=%s, nodes=%d, firstValue=%s]", Bytes.toStringBinary(part),
+                childrenCount, Bytes.toStringBinary(firstValue()));
     }
 
     static class Stats {
@@ -332,7 +337,8 @@ public void visit(AppendDictNode n, int level) {
             s.mbpn_sizeValueTotal = s.nValueBytesCompressed + s.nValues * s.mbpn_sizeId;
             s.mbpn_sizeNoValueBytes = 1;
             s.mbpn_sizeChildOffset = 5;
-            s.mbpn_footprint = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset);
+            s.mbpn_footprint = s.mbpn_sizeValueTotal
+                    + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset);
             while (true) { // minimize the offset size to match the footprint
                 int t = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset - 1);
                 // *4 because 2 MSB of offset is used for isEndOfValue & isEndChild flag
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
index 4e820e08d6..7972bd3486 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
@@ -18,9 +18,6 @@
 
 package org.apache.kylin.dict.global;
 
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.BytesUtil;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -29,8 +26,13 @@
 import java.util.Arrays;
 import java.util.HashSet;
 
+import java.util.Locale;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.BytesUtil;
+
 public class AppendDictSlice {
-    static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
+    static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44,
+            0x69, 0x63, 0x74 }; // "AppendTrieDict"
     static final int HEAD_SIZE_I = HEAD_MAGIC.length;
     static final int BIT_IS_LAST_CHILD = 0x80;
     static final int BIT_IS_END_OF_VALUE = 0x40;
@@ -58,7 +60,8 @@ private void init() {
             throw new IllegalArgumentException("Wrong file type (magic does not match)");
 
         try {
-            DataInputStream headIn = new DataInputStream(new ByteArrayInputStream(trieBytes, HEAD_SIZE_I, trieBytes.length - HEAD_SIZE_I));
+            DataInputStream headIn = new DataInputStream(
+                    new ByteArrayInputStream(trieBytes, HEAD_SIZE_I, trieBytes.length - HEAD_SIZE_I));
             this.headSize = headIn.readShort();
             this.bodyLen = headIn.readInt();
             this.nValues = headIn.readInt();
@@ -104,7 +107,8 @@ public static AppendDictSlice deserializeFrom(DataInput in) throws IOException {
             if (checkFlag(nodeOffset, BIT_IS_END_OF_VALUE)) {
                 break;
             }
-            nodeOffset = headSize + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
+            nodeOffset = headSize
+                    + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
             if (nodeOffset == headSize) {
                 break;
             }
@@ -155,7 +159,8 @@ private int lookupSeqNoFromValue(int n, byte[] inp, int o, int inpEnd, int round
                 } else if (comp < 0) { // try next child
                     if (checkFlag(c, BIT_IS_LAST_CHILD))
                         return -1;
-                    c = p + BytesUtil.readUnsigned(trieBytes, p - 1, 1) + (checkFlag(c, BIT_IS_END_OF_VALUE) ? sizeOfId : 0);
+                    c = p + BytesUtil.readUnsigned(trieBytes, p - 1, 1)
+                            + (checkFlag(c, BIT_IS_END_OF_VALUE) ? sizeOfId : 0);
                 } else { // children are ordered by their first value byte
                     return -1;
                 }
@@ -261,7 +266,8 @@ public boolean doCheck() {
 
     @Override
     public String toString() {
-        return String.format("DictSlice[firstValue=%s, values=%d, bytes=%d]", Bytes.toStringBinary(getFirstValue()), nValues, bodyLen);
+        return String.format(Locale.ROOT, "DictSlice[firstValue=%s, values=%d, bytes=%d]",
+                Bytes.toStringBinary(getFirstValue()), nValues, bodyLen);
     }
 
     @Override
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
index ec79f2cd00..f286085d1c 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
@@ -23,6 +23,7 @@
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.TreeSet;
@@ -133,7 +134,7 @@ void prepareForWrite(String workingDir, boolean isGlobal) throws IOException {
     @Override
     public Long[] listAllVersions() throws IOException {
         if (!fileSystem.exists(basePath)) {
-            return new Long[0];  // for the removed SegmentAppendTrieDictBuilder
+            return new Long[0]; // for the removed SegmentAppendTrieDictBuilder
         }
 
         FileStatus[] versionDirs = fileSystem.listStatus(basePath, new PathFilter() {
@@ -261,9 +262,12 @@ public String copyToAnotherMeta(KylinConfig srcConfig, KylinConfig dstConfig) th
             return baseDir;
         }
 
-        checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()), "Please check why current directory {} doesn't belong to source working directory {}", baseDir, srcConfig.getHdfsWorkingDirectory());
+        checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()),
+                "Please check why current directory {} doesn't belong to source working directory {}", baseDir,
+                srcConfig.getHdfsWorkingDirectory());
 
-        final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(), dstConfig.getHdfsWorkingDirectory());
+        final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(),
+                dstConfig.getHdfsWorkingDirectory());
 
         Long[] versions = listAllVersions();
         if (versions.length == 0) { // empty dict, nothing to copy
@@ -271,7 +275,8 @@ public String copyToAnotherMeta(KylinConfig srcConfig, KylinConfig dstConfig) th
         }
 
         Path srcVersionDir = getVersionDir(versions[versions.length - 1]);
-        Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(), dstConfig.getHdfsWorkingDirectory()));
+        Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(),
+                dstConfig.getHdfsWorkingDirectory()));
         FileSystem dstFS = dstVersionDir.getFileSystem(conf);
         if (dstFS.exists(dstVersionDir)) {
             dstFS.delete(dstVersionDir, true);
@@ -421,13 +426,14 @@ public void writeIndexFile(Path dir, GlobalDictMetadata metadata) throws IOExcep
         public void sanityCheck(Path dir, GlobalDictMetadata metadata) throws IOException {
             for (Map.Entry<AppendDictSliceKey, String> entry : metadata.sliceFileMap.entrySet()) {
                 if (!fs.exists(new Path(dir, entry.getValue()))) {
-                    throw new RuntimeException("The slice file " + entry.getValue() + " for the key: " + entry.getKey() + " must be existed!");
+                    throw new RuntimeException("The slice file " + entry.getValue() + " for the key: " + entry.getKey()
+                            + " must be existed!");
                 }
             }
         }
 
         public static String sliceFileName(AppendDictSliceKey key) {
-            return String.format("%s%d_%d", SLICE_PREFIX, System.currentTimeMillis(), key.hashCode());
+            return String.format(Locale.ROOT, "%s%d_%d", SLICE_PREFIX, System.currentTimeMillis(), key.hashCode());
         }
     }
 }
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java
index c5b61b5c69..770b0bc193 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.common.util.RandomUtil;
@@ -50,7 +51,8 @@ public void init(DictionaryInfo dictInfo, int baseId, String hdfsDir) throws IOE
 
         //use UUID to make each segment dict in different HDFS dir and support concurrent build
         //use timestamp to make the segment dict easily to delete
-        String baseDir = hdfsDir + "resources/SegmentDict" + dictInfo.getResourceDir() + "/" + RandomUtil.randomUUID().toString() + "_" + System.currentTimeMillis()+ "/";
+        String baseDir = hdfsDir + "resources/SegmentDict" + dictInfo.getResourceDir() + "/"
+                + RandomUtil.randomUUID().toString() + "_" + System.currentTimeMillis() + "/";
 
         this.builder = new AppendTrieDictionaryBuilder(baseDir, maxEntriesPerSlice, false);
         this.baseId = baseId;
@@ -65,7 +67,8 @@ public boolean addValue(String value) {
         try {
             builder.addValue(value);
         } catch (Throwable e) {
-            throw new RuntimeException(String.format("Failed to create global dictionary on %s ", sourceColumn), e);
+            throw new RuntimeException(
+                    String.format(Locale.ROOT, "Failed to create global dictionary on %s ", sourceColumn), e);
         }
 
         return true;
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java
index ea61c61b2c..7e5421a6f5 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java
@@ -40,6 +40,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.TreeMap;
@@ -70,7 +71,8 @@
     public void beforeTest() {
         staticCreateTestMetadata();
         KylinConfig.getInstanceFromEnv().setProperty("kylin.dictionary.append-entry-size", "50000");
-        BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR + "/";
+        BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR
+                + "/";
         LOCAL_BASE_DIR = getLocalWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR + "/";
     }
 
@@ -88,11 +90,19 @@ private void cleanup() {
         }
     }
 
-    private static final String[] words = new String[]{"paint", "par", "part", "parts", "partition", "partitions", "party", "partie", "parties", "patient", "taste", "tar", "trie", "try", "tries", "字典", "字典树", "字母", // non-ascii characters
+    private static final String[] words = new String[] { "paint", "par", "part", "parts", "partition", "partitions",
+            "party", "partie", "parties", "patient", "taste", "tar", "trie", "try", "tries", "字典", "字典树", "字母", // non-ascii characters
             "", // empty
-            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii", "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiipaiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
-            "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk",
+            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
+            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiipaiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
+            "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk",
             "paint", "tar", "try", // some dup
     };
 
@@ -172,7 +182,8 @@ public void testHugeKeySet() throws IOException {
         dict.dump(System.out);
     }
 
-    private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notfound, boolean shuffleList) throws IOException {
+    private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notfound, boolean shuffleList)
+            throws IOException {
         Random rnd = new Random(System.currentTimeMillis());
         ArrayList<String> strList = new ArrayList<String>();
         strList.addAll(list);
@@ -198,8 +209,10 @@ private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notf
             String str = strList.get(checkIndex);
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
-            assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
+            assertFalse(
+                    String.format(Locale.ROOT, "Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)),
+                    checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
             checkMap.put(id, str);
         }
 
@@ -218,12 +231,13 @@ private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notf
             String str = strList.get(checkIndex);
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             if (checkIndex < firstAppend) {
                 assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
             } else {
                 // check second append str, should be new id
-                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+                assertFalse(String.format(Locale.ROOT, "Id %d for %s should be empty, but is %s", id, str,
+                        checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
                 checkMap.put(id, str);
             }
         }
@@ -243,12 +257,13 @@ private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notf
             String str = strList.get(checkIndex);
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             if (checkIndex < secondAppend) {
                 assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
             } else {
                 // check third append str, should be new id
-                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+                assertFalse(String.format(Locale.ROOT, "Id %d for %s should be empty, but is %s", id, str,
+                        checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
                 checkMap.put(id, str);
             }
         }
@@ -264,12 +279,13 @@ private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notf
         for (String str : strList) {
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
         }
     }
 
-    private static AppendTrieDictionary<String> testSerialize(AppendTrieDictionary<String> dict, BytesConverter converter) {
+    private static AppendTrieDictionary<String> testSerialize(AppendTrieDictionary<String> dict,
+            BytesConverter converter) {
         try {
             ByteArrayOutputStream bout = new ByteArrayOutputStream();
             DataOutputStream dataout = new DataOutputStream(bout);
@@ -347,14 +363,14 @@ public void testSkipAddValue() throws IOException {
     public void testSerialize() throws IOException {
         AppendTrieDictionaryBuilder builder = createBuilder();
         AppendTrieDictionary dict = builder.build(0);
-        
+
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
         DataOutputStream dataout = new DataOutputStream(bout);
         dict.write(dataout);
         dataout.close();
         ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
         DataInputStream datain = new DataInputStream(bin);
-        
+
         assertNull(new Path(datain.readUTF()).toUri().getScheme());
         datain.close();
     }
@@ -369,7 +385,7 @@ public void testDeserialize() throws IOException {
         AppendTrieDictionary dict = builder.build(0);
         TreeMap checkMap = new TreeMap();
         BytesConverter converter = new StringBytesConverter();
-        for (String str: strList) {
+        for (String str : strList) {
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
             checkMap.put(id, str);
@@ -388,7 +404,7 @@ public void testDeserialize() throws IOException {
         for (String str : strList) {
             byte[] bytes = converter.convertToBytes(str);
             int id = r.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
         }
     }
@@ -566,7 +582,8 @@ private void convertIndexToOldFormat(String baseDir) throws IOException {
         Path v2IndexFile = new Path(versionPath, V2_INDEX_NAME);
 
         fs.delete(v2IndexFile, true);
-        GlobalDictHDFSStore.IndexFormat indexFormatV1 = new GlobalDictHDFSStore.IndexFormatV1(fs, HadoopUtil.getCurrentConfiguration());
+        GlobalDictHDFSStore.IndexFormat indexFormatV1 = new GlobalDictHDFSStore.IndexFormatV1(fs,
+                HadoopUtil.getCurrentConfiguration());
         indexFormatV1.writeIndexFile(versionPath, metadata);
 
         //convert v2 fileName format to v1 fileName format
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
index 6e8e655b0b..4f5ddff55d 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
@@ -24,6 +24,7 @@
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 import java.util.Random;
@@ -45,7 +46,8 @@
  */
 public class NumberDictionaryTest extends LocalFileMetadataTestCase {
 
-    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT);
+    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(
+            MAX_DIGITS_BEFORE_DECIMAL_POINT);
     Random rand = new Random();
 
     @Before
@@ -78,7 +80,8 @@ public void testEmptyInput() throws IOException {
         String[] ints = new String[] { "", "0", "5", "100", "13" };
 
         // check "" is treated as NULL, not a code of dictionary
-        Dictionary<?> dict = DictionaryGenerator.buildDictionary(DataType.getType("integer"), new IterableDictionaryValueEnumerator(ints));
+        Dictionary<?> dict = DictionaryGenerator.buildDictionary(DataType.getType("integer"),
+                new IterableDictionaryValueEnumerator(ints));
         assertEquals(4, dict.getSize());
 
         final int id = ((NumberDictionary<String>) dict).getIdFromValue("");
@@ -95,7 +98,8 @@ public void testNumberEncode() {
         //test resolved jira-1800
         checkCodec("-0.0045454354354354359999999999877218", "-9999999999999999999.9954545645645645640000000000122781;");
         checkCodec("-0.009999999999877218", "-9999999999999999999.990000000000122781;");
-        checkCodec("12343434372493274.438403840384023840253554345345345345", "00012343434372493274.438403840384023840253554345345345345");
+        checkCodec("12343434372493274.438403840384023840253554345345345345",
+                "00012343434372493274.438403840384023840253554345345345345");
         assertEquals("00000000000000000052.57", encodeNumber("52.5700"));
         assertEquals("00000000000000000000", encodeNumber("0.00"));
         assertEquals("00000000000000000000", encodeNumber("0.0"));
@@ -141,15 +145,15 @@ public void testDictionary() {
 
         // test exact match
         NumberDictionary<String> dict = builder.build(0);
-//        for (int i = 0; i < sorted.size(); i++) {
-//            String dictNum = dict.getValueFromId(i);
-//            System.out.println(sorted.get(i) + "\t" + dictNum);
-//        }
+        //        for (int i = 0; i < sorted.size(); i++) {
+        //            String dictNum = dict.getValueFromId(i);
+        //            System.out.println(sorted.get(i) + "\t" + dictNum);
+        //        }
 
         for (int i = 0; i < sorted.size(); i++) {
             String dictNum = dict.getValueFromId(i);
             assertEquals(sorted.get(i), new BigDecimal(dictNum));
-            assertEquals(sorted.get(i), new BigDecimal(new String(dict.getValueByteFromId(i))));
+            assertEquals(sorted.get(i), new BigDecimal(new String(dict.getValueByteFromId(i), StandardCharsets.UTF_8)));
         }
 
         // test rounding
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
index 1f63111e0a..3e50224120 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
@@ -33,6 +33,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -754,7 +755,7 @@ public void buildTimeBenchmarkTest() throws Exception {
     private void evaluateDataSize(ArrayList<String> list) {
         long size = 0;
         for (String str : list)
-            size += str.getBytes().length;
+            size += str.getBytes(StandardCharsets.UTF_8).length;
         System.out.println("test data size : " + size / (1024 * 1024) + " MB");
     }
 
@@ -763,7 +764,7 @@ private void evaluateDataSize(int count) {
         Iterator<String> itr = rs.iterator();
         long bytesCount = 0;
         while (itr.hasNext())
-            bytesCount += itr.next().getBytes().length;
+            bytesCount += itr.next().getBytes(StandardCharsets.UTF_8).length;
         System.out.println("test data size : " + bytesCount / (1024 * 1024) + " MB");
     }
 
diff --git a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index 86659d96ed..ff48244687 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -23,9 +23,9 @@
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeSegment;
@@ -41,6 +41,8 @@
 import org.apache.kylin.metadata.model.TableRef;
 import org.apache.kylin.metadata.model.TblColRef;
 
+import com.google.common.collect.Lists;
+
 /**
  *
  */
@@ -143,7 +145,7 @@ public static String generateSelectDataStatement(IJoinedFlatTableDesc flatDesc,
             if (i > 0) {
                 sql.append(",");
             }
-            String colTotalName = String.format("%s.%s", col.getTableRef().getTableName(), col.getName());
+            String colTotalName = String.format(Locale.ROOT, "%s.%s", col.getTableRef().getTableName(), col.getName());
             if (skipAsList.contains(colTotalName)) {
                 sql.append(col.getExpressionInSourceDB() + sep);
             } else {
@@ -173,7 +175,7 @@ public static void appendJoinStatement(IJoinedFlatTableDesc flatDesc, StringBuil
                     if (pk.length != fk.length) {
                         throw new RuntimeException("Invalid join condition of lookup table:" + lookupDesc);
                     }
-                    String joinType = join.getType().toUpperCase();
+                    String joinType = join.getType().toUpperCase(Locale.ROOT);
 
                     sql.append(joinType + " JOIN " + dimTable.getTableIdentity() + " as " + dimTable.getAlias() + sep);
                     sql.append("ON ");
@@ -245,7 +247,7 @@ private static String colName(TblColRef col, boolean useAlias) {
     }
 
     private static String getHiveDataType(String javaDataType) {
-        String originDataType = javaDataType.toLowerCase();
+        String originDataType = javaDataType.toLowerCase(Locale.ROOT);
         String hiveDataType;
         if (originDataType.startsWith("varchar")) {
             hiveDataType = "string";
diff --git a/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java b/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
index 9ba602f692..c85d11a0fc 100644
--- a/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
+++ b/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
@@ -21,6 +21,7 @@
 import java.io.File;
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
@@ -52,7 +53,7 @@ private static File getJobConfig(String fileName) {
     private String getHadoopJobConfFilePath(String suffix, boolean appendSuffix) throws IOException {
         String hadoopJobConfFile;
         if (suffix != null && appendSuffix) {
-            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + suffix.toLowerCase() + ".xml");
+            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + suffix.toLowerCase(Locale.ROOT) + ".xml");
         } else {
             hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + ".xml");
         }
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
index 45c37b587e..09b7b8e4bf 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
@@ -26,6 +26,7 @@
 import java.util.HashMap;
 import java.util.IllegalFormatException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang3.StringUtils;
@@ -511,7 +512,7 @@ public void addJobInfo(String id, Map<String, String> info) {
         if (info.containsKey(YARN_APP_ID) && !StringUtils.isEmpty(config.getJobTrackingURLPattern())) {
             String pattern = config.getJobTrackingURLPattern();
             try {
-                String newTrackingURL = String.format(pattern, info.get(YARN_APP_ID));
+                String newTrackingURL = String.format(Locale.ROOT, pattern, info.get(YARN_APP_ID));
                 info.put(YARN_APP_URL, newTrackingURL);
             } catch (IllegalFormatException ife) {
                 logger.error("Illegal tracking url pattern: " + config.getJobTrackingURLPattern());
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
index 6d40be8bcd..6bbc2cea18 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.job.impl.threadpool;
 
+import java.util.Locale;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
@@ -54,7 +55,7 @@ public static DefaultScheduler getInstance() {
         }
         return INSTANCE;
     }
-    
+
     public synchronized static DefaultScheduler createInstance() {
         destroyInstance();
         INSTANCE = new DefaultScheduler();
@@ -75,7 +76,7 @@ public synchronized static void destroyInstance() {
     }
 
     // ============================================================================
-    
+
     private JobLock jobLock;
     private ExecutableManager executableManager;
     private FetcherRunner fetcher;
@@ -144,7 +145,7 @@ public synchronized void init(JobEngineConfig jobEngineConfig, JobLock lock) thr
         jobLock = lock;
 
         String serverMode = jobEngineConfig.getConfig().getServerMode();
-        if (!("job".equals(serverMode.toLowerCase()) || "all".equals(serverMode.toLowerCase()))) {
+        if (!("job".equals(serverMode.toLowerCase(Locale.ROOT)) || "all".equals(serverMode.toLowerCase(Locale.ROOT)))) {
             logger.info("server mode: " + serverMode + ", no need to run job scheduler");
             return;
         }
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
index cb4d8156df..d6f9fe2c08 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
@@ -20,6 +20,7 @@
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArraySet;
 import java.util.concurrent.ExecutorService;
@@ -61,7 +62,7 @@
  */
 public class DistributedScheduler implements Scheduler<AbstractExecutable>, ConnectionStateListener {
     private static final Logger logger = LoggerFactory.getLogger(DistributedScheduler.class);
-    
+
     public static final String ZOOKEEPER_LOCK_PATH = "/job_engine/lock"; // note ZookeeperDistributedLock will ensure zk path prefix: /${kylin.env.zookeeper-base-path}/metadata
 
     public static DistributedScheduler getInstance(KylinConfig config) {
@@ -74,7 +75,7 @@ static DistributedScheduler newInstance(KylinConfig config) throws IOException {
     }
 
     // ============================================================================
-    
+
     private ExecutableManager executableManager;
     private FetcherRunner fetcher;
     private ScheduledExecutorService fetcherPool;
@@ -189,7 +190,7 @@ public void stateChanged(CuratorFramework client, ConnectionState newState) {
     @Override
     public synchronized void init(JobEngineConfig jobEngineConfig, JobLock jobLock) throws SchedulerException {
         String serverMode = jobEngineConfig.getConfig().getServerMode();
-        if (!("job".equals(serverMode.toLowerCase()) || "all".equals(serverMode.toLowerCase()))) {
+        if (!("job".equals(serverMode.toLowerCase(Locale.ROOT)) || "all".equals(serverMode.toLowerCase(Locale.ROOT)))) {
             logger.info("server mode: " + serverMode + ", no need to run job scheduler");
             return;
         }
diff --git a/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java b/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java
index dd18c9185a..5a8caa8313 100644
--- a/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java
+++ b/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.job.metrics;
 
+import java.util.Locale;
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metrics.MetricsManager;
 import org.apache.kylin.metrics.lib.impl.RecordEvent;
@@ -112,7 +114,7 @@ private static void setJobStats(RecordEvent metricsEvent, long tableSize, long c
         public void setWrapper(String user, String projectName, String cubeName, String jobId, String jobType,
                 String cubingType) {
             this.user = user;
-            this.projectName = projectName == null ? null : projectName.toUpperCase();
+            this.projectName = projectName == null ? null : projectName.toUpperCase(Locale.ROOT);
             this.cubeName = cubeName;
             this.jobId = jobId;
             this.jobType = jobType;
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
index dcc8d47e20..39c42d7607 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
@@ -23,6 +23,7 @@
 import java.io.ObjectOutput;
 import java.io.Serializable;
 import java.nio.ByteBuffer;
+import java.util.Locale;
 
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.Dictionary;
@@ -100,7 +101,8 @@ public void encode(String valueStr, byte[] output, int outputOffset) {
             for (int i = outputOffset; i < outputOffset + fixedLen; i++) {
                 output[i] = defaultByte;
             }
-            logger.error("Can't translate value " + valueStr + " to dictionary ID, roundingFlag " + roundingFlag + ". Using default value " + String.format("\\x%02X", defaultByte));
+            logger.error("Can't translate value " + valueStr + " to dictionary ID, roundingFlag " + roundingFlag + "."
+                    + " Using default value " + String.format(Locale.ROOT, "\\x%02X", defaultByte));
         }
     }
 
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
index 9699d2ea55..d16a705c13 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.measure;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.KylinConfig;
@@ -133,11 +134,11 @@ public static synchronized void init() {
         // register factories & data type serializers
         for (MeasureTypeFactory<?> factory : factoryInsts) {
             String funcName = factory.getAggrFunctionName();
-            if (!funcName.equals(funcName.toUpperCase()))
+            if (!funcName.equals(funcName.toUpperCase(Locale.ROOT)))
                 throw new IllegalArgumentException(
                         "Aggregation function name '" + funcName + "' must be in upper case");
             String dataTypeName = factory.getAggrDataTypeName();
-            if (!dataTypeName.equals(dataTypeName.toLowerCase()))
+            if (!dataTypeName.equals(dataTypeName.toLowerCase(Locale.ROOT)))
                 throw new IllegalArgumentException(
                         "Aggregation data type name '" + dataTypeName + "' must be in lower case");
             Class<? extends DataTypeSerializer<?>> serializer = factory.getAggrDataTypeSerializer();
@@ -164,7 +165,7 @@ private static void registerUDAF(MeasureTypeFactory<?> factory) {
             return;
 
         for (String udaf : udafs.keySet()) {
-            udaf = udaf.toUpperCase();
+            udaf = udaf.toUpperCase(Locale.ROOT);
             if (udaf.equals(FunctionDesc.FUNC_COUNT_DISTINCT))
                 continue; // skip built-in function
 
@@ -200,7 +201,7 @@ private static void registerUDAF(MeasureTypeFactory<?> factory) {
     }
 
     public static MeasureType<?> create(String funcName, DataType dataType) {
-        funcName = funcName.toUpperCase();
+        funcName = funcName.toUpperCase(Locale.ROOT);
 
         List<MeasureTypeFactory<?>> factory = factories.get(funcName);
         if (factory == null)
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
index c95cd1c850..b38299ac3b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.measure.extendedcolumn;
 
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -114,7 +115,8 @@ public void adjustSqlDigest(List<MeasureDesc> measureDescs, SQLDigest sqlDigest)
     }
 
     @Override
-    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, final MeasureDesc measureDesc) {
+    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
+            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, final MeasureDesc measureDesc) {
         TblColRef extendedCol = getExtendedColumn(measureDesc.getFunction());
 
         if (!unmatchedDimensions.contains(extendedCol)) {
@@ -144,9 +146,11 @@ public boolean needAdvancedTupleFilling() {
         return true;
     }
 
-    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo, Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo,
+            Map<TblColRef, Dictionary<String>> dictionaryMap) {
         final TblColRef extended = getExtendedColumn(function);
-        final int extendedColumnInTupleIdx = returnTupleInfo.hasColumn(extended) ? returnTupleInfo.getColumnIndex(extended) : -1;
+        final int extendedColumnInTupleIdx = returnTupleInfo.hasColumn(extended)
+                ? returnTupleInfo.getColumnIndex(extended) : -1;
 
         if (extendedColumnInTupleIdx == -1) {
             throw new RuntimeException("Extended column is not required in returnTupleInfo");
@@ -216,7 +220,8 @@ public String truncateWhenUTF8(String s, int maxBytes) {
             }
 
             @Override
-            public ByteArray valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public ByteArray valueOf(String[] values, MeasureDesc measureDesc,
+                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 if (values.length <= 1)
                     throw new IllegalArgumentException();
 
@@ -229,7 +234,8 @@ public ByteArray valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef
                 if (bytes.length <= dataType.getPrecision()) {
                     return new ByteArray(bytes);
                 } else {
-                    return new ByteArray(truncateWhenUTF8(literal, dataType.getPrecision()).getBytes());
+                    return new ByteArray(
+                            truncateWhenUTF8(literal, dataType.getPrecision()).getBytes(StandardCharsets.UTF_8));
                 }
             }
         };
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
index f3959faf37..28d0ec78c1 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
@@ -23,6 +23,7 @@
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -230,9 +231,9 @@ private TableDesc getProjectSpecificTableDesc(String fullTableName, String prj)
      */
     private String getTableIdentity(String tableName) {
         if (!tableName.contains("."))
-            return "DEFAULT." + tableName.toUpperCase();
+            return "DEFAULT." + tableName.toUpperCase(Locale.ROOT);
         else
-            return tableName.toUpperCase();
+            return tableName.toUpperCase(Locale.ROOT);
     }
 
     public void saveSourceTable(TableDesc srcTable, String prj) throws IOException {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
index bcbbe4960b..4d4b87f526 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.metadata.datatype;
 
 import java.nio.ByteBuffer;
+import java.util.Locale;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.BooleanUtils;
@@ -59,8 +60,9 @@ public int getStorageBytesEstimate() {
     @Override
     public Long valueOf(String str) {
         if (str == null)
-           return Long.valueOf(0L);
+            return Long.valueOf(0L);
         else
-            return Long.valueOf(BooleanUtils.toInteger(ArrayUtils.contains(TRUE_VALUE_SET, str.toLowerCase())));
+            return Long
+                    .valueOf(BooleanUtils.toInteger(ArrayUtils.contains(TRUE_VALUE_SET, str.toLowerCase(Locale.ROOT))));
     }
 }
\ No newline at end of file
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
index efc47840d1..7580ad0202 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
@@ -23,6 +23,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -169,7 +170,7 @@ public DataType(String name, int precision, int scale) {
     }
 
     private DataType(String datatype) {
-        datatype = datatype.trim().toLowerCase();
+        datatype = datatype.trim().toLowerCase(Locale.ROOT);
         datatype = replaceLegacy(datatype);
 
         Pattern pattern = TYPE_PATTERN;
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
index aa9cd3db01..9082c1fd5a 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
@@ -24,6 +24,7 @@
 import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.util.BytesUtil;
@@ -66,7 +67,7 @@ public BuiltInFunctionTupleFilter(String name, FilterOperatorEnum filterOperator
         this.methodParams = Lists.newArrayList();
 
         if (name != null) {
-            this.name = name.toUpperCase();
+            this.name = name.toUpperCase(Locale.ROOT);
             initMethod();
         }
     }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
index e1561749b0..95f7ea6fde 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
@@ -22,6 +22,7 @@
 
 import java.lang.reflect.Method;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang3.reflect.MethodUtils;
@@ -130,14 +131,14 @@ public static String substring(String s, int from, int for_) {
     public static String upper(String s) {
         if (s == null)
             return null;
-        return s.toUpperCase();
+        return s.toUpperCase(Locale.ROOT);
     }
 
     /** SQL LOWER(string) function. */
     public static String lower(String s) {
         if (s == null)
             return null;
-        return s.toLowerCase();
+        return s.toLowerCase(Locale.ROOT);
     }
 
     /** SQL left || right */
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
index 861e530c32..811420166b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.metadata.filter.function;
 
 import java.lang.reflect.InvocationTargetException;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.metadata.filter.BuiltInFunctionTupleFilter;
@@ -44,7 +45,7 @@ public static TupleFilter getFunctionTupleFilter(String name) {
             throw new IllegalStateException("Function name cannot be null");
         }
 
-        name = name.toUpperCase();
+        name = name.toUpperCase(Locale.ROOT);
 
         if (SUPPORTED_UDF.containsKey(name)) {
             try {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
index ef1e412708..0d681c0872 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
@@ -17,6 +17,7 @@
  */
 package org.apache.kylin.metadata.filter.function;
 
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang.StringUtils;
@@ -36,7 +37,7 @@
         private Pattern p;
 
         private DefaultLikeMatcher(String patternStr) {
-            patternStr = patternStr.toLowerCase();
+            patternStr = patternStr.toLowerCase(Locale.ROOT);
             final String regex = Like.sqlToRegexLike(patternStr, null);
             p = Pattern.compile(regex);
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
index b8de5340c6..2b3f066a39 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
@@ -20,6 +20,7 @@
 
 import java.io.Serializable;
 
+import java.util.Locale;
 import org.apache.kylin.metadata.datatype.DataType;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
@@ -202,7 +203,7 @@ public void init(TableDesc table) {
         this.table = table;
 
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
 
         if (id != null)
             zeroBasedIndex = Integer.parseInt(id) - 1;
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index 9d655a4541..f46bff4f7c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -25,6 +25,7 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
@@ -282,7 +283,7 @@ public void setCapacity(RealizationCapacity capacity) {
 
     public TblColRef findColumn(String table, String column) throws IllegalArgumentException {
         TableRef tableRef = findTable(table);
-        TblColRef result = tableRef.getColumn(column.toUpperCase());
+        TblColRef result = tableRef.getColumn(column.toUpperCase(Locale.ROOT));
         if (result == null)
             throw new IllegalArgumentException("Column not found by " + table + "." + column);
         return result;
@@ -292,7 +293,7 @@ public TblColRef findColumn(String column) throws IllegalArgumentException {
         TblColRef result = null;
         String input = column;
 
-        column = column.toUpperCase();
+        column = column.toUpperCase(Locale.ROOT);
         int cut = column.lastIndexOf('.');
         if (cut > 0) {
             // table specified
@@ -314,7 +315,7 @@ public TblColRef findColumn(String column) throws IllegalArgumentException {
 
     // find by unique name, that must uniquely identifies a table in the model
     public TableRef findTable(String table) throws IllegalArgumentException {
-        TableRef result = tableNameMap.get(table.toUpperCase());
+        TableRef result = tableNameMap.get(table.toUpperCase(Locale.ROOT));
         if (result == null) {
             throw new IllegalArgumentException("Table not found by " + table);
         }
@@ -388,7 +389,7 @@ private void initTableAlias(Map<String, TableDesc> tables) {
             throw new IllegalStateException("root fact table should not be empty");
         }
 
-        rootFactTable = rootFactTable.toUpperCase();
+        rootFactTable = rootFactTable.toUpperCase(Locale.ROOT);
         if (tables.containsKey(rootFactTable) == false)
             throw new IllegalStateException("Root fact table does not exist:" + rootFactTable);
 
@@ -399,7 +400,7 @@ private void initTableAlias(Map<String, TableDesc> tables) {
         factTableRefs.add(rootFactTableRef);
 
         for (JoinTableDesc join : joinTables) {
-            join.setTable(join.getTable().toUpperCase());
+            join.setTable(join.getTable().toUpperCase(Locale.ROOT));
 
             if (tables.containsKey(join.getTable()) == false)
                 throw new IllegalStateException("Join table does not exist:" + join.getTable());
@@ -409,7 +410,7 @@ private void initTableAlias(Map<String, TableDesc> tables) {
             if (alias == null) {
                 alias = tableDesc.getName();
             }
-            alias = alias.toUpperCase();
+            alias = alias.toUpperCase(Locale.ROOT);
             join.setAlias(alias);
 
             boolean isLookup = join.getKind() == TableKind.LOOKUP;
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java
index 1b33f3594b..4f014da999 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java
@@ -23,6 +23,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 
 /**
  * @author xjiang
@@ -35,7 +36,7 @@
      * @return the name
      */
     public String getName() {
-        return name == null ? "null" : name.toUpperCase();
+        return name == null ? "null" : name.toUpperCase(Locale.ROOT);
     }
 
     /**
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
index 7ef84aabb9..2c8f6feb08 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.metadata.model;
 
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.RootPersistentEntity;
@@ -90,7 +91,7 @@ public void setName(String name) {
 
     public void init() {
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
     }
 
     @Override
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
index a3cdd7f41a..72d956a359 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
@@ -22,6 +22,7 @@
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -47,7 +48,7 @@
 
     public static FunctionDesc newInstance(String expression, ParameterDesc param, String returnType) {
         FunctionDesc r = new FunctionDesc();
-        r.expression = (expression == null) ? null : expression.toUpperCase();
+        r.expression = (expression == null) ? null : expression.toUpperCase(Locale.ROOT);
         r.parameter = param;
         r.returnType = returnType;
         r.returnDataType = DataType.getType(returnType);
@@ -91,7 +92,7 @@ public static FunctionDesc newInstance(String expression, ParameterDesc param, S
     private boolean isDimensionAsMetric = false;
 
     public void init(DataModelDesc model) {
-        expression = expression.toUpperCase();
+        expression = expression.toUpperCase(Locale.ROOT);
         if (expression.equals(PercentileMeasureType.FUNC_PERCENTILE)) {
             expression = PercentileMeasureType.FUNC_PERCENTILE_APPROX; // for backward compatibility
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
index 209da7daad..caadbd691b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.common.util.StringUtil;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
@@ -54,7 +55,7 @@ public void setColumns(String[] columns) {
     }
 
     void init(DataModelDesc model) {
-        table = table.toUpperCase();
+        table = table.toUpperCase(Locale.ROOT);
         if (columns != null) {
             StringUtil.toUpperCaseArray(columns, columns);
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
index 2cfa681131..dcb37ecdad 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.metadata.model;
 
 import java.io.Serializable;
+import java.util.Locale;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.util.ClassUtil;
@@ -199,7 +200,8 @@ public String buildDateRangeCondition(PartitionDesc partDesc, ISegment seg, Segm
             StringBuilder builder = new StringBuilder();
 
             if (partDesc.partitionColumnIsYmdInt()) {
-                buildSingleColumnRangeCondAsYmdInt(builder, partitionDateColumn, startInclusive, endExclusive, partDesc.getPartitionDateFormat());
+                buildSingleColumnRangeCondAsYmdInt(builder, partitionDateColumn, startInclusive, endExclusive,
+                        partDesc.getPartitionDateFormat());
             } else if (partDesc.partitionColumnIsTimeMillis()) {
                 buildSingleColumnRangeCondAsTimeMillis(builder, partitionDateColumn, startInclusive, endExclusive);
             } else if (partitionDateColumn != null && partitionTimeColumn == null) {
@@ -230,8 +232,8 @@ private static void buildSingleColumnRangeCondAsYmdInt(StringBuilder builder, Tb
             builder.append(partitionColumnName + " >= "
                     + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat));
             builder.append(" AND ");
-            builder.append(partitionColumnName + " < "
-                    + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat));
+            builder.append(
+                    partitionColumnName + " < " + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat));
         }
 
         private static void buildSingleColumnRangeCondition(StringBuilder builder, TblColRef partitionColumn,
@@ -308,8 +310,8 @@ public String buildDateRangeCondition(PartitionDesc partDesc, ISegment seg, Segm
             TblColRef partitionColumn = partDesc.getPartitionDateColumnRef();
             String tableAlias = partitionColumn.getTableAlias();
 
-            String concatField = String.format("CONCAT(%s.YEAR,'-',%s.MONTH,'-',%s.DAY)", tableAlias, tableAlias,
-                    tableAlias);
+            String concatField = String.format(Locale.ROOT, "CONCAT(%s.YEAR,'-',%s.MONTH,'-',%s.DAY)", tableAlias,
+                    tableAlias, tableAlias);
             StringBuilder builder = new StringBuilder();
 
             if (startInclusive > 0) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
index 3f9a774599..d8e3b028aa 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
@@ -21,6 +21,7 @@
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -53,7 +54,7 @@
     public static String concatRawResourcePath(String nameOnPath) {
         return ResourceStore.TABLE_RESOURCE_ROOT + "/" + nameOnPath + ".json";
     }
-    
+
     public static String makeResourceName(String tableIdentity, String prj) {
         return prj == null ? tableIdentity : tableIdentity + "--" + prj;
     }
@@ -134,7 +135,7 @@ public TableDesc(TableDesc other) {
     public String resourceName() {
         return makeResourceName(getIdentity(), getProject());
     }
-    
+
     public TableDesc appendColumns(ColumnDesc[] computedColumns, boolean makeCopy) {
         if (computedColumns == null || computedColumns.length == 0) {
             return this;
@@ -152,7 +153,7 @@ public TableDesc appendColumns(ColumnDesc[] computedColumns, boolean makeCopy) {
                 if (existingColumns[i].getName().equalsIgnoreCase(computedColumns[j].getName())) {
                     // if we're adding a computed column twice, it should be allowed without producing duplicates
                     if (!existingColumns[i].isComputedColumn()) {
-                        throw new IllegalArgumentException(String.format(
+                        throw new IllegalArgumentException(String.format(Locale.ROOT,
                                 "There is already a column named %s on table %s, please change your computed column name",
                                 new Object[] { computedColumns[j].getName(), this.getIdentity() }));
                     } else {
@@ -195,7 +196,7 @@ public String getResourcePath() {
         if (isBorrowedFromGlobal()) {
             return concatResourcePath(getIdentity(), null);
         }
-        
+
         return concatResourcePath(getIdentity(), project);
     }
 
@@ -209,7 +210,8 @@ public String getResourcePathV1() {
 
     public String getIdentity() {
         if (identity == null) {
-            identity = String.format("%s.%s", this.getDatabase().toUpperCase(), this.getName()).toUpperCase();
+            identity = String.format(Locale.ROOT, "%s.%s", this.getDatabase().toUpperCase(Locale.ROOT), this.getName())
+                    .toUpperCase(Locale.ROOT);
         }
         return identity;
     }
@@ -295,12 +297,12 @@ public String getDataGen() {
     public void init(KylinConfig config, String project) {
         this.project = project;
         this.config = config;
-        
+
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
 
         if (getDatabase() != null)
-            setDatabase(getDatabase().toUpperCase());
+            setDatabase(getDatabase().toUpperCase(Locale.ROOT));
 
         if (columns != null) {
             Arrays.sort(columns, new Comparator<ColumnDesc>() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
index 28ea2382e4..74cc2dbc1c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -202,7 +203,7 @@ public void init(String project) {
         this.project = project;
 
         if (this.tableIdentity != null)
-            this.tableIdentity = this.tableIdentity.toUpperCase();
+            this.tableIdentity = this.tableIdentity.toUpperCase(Locale.ROOT);
     }
 
     public void setLastModifiedTime(long lastModifiedTime) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java
index ee33e8a30e..918eedff9f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java
@@ -22,6 +22,7 @@
 
 import java.io.Serializable;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.metadata.datatype.DataType;
 
@@ -259,12 +260,12 @@ public String getTableWithSchema() {
         if (column.getTable() == null) {
             return "NULL";
         } else {
-            return column.getTable().getIdentity().toUpperCase();
+            return column.getTable().getIdentity().toUpperCase(Locale.ROOT);
         }
     }
 
     // return DB.TABLE.COLUMN
     public String getColumWithTableAndSchema() {
-        return (getTableWithSchema() + "." + column.getName()).toUpperCase();
+        return (getTableWithSchema() + "." + column.getName()).toUpperCase(Locale.ROOT);
     }
 }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java
index d48100c764..759e573e26 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java
@@ -21,6 +21,7 @@
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -69,7 +70,7 @@ public static String getLastNthName(SqlIdentifier id, int n) {
         //n = 1 is getting column
         //n = 2 is getting table's alias, if has.
         //n = 3 is getting database name, if has.
-        return id.names.get(id.names.size() - n).replace("\"", "").toUpperCase();
+        return id.names.get(id.names.size() - n).replace("\"", "").toUpperCase(Locale.ROOT);
     }
 
     public static void ensureNoAliasInExpr(String expr) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
index 45622f34e0..d12e927bce 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 import java.util.TreeSet;
 
@@ -242,11 +243,11 @@ public void setTables(Set<String> tables) {
     }
 
     public boolean containsTable(String tableName) {
-        return tables.contains(tableName.toUpperCase());
+        return tables.contains(tableName.toUpperCase(Locale.ROOT));
     }
 
     public void removeTable(String tableName) {
-        tables.remove(tableName.toUpperCase());
+        tables.remove(tableName.toUpperCase(Locale.ROOT));
     }
 
     public void addExtFilter(String extFilterName) {
@@ -258,7 +259,7 @@ public void removeExtFilter(String filterName) {
     }
 
     public void addTable(String tableName) {
-        tables.add(tableName.toUpperCase());
+        tables.add(tableName.toUpperCase(Locale.ROOT));
     }
 
     public Set<String> getTables() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index 13a61cf7d2..bad9773929 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -18,9 +18,16 @@
 
 package org.apache.kylin.metadata.project;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -41,14 +48,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 public class ProjectManager {
     private static final Logger logger = LoggerFactory.getLogger(ProjectManager.class);
@@ -66,11 +68,11 @@ static ProjectManager newInstance(KylinConfig config) throws IOException {
 
     private KylinConfig config;
     private ProjectL2Cache l2Cache;
-    
+
     // project name => ProjectInstance
     private CaseInsensitiveStringCache<ProjectInstance> projectMap;
     private CachedCrudAssist<ProjectInstance> crud;
-    
+
     // protects concurrent operations around the cached map, to avoid for example
     // writing an entity in the middle of reloading it (dirty read)
     private AutoReadWriteLock prjMapLock = new AutoReadWriteLock();
@@ -245,7 +247,7 @@ public ProjectInstance addModelToProject(String modelName, String newProjectName
                 throw new IllegalArgumentException("Project " + newProjectName + " does not exist.");
             }
             prj.addModel(modelName);
-            
+
             return save(prj);
         }
     }
@@ -366,7 +368,7 @@ public void touchProject(String projectName) throws IOException {
             save(projectInstance);
         }
     }
-    
+
     private ProjectInstance save(ProjectInstance prj) throws IOException {
         crud.save(prj);
         clearL2Cache(prj.getName());
@@ -459,15 +461,15 @@ public ProjectInstance getProjectOfModel(String model) {
     }
 
     public Set<IRealization> getRealizationsByTable(String project, String tableName) {
-        return l2Cache.getRealizationsByTable(project, tableName.toUpperCase());
+        return l2Cache.getRealizationsByTable(project, tableName.toUpperCase(Locale.ROOT));
     }
 
     public List<MeasureDesc> listEffectiveRewriteMeasures(String project, String factTable) {
-        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(), true);
+        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(Locale.ROOT), true);
     }
 
     public List<MeasureDesc> listEffectiveMeasures(String project, String factTable) {
-        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(), false);
+        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(Locale.ROOT), false);
     }
 
     KylinConfig getConfig() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
index 5fbb86f51c..d9c457791b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
@@ -21,6 +21,7 @@
 import static com.google.common.base.Predicates.not;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
@@ -92,7 +93,7 @@ public static String castReplace(String originString) {
             String castStr = castMatcher.group();
             String type = castMatcher.group(2);
             String supportedType = "";
-            switch (type.toUpperCase()) {
+            switch (type.toUpperCase(Locale.ROOT)) {
             case "INTEGER":
                 supportedType = "int";
                 break;
@@ -132,7 +133,7 @@ public static String subqueryReplace(String originString) {
             if (aliasMatcher.find()) {
                 String aliasCandidate = aliasMatcher.group(1);
 
-                if (aliasCandidate != null && !sqlKeyWordsExceptAS.contains(aliasCandidate.toUpperCase())) {
+                if (aliasCandidate != null && !sqlKeyWordsExceptAS.contains(aliasCandidate.toUpperCase(Locale.ROOT))) {
                     continue;
                 }
 
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
index f086ae9c62..b6c48fd682 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
@@ -20,12 +20,14 @@
 
 import java.io.IOException;
 import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.TreeSet;
 
@@ -182,7 +184,7 @@ public String next() {
         }
 
         private String formatNumber(double i) {
-            return new DecimalFormat(format).format(i);
+            return new DecimalFormat(format, DecimalFormatSymbols.getInstance(Locale.ROOT)).format(i);
         }
 
         private int randomInt() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
index b7e8d3a611..ca90206c60 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
@@ -26,11 +26,13 @@
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
@@ -41,9 +43,9 @@
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.DataModelDesc;
+import org.apache.kylin.metadata.model.DataModelManager;
 import org.apache.kylin.metadata.model.JoinDesc;
 import org.apache.kylin.metadata.model.JoinTableDesc;
-import org.apache.kylin.metadata.model.DataModelManager;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
@@ -67,7 +69,7 @@ public ModelDataGenerator(DataModelDesc model, int nRows) {
     private ModelDataGenerator(DataModelDesc model, int nRows, ResourceStore outputStore) {
         this(model, nRows, outputStore, "/data");
     }
-    
+
     private ModelDataGenerator(DataModelDesc model, int nRows, ResourceStore outputStore, String outputPath) {
         this.model = model;
         this.targetRows = nRows;
@@ -81,13 +83,14 @@ public void generate() throws IOException {
 
         JoinTableDesc[] allTables = model.getJoinTables();
         for (int i = allTables.length - 1; i >= -1; i--) { // reverse order needed for FK generation
-            TableDesc table = (i == -1) ? model.getRootFactTable().getTableDesc() : allTables[i].getTableRef().getTableDesc();
+            TableDesc table = (i == -1) ? model.getRootFactTable().getTableDesc()
+                    : allTables[i].getTableRef().getTableDesc();
             allTableDesc.add(table);
-            
+
             if (generated.contains(table))
                 continue;
 
-            logger.info(String.format("generating data for %s", table));
+            logger.info(String.format(Locale.ROOT, "generating data for %s", table));
             boolean gen = generateTable(table);
 
             if (gen)
@@ -103,7 +106,7 @@ private boolean generateTable(TableDesc table) throws IOException {
             return false;
 
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
-        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, "UTF-8"));
+        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, StandardCharsets.UTF_8));
 
         generateTableInternal(table, config, pout);
 
@@ -148,7 +151,7 @@ private void generateTableInternal(TableDesc table, TableGenConfig config, Print
     private void generateDDL(Set<TableDesc> tables) throws IOException {
 
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
-        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, "UTF-8"));
+        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, StandardCharsets.UTF_8));
 
         generateDatabaseDDL(tables, pout);
         generateCreateTableDDL(tables, pout);
@@ -178,7 +181,7 @@ private void generateCreateTableDDL(Set<TableDesc> tables, PrintWriter out) {
         for (TableDesc t : tables) {
             if (t.isView())
                 continue;
-            
+
             out.print("DROP TABLE IF EXISTS " + normHiveIdentifier(t.getIdentity()) + ";\n");
 
             out.print("CREATE TABLE " + normHiveIdentifier(t.getIdentity()) + "(" + "\n");
@@ -219,15 +222,16 @@ private void generateLoadDataDDL(Set<TableDesc> tables, PrintWriter out) {
                 out.print("-- " + t.getIdentity() + " is view \n");
                 continue;
             }
-            
-            out.print("LOAD DATA LOCAL INPATH '" + t.getIdentity() + ".csv' OVERWRITE INTO TABLE " + normHiveIdentifier(t.getIdentity()) + ";\n");
+
+            out.print("LOAD DATA LOCAL INPATH '" + t.getIdentity() + ".csv' OVERWRITE INTO TABLE "
+                    + normHiveIdentifier(t.getIdentity()) + ";\n");
         }
     }
 
     public boolean existsInStore(TableDesc table) throws IOException {
         return outputStore.exists(path(table));
     }
-    
+
     public boolean isPK(ColumnDesc col) {
         for (JoinTableDesc joinTable : model.getJoinTables()) {
             JoinDesc join = joinTable.getJoin();
@@ -238,7 +242,7 @@ public boolean isPK(ColumnDesc col) {
         }
         return false;
     }
-    
+
     public List<String> getPkValuesIfIsFk(ColumnDesc fk) throws IOException {
         JoinTableDesc[] joinTables = model.getJoinTables();
         for (int i = 0; i < joinTables.length; i++) {
@@ -269,7 +273,8 @@ private ColumnDesc findPk(JoinTableDesc joinTable, ColumnDesc fk) {
 
         List<String> r = new ArrayList<>();
 
-        BufferedReader in = new BufferedReader(new InputStreamReader(outputStore.getResource(path(pk.getTable())).inputStream, "UTF-8"));
+        BufferedReader in = new BufferedReader(
+                new InputStreamReader(outputStore.getResource(path(pk.getTable())).inputStream, "UTF-8"));
         try {
             String line;
             while ((line = in.readLine()) != null) {
@@ -305,11 +310,12 @@ public static void main(String[] args) throws IOException {
         String modelName = args[0];
         int nRows = Integer.parseInt(args[1]);
         String outputDir = args.length > 2 ? args[2] : null;
-        
+
         KylinConfig conf = KylinConfig.getInstanceFromEnv();
         DataModelDesc model = DataModelManager.getInstance(conf).getDataModelDesc(modelName);
-        ResourceStore store = outputDir == null ? ResourceStore.getStore(conf) : ResourceStore.getStore(mockup(outputDir));
-        
+        ResourceStore store = outputDir == null ? ResourceStore.getStore(conf)
+                : ResourceStore.getStore(mockup(outputDir));
+
         ModelDataGenerator gen = new ModelDataGenerator(model, nRows, store);
         gen.generate();
     }
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
index 4e67d220b6..7ccec637f9 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
@@ -20,6 +20,7 @@
 
 import java.math.BigDecimal;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -84,11 +85,8 @@ public static void after() throws Exception {
         decimalMax.aggregate(decimal);
         decimalSum.aggregate(decimal);
 
-        return Lists.newArrayList(
-                longMin, longMax, longSum,
-                doubleMin, doubleMax, doubleSum,
-                decimalMin, decimalMax, decimalSum
-        );
+        return Lists.newArrayList(longMin, longMax, longSum, doubleMin, doubleMax, doubleSum, decimalMin, decimalMax,
+                decimalSum);
     }
 
     private String getAggregatorName(Class<? extends MeasureAggregator> clazz) {
@@ -111,7 +109,8 @@ public void testAggregatorEstimate() {
         }
         bitmapAggregator.aggregate(bitmapCounter);
 
-        ExtendedColumnMeasureType extendedColumnType = new ExtendedColumnMeasureType("EXTENDED_COLUMN", DataType.getType("extendedcolumn(100)"));
+        ExtendedColumnMeasureType extendedColumnType = new ExtendedColumnMeasureType("EXTENDED_COLUMN",
+                DataType.getType("extendedcolumn(100)"));
         MeasureAggregator<ByteArray> extendedColumnAggregator = extendedColumnType.newAggregator();
         extendedColumnAggregator.aggregate(new ByteArray(100));
 
@@ -120,10 +119,11 @@ public void testAggregatorEstimate() {
         aggregators.add(bitmapAggregator);
         aggregators.add(extendedColumnAggregator);
 
-        System.out.printf("%40s %10s %10s\n", "Class", "Estimate", "Actual");
+        System.out.printf(Locale.ROOT, "%40s %10s %10s\n", "Class", "Estimate", "Actual");
         for (MeasureAggregator aggregator : aggregators) {
             String clzName = getAggregatorName(aggregator.getClass());
-            System.out.printf("%40s %10d %10d\n", clzName, aggregator.getMemBytesEstimate(), meter.measureDeep(aggregator));
+            System.out.printf(Locale.ROOT, "%40s %10d %10d\n", clzName, aggregator.getMemBytesEstimate(),
+                    meter.measureDeep(aggregator));
         }
     }
 
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java
index 835f7f20ca..c5b4cd48e3 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java
@@ -20,6 +20,7 @@
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -75,7 +76,7 @@ public void testNormal() {
         serializer.serialize(array, buffer);
         buffer.flip();
         ByteArray des = serializer.deserialize(buffer);
-        Assert.assertTrue(new ByteArray(text.getBytes()).equals(des));
+        Assert.assertTrue(new ByteArray(text.getBytes(StandardCharsets.UTF_8)).equals(des));
     }
 
     @Test
@@ -89,6 +90,6 @@ public void testOverflow() {
         serializer.serialize(array, buffer);
         buffer.flip();
         ByteArray des = serializer.deserialize(buffer);
-        Assert.assertTrue(new ByteArray(StringUtils.repeat("h", 20).getBytes()).equals(des));
+        Assert.assertTrue(new ByteArray(StringUtils.repeat("h", 20).getBytes(StandardCharsets.UTF_8)).equals(des));
     }
 }
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
index 3eba78be5b..2105d37b36 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
@@ -20,9 +20,13 @@
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
@@ -76,7 +80,7 @@ protected String prepareTestDate() throws IOException {
 
         if (tempFile.exists())
             FileUtils.forceDelete(tempFile);
-        FileWriter fw = new FileWriter(tempFile);
+        Writer fw = new OutputStreamWriter(new FileOutputStream(tempFile), StandardCharsets.UTF_8);
         try {
             for (int i = 0; i < TOTAL_RECORDS; i++) {
                 keyIndex = zipf.sample() - 1;
@@ -97,10 +101,12 @@ protected String prepareTestDate() throws IOException {
     //@Test
     public void testSingleSpaceSaving() throws IOException {
         String dataFile = prepareTestDate();
-        TopNCounterTest.SpaceSavingConsumer spaceSavingCounter = new TopNCounterTest.SpaceSavingConsumer(TOP_K * SPACE_SAVING_ROOM);
+        TopNCounterTest.SpaceSavingConsumer spaceSavingCounter = new TopNCounterTest.SpaceSavingConsumer(
+                TOP_K * SPACE_SAVING_ROOM);
         TopNCounterTest.HashMapConsumer accurateCounter = new TopNCounterTest.HashMapConsumer();
 
-        for (TopNCounterTest.TestDataConsumer consumer : new TopNCounterTest.TestDataConsumer[] { spaceSavingCounter, accurateCounter }) {
+        for (TopNCounterTest.TestDataConsumer consumer : new TopNCounterTest.TestDataConsumer[] { spaceSavingCounter,
+                accurateCounter }) {
             feedDataToConsumer(dataFile, consumer, 0, TOTAL_RECORDS);
         }
 
@@ -109,7 +115,8 @@ public void testSingleSpaceSaving() throws IOException {
         compareResult(spaceSavingCounter, accurateCounter);
     }
 
-    private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer, TopNCounterTest.TestDataConsumer secondConsumer) {
+    private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer,
+            TopNCounterTest.TestDataConsumer secondConsumer) {
         List<Pair<String, Double>> topResult1 = firstConsumer.getTopN(TOP_K);
         outputMsg("Get topN, Space saving takes " + firstConsumer.getSpentTime() / 1000 + " seconds");
         List<Pair<String, Double>> realSequence = secondConsumer.getTopN(TOP_K);
@@ -123,8 +130,10 @@ private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer, TopNC
                 //            if (topResult1.get(i).getFirst().equals(realSequence.get(i).getFirst()) && topResult1.get(i).getSecond().doubleValue() == realSequence.get(i).getSecond().doubleValue()) {
                 outputMsg("Passed; key:" + topResult1.get(i).getFirst() + ", value:" + topResult1.get(i).getSecond());
             } else {
-                outputMsg("Failed; space saving key:" + topResult1.get(i).getFirst() + ", value:" + topResult1.get(i).getSecond());
-                outputMsg("Failed; correct key:" + realSequence.get(i).getFirst() + ", value:" + realSequence.get(i).getSecond());
+                outputMsg("Failed; space saving key:" + topResult1.get(i).getFirst() + ", value:"
+                        + topResult1.get(i).getSecond());
+                outputMsg("Failed; correct key:" + realSequence.get(i).getFirst() + ", value:"
+                        + realSequence.get(i).getSecond());
                 error++;
             }
         }
@@ -167,7 +176,8 @@ public void testParallelSpaceSaving() throws IOException, ClassNotFoundException
 
     }
 
-    private TopNCounterTest.SpaceSavingConsumer[] singleMerge(TopNCounterTest.SpaceSavingConsumer[] consumers) throws IOException, ClassNotFoundException {
+    private TopNCounterTest.SpaceSavingConsumer[] singleMerge(TopNCounterTest.SpaceSavingConsumer[] consumers)
+            throws IOException, ClassNotFoundException {
         List<TopNCounterTest.SpaceSavingConsumer> list = Lists.newArrayList();
         if (consumers.length == 1)
             return consumers;
@@ -183,7 +193,8 @@ public void testParallelSpaceSaving() throws IOException, ClassNotFoundException
 
     }
 
-    private TopNCounterTest.SpaceSavingConsumer[] binaryMerge(TopNCounterTest.SpaceSavingConsumer[] consumers) throws IOException, ClassNotFoundException {
+    private TopNCounterTest.SpaceSavingConsumer[] binaryMerge(TopNCounterTest.SpaceSavingConsumer[] consumers)
+            throws IOException, ClassNotFoundException {
         List<TopNCounterTest.SpaceSavingConsumer> list = Lists.newArrayList();
         if (consumers.length == 1)
             return consumers;
@@ -199,9 +210,11 @@ public void testParallelSpaceSaving() throws IOException, ClassNotFoundException
         return binaryMerge(list.toArray(new TopNCounterTest.SpaceSavingConsumer[list.size()]));
     }
 
-    private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsumer consumer, int startLine, int endLine) throws IOException {
+    private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsumer consumer, int startLine,
+            int endLine) throws IOException {
         long startTime = System.currentTimeMillis();
-        BufferedReader bufferedReader = new BufferedReader(new FileReader(dataFile));
+        BufferedReader bufferedReader = new BufferedReader(
+                new InputStreamReader(new FileInputStream(dataFile), StandardCharsets.UTF_8));
 
         int lineNum = 0;
         String line = bufferedReader.readLine();
@@ -214,7 +227,8 @@ private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsume
         }
 
         bufferedReader.close();
-        outputMsg("feed data to " + consumer.getClass().getCanonicalName() + " take time (seconds): " + (System.currentTimeMillis() - startTime) / 1000);
+        outputMsg("feed data to " + consumer.getClass().getCanonicalName() + " take time (seconds): "
+                + (System.currentTimeMillis() - startTime) / 1000);
     }
 
     private void outputMsg(String msg) {
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/MetricsManager.java b/core-metrics/src/main/java/org/apache/kylin/metrics/MetricsManager.java
index bcfb2751e8..aff8cc9533 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/MetricsManager.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/MetricsManager.java
@@ -83,6 +83,6 @@ private static void setSystemCubeSink(Sink systemCubeSink) {
             }
         }
         scSink = systemCubeSink;
     }
 
     private static void setSourceReporterBindProps(
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java
index 93da2f445f..6452d78242 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java
@@ -23,6 +23,7 @@
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
@@ -206,7 +207,7 @@ public String getType() {
 
     @Override
     public byte[] getKey() {
-        return (getHost() + "-" + getTime() + "-" + getID()).getBytes();
+        return (getHost() + "-" + getTime() + "-" + getID()).getBytes(StandardCharsets.UTF_8);
     }
 
     @Override
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java
index ff97b9b121..c611d0f943 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java
@@ -20,6 +20,7 @@
 
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
+import java.util.Locale;
 import java.util.TimeZone;
 
 import org.apache.kylin.common.KylinConfig;
@@ -43,24 +44,24 @@
     public final String week_begin_date;
 
     public RecordEventTimeDetail(long timeStamp) {
-        Calendar calendar = Calendar.getInstance(timeZone);
+        Calendar calendar = Calendar.getInstance(timeZone, Locale.ROOT);
         calendar.setTimeInMillis(timeStamp);
 
         SimpleDateFormat dateFormat = dateFormatThreadLocal.get();
         if (dateFormat == null) {
-            dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+            dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
             dateFormat.setTimeZone(timeZone);
             dateFormatThreadLocal.set(dateFormat);
         }
         SimpleDateFormat timeFormat = timeFormatThreadLocal.get();
         if (timeFormat == null) {
-            timeFormat = new SimpleDateFormat("HH:mm:ss");
+            timeFormat = new SimpleDateFormat("HH:mm:ss", Locale.ROOT);
             timeFormat.setTimeZone(timeZone);
             timeFormatThreadLocal.set(timeFormat);
         }
 
-        String yearStr = String.format("%04d", calendar.get(Calendar.YEAR));
-        String monthStr = String.format("%02d", calendar.get(Calendar.MONTH) + 1);
+        String yearStr = String.format(Locale.ROOT, "%04d", calendar.get(Calendar.YEAR));
+        String monthStr = String.format(Locale.ROOT, "%02d", calendar.get(Calendar.MONTH) + 1);
         this.year_begin_date = yearStr + "-01-01";
         this.month_begin_date = yearStr + "-" + monthStr + "-01";
         this.date = dateFormat.format(calendar.getTime());
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java
index 1336843221..c013b4c3f3 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.lib.impl;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum TimePropertyEnum {
@@ -36,7 +38,7 @@ public static TimePropertyEnum getByPropertyName(String propertyName) {
             return null;
         }
         for (TimePropertyEnum property : TimePropertyEnum.values()) {
-            if (property.propertyName.equals(propertyName.toUpperCase())) {
+            if (property.propertyName.equals(propertyName.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java
index 64d13acf1d..3ca567ed75 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum JobPropertyEnum {
@@ -41,7 +43,7 @@ public static JobPropertyEnum getByName(String name) {
             return null;
         }
         for (JobPropertyEnum property : JobPropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java
index d2d5bb4765..21477dc57a 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum QueryCubePropertyEnum {
@@ -39,7 +41,7 @@ public static QueryCubePropertyEnum getByName(String name) {
             return null;
         }
         for (QueryCubePropertyEnum property : QueryCubePropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java
index 3f016b0d38..20da4cee8e 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum QueryPropertyEnum {
@@ -37,7 +39,7 @@ public static QueryPropertyEnum getByName(String name) {
             return null;
         }
         for (QueryPropertyEnum property : QueryPropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java
index 049b9edecf..4366f0d7b6 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum QueryRPCPropertyEnum {
@@ -36,7 +38,7 @@ public static QueryRPCPropertyEnum getByName(String name) {
             return null;
         }
         for (QueryRPCPropertyEnum property : QueryRPCPropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/dev-support/signatures.txt b/dev-support/signatures.txt
new file mode 100644
index 0000000000..c6f4d54f54
--- /dev/null
+++ b/dev-support/signatures.txt
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to you under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Signatures of APIs to avoid.
+# Cribbed from Elasticsearch
+
+java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars
+java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
+
+@defaultMessage Please do not try to kill the world
+java.lang.Runtime#exit(int)
+
+# End signatures.txt
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java
index 1a779d2ab7..a06fd46a04 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java
@@ -21,6 +21,7 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
@@ -35,7 +36,7 @@
 
 public class BatchOptimizeJobCheckpointBuilder {
 
-    protected static SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+    protected static SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
 
     final protected CubeInstance cube;
     final protected String submitter;
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
index 7f7191d047..fb1a7f4ed0 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
@@ -21,6 +21,7 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TimeZone;
 import java.util.regex.Matcher;
@@ -86,7 +87,7 @@ public static CubingJobTypeEnum getByName(String name) {
                 return null;
             }
             for (CubingJobTypeEnum jobTypeEnum : CubingJobTypeEnum.values()) {
-                if (jobTypeEnum.name.equals(name.toUpperCase())) {
+                if (jobTypeEnum.name.equals(name.toUpperCase(Locale.ROOT))) {
                     return jobTypeEnum;
                 }
             }
@@ -136,7 +137,7 @@ private static CubingJob initCubingJob(CubeSegment seg, String jobType, String s
         }
 
         CubingJob result = new CubingJob();
-        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
         format.setTimeZone(TimeZone.getTimeZone(config.getTimeZone()));
         result.setDeployEnvName(kylinConfig.getDeployEnv());
         result.setProjectName(projList.get(0).getName());
@@ -279,8 +280,7 @@ protected void updateMetrics(ExecutableContext context, ExecuteResult result, Ex
             jobStats.setJobStats(findSourceSizeBytes(), findCubeSizeBytes(), getDuration(), getMapReduceWaitTime(),
                     getPerBytesTimeCost(findSourceSizeBytes(), getDuration()));
             if (CubingJobTypeEnum.getByName(getJobType()) == CubingJobTypeEnum.BUILD) {
-                jobStats.setJobStepStats(
-                        getTaskDurationByName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS),
+                jobStats.setJobStepStats(getTaskDurationByName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS),
                         getTaskDurationByName(ExecutableConstants.STEP_NAME_BUILD_DICTIONARY),
                         getTaskDurationByName(ExecutableConstants.STEP_NAME_BUILD_IN_MEM_CUBE),
                         getTaskDurationByName(ExecutableConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE));
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java
index 6865ce3be7..3ebd7d2577 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java
@@ -21,6 +21,7 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.TimeZone;
 
 import org.apache.kylin.common.KylinConfig;
@@ -58,7 +59,7 @@ private static LookupSnapshotBuildJob initJob(CubeInstance cube, String tableNam
         }
 
         LookupSnapshotBuildJob result = new LookupSnapshotBuildJob();
-        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
         format.setTimeZone(TimeZone.getTimeZone(kylinConfig.getTimeZone()));
         result.setDeployEnvName(kylinConfig.getDeployEnv());
         result.setProjectName(projList.get(0).getName());
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
index 6b8934abb4..102995e113 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
@@ -18,15 +18,20 @@
 
 package org.apache.kylin.engine.mr.common;
 
+import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -361,7 +366,7 @@ private static void printOneCuboidInfo(long parent, long cuboidID, Map<Long, Lon
     }
 
     private static String formatDouble(double input) {
-        return new DecimalFormat("#.##").format(input);
+        return new DecimalFormat("#.##", DecimalFormatSymbols.getInstance(Locale.ROOT)).format(input);
     }
 
     public static class CubeStatsResult {
@@ -423,7 +428,8 @@ public static void main(String[] args) throws IOException {
         CubeInstance cube = CubeManager.getInstance(config).getCube(args[0]);
         List<CubeSegment> segments = cube.getSegments();
 
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(
+                new BufferedWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));
         for (CubeSegment seg : segments) {
             try {
                 new CubeStatsReader(seg, config).print(out);
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
index d7da2c2752..0e93ee7227 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
+
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -97,7 +99,7 @@ public int run(String[] args) throws Exception {
             parseOptions(options, args);
 
             String output = getOptionValue(OPTION_OUTPUT_PATH);
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             int nCuboidLevel = Integer.parseInt(getOptionValue(OPTION_NCUBOID_LEVEL));
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             String cubingJobId = getOptionValue(OPTION_CUBING_JOB_ID);
@@ -113,7 +115,8 @@ public int run(String[] args) throws Exception {
             cuboidScheduler = CuboidSchedulerUtil.getCuboidSchedulerByMode(segment, cuboidModeName);
 
             if (checkSkip(cubingJobId, nCuboidLevel)) {
-                logger.info("Skip job " + getOptionValue(OPTION_JOB_NAME) + " for " + segmentID + "[" + segmentID + "]");
+                logger.info(
+                        "Skip job " + getOptionValue(OPTION_JOB_NAME) + " for " + segmentID + "[" + segmentID + "]");
                 return 0;
             }
 
@@ -166,7 +169,8 @@ private void configureMapperInputFormat(CubeSegment cubeSeg) throws Exception {
 
         if ("FLAT_TABLE".equals(input)) {
             // base cuboid case
-            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg).getFlatTableInputFormat();
+            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg)
+                    .getFlatTableInputFormat();
             flatTableInputFormat.configureJob(job);
         } else {
             // n-dimension cuboid case
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
index 2dd9f00f91..a7fa2cdc48 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
@@ -23,6 +23,7 @@
 import java.util.List;
 
 import com.google.common.collect.Lists;
+import java.util.Locale;
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeManager;
@@ -62,7 +63,7 @@
     @Override
     protected void doSetup(Context context) throws IOException {
         super.bindCurrentConfiguration(context.getConfiguration());
-        cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
+        cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(Locale.ROOT);
 
         // only used in Build job, not in Merge job
         cuboidLevel = context.getConfiguration().getInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 0);
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
index 61ba2479fd..27b5208fbc 100755
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
@@ -21,6 +21,7 @@
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -98,7 +99,7 @@ protected void doSetup(Context context) throws IOException {
         taskId = context.getTaskAttemptID().getTaskID().getId();
 
         reducerMapping = new FactDistinctColumnsReducerMapping(cube);
-        
+
         logger.info("reducer no " + taskId + ", role play " + reducerMapping.getRolePlayOfReducer(taskId));
 
         if (reducerMapping.isCuboidRowCounterReducer(taskId)) {
@@ -132,7 +133,8 @@ protected void doSetup(Context context) throws IOException {
     }
 
     @Override
-    public void doReduce(SelfDefineSortableKey skey, Iterable<Text> values, Context context) throws IOException, InterruptedException {
+    public void doReduce(SelfDefineSortableKey skey, Iterable<Text> values, Context context)
+            throws IOException, InterruptedException {
         Text key = skey.getText();
         if (isStatistics) {
             // for hll
@@ -219,12 +221,12 @@ private void outputDimRangeInfo() throws IOException, InterruptedException {
             // output written to baseDir/colName/colName.dci-r-00000 (etc)
             String dimRangeFileName = col.getIdentity() + "/" + col.getName() + DIMENSION_COL_INFO_FILE_POSTFIX;
 
-            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new Text(minValue.getBytes()),
-                    dimRangeFileName);
-            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new Text(maxValue.getBytes()),
-                    dimRangeFileName);
-            logger.info("write dimension range info for col : " + col.getName() + "  minValue:" + minValue + " maxValue:"
-                    + maxValue);
+            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(),
+                    new Text(minValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(),
+                    new Text(maxValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName);
+            logger.info("write dimension range info for col : " + col.getName() + "  minValue:" + minValue
+                    + " maxValue:" + maxValue);
         }
     }
 
@@ -232,11 +234,13 @@ private void outputDict(TblColRef col, Dictionary<String> dict) throws IOExcepti
         // output written to baseDir/colName/colName.rldict-r-00000 (etc)
         String dictFileName = col.getIdentity() + "/" + col.getName() + DICT_FILE_POSTFIX;
 
-        try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream outputStream = new DataOutputStream(baos);) {
+        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                DataOutputStream outputStream = new DataOutputStream(baos);) {
             outputStream.writeUTF(dict.getClass().getName());
             dict.write(outputStream);
 
-            mos.write(BatchConstants.CFG_OUTPUT_DICT, NullWritable.get(), new ArrayPrimitiveWritable(baos.toByteArray()), dictFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_DICT, NullWritable.get(),
+                    new ArrayPrimitiveWritable(baos.toByteArray()), dictFileName);
         }
     }
 
@@ -252,19 +256,23 @@ private void outputStatistics(List<Long> allCuboids) throws IOException, Interru
             grandTotal += hll.getCountEstimate();
         }
         double mapperOverlapRatio = grandTotal == 0 ? 0 : (double) totalRowsBeforeMerge / grandTotal;
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1), new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1),
+                new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);
 
         // mapper number at key -2
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2), new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2),
+                new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);
 
         // sampling percentage at key 0
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L), new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L),
+                new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);
 
         for (long i : allCuboids) {
             valueBuf.clear();
             cuboidHLLMap.get(i).writeRegisters(valueBuf);
             valueBuf.flip();
-            mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i), new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i),
+                    new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
         }
     }
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java
index b56434b7ae..2fbbc737bb 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -52,7 +53,7 @@ public int run(String[] args) throws Exception {
             parseOptions(options, args);
 
             job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             Path input = new Path(getOptionValue(OPTION_INPUT_PATH));
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java
index 7bfa33af31..63e651c29a 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
+
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -79,7 +81,7 @@ public int run(String[] args) throws Exception {
             options.addOption(OPTION_NEED_UPDATE_BASE_CUBOID_SHARD);
             parseOptions(options, args);
 
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             String output = getOptionValue(OPTION_OUTPUT_PATH);
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
index f8874fe1e0..02dc71c91f 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -73,7 +74,7 @@ public int run(String[] args) throws Exception {
             options.addOption(OPTION_DICTIONARY_SHRUNKEN_PATH);
             parseOptions(options, args);
 
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             String output = getOptionValue(OPTION_OUTPUT_PATH);
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
index 5aa2c69c50..d36545d6a2 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
@@ -22,6 +22,7 @@
 import java.nio.ByteBuffer;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
@@ -59,7 +60,7 @@ protected void doSetup(Context context) throws IOException {
         super.bindCurrentConfiguration(context.getConfiguration());
         KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
 
-        String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
+        String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(Locale.ROOT);
         CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
         CubeDesc cubeDesc = cube.getDescriptor();
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
index 63d0619641..e144ec3637 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -45,7 +46,7 @@ public int run(String[] args) throws Exception {
 
             String input = getOptionValue(OPTION_INPUT_PATH);
             String output = getOptionValue(OPTION_OUTPUT_PATH);
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
 
             CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
index c75abea2d1..756d8eed28 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
@@ -20,6 +20,7 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
@@ -28,9 +29,7 @@
 public class SelfDefineSortableKey implements WritableComparable<SelfDefineSortableKey> {
 
     public enum TypeFlag {
-        NONE_NUMERIC_TYPE,
-        INTEGER_FAMILY_TYPE,
-        DOUBLE_FAMILY_TYPE
+        NONE_NUMERIC_TYPE, INTEGER_FAMILY_TYPE, DOUBLE_FAMILY_TYPE
     }
 
     private byte typeId; //non-numeric(0000 0000) int(0000 0001) other numberic(0000 0010)
@@ -50,7 +49,7 @@ public void init(Text key, byte typeId) {
         this.typeId = typeId;
         this.rawKey = key;
         if (isNumberFamily()) {
-            String valueStr = new String(key.getBytes(), 1, key.getLength() - 1);
+            String valueStr = new String(key.getBytes(), 1, key.getLength() - 1, StandardCharsets.UTF_8);
             if (isIntegerFamily()) {
                 this.keyInObj = Long.parseLong(valueStr);
             } else {
@@ -61,7 +60,6 @@ public void init(Text key, byte typeId) {
         }
     }
 
-
     public void init(Text key, DataType type) {
         init(key, getTypeIdByDatatype(type));
     }
@@ -113,7 +111,6 @@ public boolean isIntegerFamily() {
         return (typeId == TypeFlag.INTEGER_FAMILY_TYPE.ordinal());
     }
 
-
     public byte getTypeIdByDatatype(DataType type) {
         if (!type.isNumberFamily()) {
             return (byte) TypeFlag.NONE_NUMERIC_TYPE.ordinal();
@@ -129,5 +126,3 @@ public void setTypeId(byte typeId) {
     }
 
 }
-
-
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
index fdb19db5f5..a84a3a120e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
@@ -21,6 +21,7 @@
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -66,7 +67,8 @@ public UpdateCubeInfoAfterBuildStep() {
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
-        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
+        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()))
+                .latestCopyForWrite();
         final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
 
         CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
@@ -92,7 +94,8 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio
         }
     }
 
-    private void saveExtSnapshotIfNeeded(CubeManager cubeManager, CubeInstance cube, CubeSegment segment) throws IOException {
+    private void saveExtSnapshotIfNeeded(CubeManager cubeManager, CubeInstance cube, CubeSegment segment)
+            throws IOException {
         String extLookupSnapshotStr = this.getParam(BatchConstants.ARG_EXT_LOOKUP_SNAPSHOTS_INFO);
         if (extLookupSnapshotStr == null || extLookupSnapshotStr.isEmpty()) {
             return;
@@ -142,7 +145,7 @@ private void updateSegment(CubeSegment segment) throws IOException {
             for (Path outputFile : outputFiles) {
                 try {
                     is = fs.open(outputFile);
-                    isr = new InputStreamReader(is);
+                    isr = new InputStreamReader(is, StandardCharsets.UTF_8);
                     bufferedReader = new BufferedReader(isr);
                     minValues.add(bufferedReader.readLine());
                     maxValues.add(bufferedReader.readLine());
@@ -157,10 +160,14 @@ private void updateSegment(CubeSegment segment) throws IOException {
             String maxValue = order.max(maxValues);
             logger.info("updateSegment step. {} minValue:" + minValue + " maxValue:" + maxValue, dimColRef.getName());
 
-            if (segment.isOffsetCube() && partitionCol != null && partitionCol.getIdentity().equals(dimColRef.getIdentity())) {
-                logger.info("update partition. {} timeMinValue:" + minValue + " timeMaxValue:" + maxValue, dimColRef.getName());
-                if (DateFormat.stringToMillis(minValue) != timeMinValue && DateFormat.stringToMillis(maxValue) != timeMaxValue) {
-                    segment.setTSRange(new TSRange(DateFormat.stringToMillis(minValue), DateFormat.stringToMillis(maxValue) + 1));
+            if (segment.isOffsetCube() && partitionCol != null
+                    && partitionCol.getIdentity().equals(dimColRef.getIdentity())) {
+                logger.info("update partition. {} timeMinValue:" + minValue + " timeMaxValue:" + maxValue,
+                        dimColRef.getName());
+                if (DateFormat.stringToMillis(minValue) != timeMinValue
+                        && DateFormat.stringToMillis(maxValue) != timeMaxValue) {
+                    segment.setTSRange(
+                            new TSRange(DateFormat.stringToMillis(minValue), DateFormat.stringToMillis(maxValue) + 1));
                 }
             }
             segment.getDimensionRangeInfoMap().put(dimColRef.getIdentity(), new DimensionRangeInfo(minValue, maxValue));
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java
index b71e459576..80c483ea28 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -54,7 +55,7 @@ public int run(String[] args) throws Exception {
             parseOptions(options, args);
 
             job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             Path input = new Path(getOptionValue(OPTION_INPUT_PATH));
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
index 29e2f6561b..aea6b0ba89 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
@@ -23,8 +23,11 @@
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -100,7 +103,7 @@ public void createStringTestFiles() throws Exception {
         }
         ArrayList<BufferedWriter> bws = new ArrayList<>();
         for (File f : allFiles) {
-            bws.add(new BufferedWriter(new FileWriter(f)));
+            bws.add(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)));
         }
         System.out.println(data.size());
         for (String str : data) {
@@ -205,7 +208,7 @@ public void createIntegerTestFiles() throws Exception {
         }
         ArrayList<BufferedWriter> bws = new ArrayList<>();
         for (File f : allFiles) {
-            bws.add(new BufferedWriter(new FileWriter(f)));
+            bws.add(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)));
         }
         System.out.println(data.size());
         for (String str : data) {
@@ -278,7 +281,7 @@ public void createDoubleTestFiles() throws Exception {
         }
         ArrayList<BufferedWriter> bws = new ArrayList<>();
         for (File f : allFiles) {
-            bws.add(new BufferedWriter(new FileWriter(f)));
+            bws.add(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)));
         }
         System.out.println(data.size());
         for (String str : data) {
@@ -302,7 +305,8 @@ public void createDoubleTestFiles() throws Exception {
         ArrayList<String> result = new ArrayList<>();
         File dir = new File(dirPath);
         for (File f : dir.listFiles()) {
-            BufferedReader br = new BufferedReader(new FileReader(f));
+            BufferedReader br = new BufferedReader(
+                    new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8));
             String str = br.readLine();
             while (str != null) {
                 result.add(str);
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index ad1c2355cf..f3b0a13fd5 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -17,6 +17,13 @@
 */
 package org.apache.kylin.engine.spark;
 
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -62,13 +69,8 @@
 import org.apache.spark.storage.StorageLevel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.Tuple2;
 
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
+import scala.Tuple2;
 
 /**
  * Spark application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
@@ -87,8 +89,8 @@
             .isRequired(true).withDescription("Cube output path").create(BatchConstants.ARG_OUTPUT);
     public static final Option OPTION_INPUT_TABLE = OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true)
             .withDescription("Hive Intermediate Table").create("hiveTable");
-    public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg().isRequired(true)
-            .withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
+    public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
+            .isRequired(true).withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
 
     private Options options;
 
@@ -162,7 +164,8 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
 
         boolean isSequenceFile = JoinedFlatTable.SEQUENCEFILE.equalsIgnoreCase(envConfig.getFlatTableStorageFormat());
 
-        final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = SparkUtil.hiveRecordInputRDD(isSequenceFile, sc, inputPath, hiveTable)
+        final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = SparkUtil
+                .hiveRecordInputRDD(isSequenceFile, sc, inputPath, hiveTable)
                 .mapToPair(new EncodeBaseCuboid(cubeName, segmentId, metaUrl, sConf));
 
         Long totalCount = 0L;
@@ -170,7 +173,8 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
             totalCount = encodedBaseRDD.count();
         }
 
-        final BaseCuboidReducerFunction2 baseCuboidReducerFunction = new BaseCuboidReducerFunction2(cubeName, metaUrl, sConf);
+        final BaseCuboidReducerFunction2 baseCuboidReducerFunction = new BaseCuboidReducerFunction2(cubeName, metaUrl,
+                sConf);
         BaseCuboidReducerFunction2 reducerFunction2 = baseCuboidReducerFunction;
         if (allNormalMeasure == false) {
             reducerFunction2 = new CuboidReducerFunction2(cubeName, metaUrl, sConf, needAggr);
@@ -205,8 +209,6 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         //HadoopUtil.deleteHDFSMeta(metaUrl);
     }
 
-
-
     protected JavaPairRDD<ByteArray, Object[]> prepareOutput(JavaPairRDD<ByteArray, Object[]> rdd, KylinConfig config,
             CubeSegment segment, int level) {
         return rdd;
@@ -342,7 +344,8 @@ public void init() {
     static public class CuboidReducerFunction2 extends BaseCuboidReducerFunction2 {
         private boolean[] needAggr;
 
-        public CuboidReducerFunction2(String cubeName, String metaUrl, SerializableConfiguration conf, boolean[] needAggr) {
+        public CuboidReducerFunction2(String cubeName, String metaUrl, SerializableConfiguration conf,
+                boolean[] needAggr) {
             super(cubeName, metaUrl, conf);
             this.needAggr = needAggr;
         }
@@ -386,7 +389,8 @@ public CuboidFlatMap(String cubeName, String segmentId, String metaUrl, Serializ
 
         public void init() {
             KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
-            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig.setAndUnsetThreadLocalConfig(kConfig)) {
+            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
+                    .setAndUnsetThreadLocalConfig(kConfig)) {
                 CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
                 this.cubeSegment = cubeInstance.getSegmentById(segmentId);
                 this.cubeDesc = cubeInstance.getDescriptor();
@@ -436,8 +440,8 @@ protected void sanityCheck(JavaPairRDD<ByteArray, Object[]> rdd, Long totalCount
         Long count2 = getRDDCountSum(rdd, countMeasureIndex);
         if (count2 != totalCount * thisCuboidNum) {
             throw new IllegalStateException(
-                    String.format("Sanity check failed, level %s, total count(*) is %s; cuboid number %s", thisLevel,
-                            count2, thisCuboidNum));
+                    String.format(Locale.ROOT, "Sanity check failed, level %s, total count(*) is %s; cuboid number %s",
+                            thisLevel, count2, thisCuboidNum));
         } else {
             logger.info("sanity check success for level " + thisLevel + ", count(*) is " + (count2 / thisCuboidNum));
         }
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
index a354909f4b..fcf54846fe 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Callable;
@@ -194,9 +195,8 @@ private ExecuteResult onResumed(String appId, ExecutableManager mgr) throws Exec
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         ExecutableManager mgr = getManager();
         Map<String, String> extra = mgr.getOutput(getId()).getExtra();
-        String sparkJobId = extra.get(ExecutableConstants.SPARK_JOB_ID);
-        if (!StringUtils.isEmpty(sparkJobId)) {
-            return onResumed(sparkJobId, mgr);
+        if (extra.containsKey(ExecutableConstants.SPARK_JOB_ID)) {
+            return onResumed(extra.get(ExecutableConstants.SPARK_JOB_ID), mgr);
         } else {
             String cubeName = this.getParam(SparkCubingByLayer.OPTION_CUBE_NAME.getOpt());
             CubeInstance cube = CubeManager.getInstance(context.getConfig()).getCube(cubeName);
@@ -256,8 +256,8 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio
             }
 
             stringBuilder.append("--jars %s %s %s");
-            final String cmd = String.format(stringBuilder.toString(), hadoopConf, KylinConfig.getSparkHome(), jars,
-                    jobJar, formatArgs());
+            final String cmd = String.format(Locale.ROOT, stringBuilder.toString(), hadoopConf,
+                    KylinConfig.getSparkHome(), jars, jobJar, formatArgs());
             logger.info("cmd: " + cmd);
             final ExecutorService executorService = Executors.newSingleThreadExecutor();
             final CliCommandExecutor exec = new CliCommandExecutor();
@@ -371,7 +371,7 @@ private void setAlgorithmLayer() {
     private String getAppState(String appId) throws IOException {
         CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
         PatternedLogger patternedLogger = new PatternedLogger(logger);
-        String stateCmd = String.format("yarn application -status %s", appId);
+        String stateCmd = String.format(Locale.ROOT, "yarn application -status %s", appId);
         executor.execute(stateCmd, patternedLogger);
         Map<String, String> info = patternedLogger.getInfo();
         return info.get(ExecutableConstants.YARN_APP_STATE);
@@ -379,7 +379,7 @@ private String getAppState(String appId) throws IOException {
 
     private void killApp(String appId) throws IOException, InterruptedException {
         CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
-        String killCmd = String.format("yarn application -kill %s", appId);
+        String killCmd = String.format(Locale.ROOT, "yarn application -kill %s", appId);
         executor.execute(killCmd);
     }
 
@@ -421,7 +421,8 @@ private void attachSegmentMetadataWithDict(CubeSegment segment) throws IOExcepti
             // cube statistics is not available for new segment
             dumpList.add(segment.getStatisticsResourcePath());
         }
-        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segment.getConfig(), this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
+        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segment.getConfig(),
+                this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
     }
 
     private void attachSegmentsMetadataWithDict(List<CubeSegment> segments) throws IOException {
@@ -435,7 +436,8 @@ private void attachSegmentsMetadataWithDict(List<CubeSegment> segments) throws I
                 dumpList.add(segment.getStatisticsResourcePath());
             }
         }
-        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segments.get(0).getConfig(), this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
+        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segments.get(0).getConfig(),
+                this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
     }
 
     private void readCounters(final Map<String, String> info) {
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
index 7f052341f0..b12fd02fb8 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
@@ -18,13 +18,18 @@
 
 package org.apache.kylin.engine.spark;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -86,20 +91,18 @@
 import org.apache.spark.util.LongAccumulator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+
 import scala.Tuple2;
 import scala.Tuple3;
 
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 public class SparkFactDistinct extends AbstractApplication implements Serializable {
 
     protected static final Logger logger = LoggerFactory.getLogger(SparkFactDistinct.class);
@@ -119,8 +122,8 @@
             .withDescription("Hive Intermediate Table").create("hiveTable");
     public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
             .isRequired(true).withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
-    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUPUT).hasArg()
-            .isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUPUT);
+    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUPUT)
+            .hasArg().isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUPUT);
 
     private Options options;
 
@@ -152,7 +155,8 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         String counterPath = optionsHelper.getOptionValue(OPTION_COUNTER_PATH);
         int samplingPercent = Integer.parseInt(optionsHelper.getOptionValue(OPTION_STATS_SAMPLING_PERCENT));
 
-        Class[] kryoClassArray = new Class[] { Class.forName("scala.reflect.ClassTag$$anon$1"), Class.forName("org.apache.kylin.engine.mr.steps.SelfDefineSortableKey") };
+        Class[] kryoClassArray = new Class[] { Class.forName("scala.reflect.ClassTag$$anon$1"),
+                Class.forName("org.apache.kylin.engine.mr.steps.SelfDefineSortableKey") };
 
         SparkConf conf = new SparkConf().setAppName("Fact distinct columns for:" + cubeName + " segment " + segmentId);
         //serialization conf
@@ -186,17 +190,24 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
 
         final JavaRDD<String[]> recordRDD = SparkUtil.hiveRecordInputRDD(isSequenceFile, sc, inputPath, hiveTable);
 
-        JavaPairRDD<SelfDefineSortableKey, Text> flatOutputRDD = recordRDD.mapPartitionsToPair(new FlatOutputFucntion(cubeName, segmentId, metaUrl, sConf, samplingPercent, bytesWritten));
+        JavaPairRDD<SelfDefineSortableKey, Text> flatOutputRDD = recordRDD.mapPartitionsToPair(
+                new FlatOutputFucntion(cubeName, segmentId, metaUrl, sConf, samplingPercent, bytesWritten));
 
-        JavaPairRDD<SelfDefineSortableKey, Iterable<Text>> aggredRDD = flatOutputRDD.groupByKey(new FactDistinctPartitioner(cubeName, metaUrl, sConf, reducerMapping.getTotalReducerNum()));
+        JavaPairRDD<SelfDefineSortableKey, Iterable<Text>> aggredRDD = flatOutputRDD
+                .groupByKey(new FactDistinctPartitioner(cubeName, metaUrl, sConf, reducerMapping.getTotalReducerNum()));
 
-        JavaPairRDD<String, Tuple3<Writable, Writable, String>> outputRDD = aggredRDD.mapPartitionsToPair(new MultiOutputFunction(cubeName, metaUrl, sConf, samplingPercent));
+        JavaPairRDD<String, Tuple3<Writable, Writable, String>> outputRDD = aggredRDD
+                .mapPartitionsToPair(new MultiOutputFunction(cubeName, metaUrl, sConf, samplingPercent));
 
         // make each reducer output to respective dir
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_COLUMN, SequenceFileOutputFormat.class, NullWritable.class, Text.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_DICT, SequenceFileOutputFormat.class, NullWritable.class, ArrayPrimitiveWritable.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_STATISTICS, SequenceFileOutputFormat.class, LongWritable.class, BytesWritable.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_PARTITION, TextOutputFormat.class, NullWritable.class, LongWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_COLUMN, SequenceFileOutputFormat.class,
+                NullWritable.class, Text.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_DICT, SequenceFileOutputFormat.class,
+                NullWritable.class, ArrayPrimitiveWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_STATISTICS, SequenceFileOutputFormat.class,
+                LongWritable.class, BytesWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_PARTITION, TextOutputFormat.class,
+                NullWritable.class, LongWritable.class);
 
         FileOutputFormat.setOutputPath(job, new Path(outputPath));
         FileOutputFormat.setCompressOutput(job, false);
@@ -223,7 +234,6 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         HadoopUtil.deleteHDFSMeta(metaUrl);
     }
 
-
     static class FlatOutputFucntion implements PairFlatMapFunction<Iterator<String[]>, SelfDefineSortableKey, Text> {
         private volatile transient boolean initialized = false;
         private String cubeName;
@@ -241,7 +251,8 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         private LongAccumulator bytesWritten;
         private KeyValueBuilder keyValueBuilder;
 
-        public FlatOutputFucntion(String cubeName, String segmentId, String metaurl, SerializableConfiguration conf, int samplingPercent, LongAccumulator bytesWritten) {
+        public FlatOutputFucntion(String cubeName, String segmentId, String metaurl, SerializableConfiguration conf,
+                int samplingPercent, LongAccumulator bytesWritten) {
             this.cubeName = cubeName;
             this.segmentId = segmentId;
             this.metaUrl = metaurl;
@@ -572,7 +583,8 @@ private void putRowKeyToHLLNew(String[] row) {
         private int totalReducerNum;
         private FactDistinctColumnsReducerMapping reducerMapping;
 
-        public FactDistinctPartitioner(String cubeName, String metaUrl, SerializableConfiguration conf, int totalReducerNum) {
+        public FactDistinctPartitioner(String cubeName, String metaUrl, SerializableConfiguration conf,
+                int totalReducerNum) {
             this.cubeName = cubeName;
             this.metaUrl = metaUrl;
             this.conf = conf;
@@ -641,7 +653,8 @@ public int getPartition(Object o) {
         private String minValue = null;
         private List<Tuple2<String, Tuple3<Writable, Writable, String>>> result;
 
-        public MultiOutputFunction(String cubeName, String metaurl, SerializableConfiguration conf, int samplingPercent) {
+        public MultiOutputFunction(String cubeName, String metaurl, SerializableConfiguration conf,
+                int samplingPercent) {
             this.cubeName = cubeName;
             this.metaUrl = metaurl;
             this.conf = conf;
@@ -651,7 +664,8 @@ public MultiOutputFunction(String cubeName, String metaurl, SerializableConfigur
         private void init() throws IOException {
             taskId = TaskContext.getPartitionId();
             KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
-            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig.setAndUnsetThreadLocalConfig(kConfig)) {
+            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
+                    .setAndUnsetThreadLocalConfig(kConfig)) {
                 CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
                 cubeDesc = cubeInstance.getDescriptor();
                 cubeConfig = cubeInstance.getConfig();
@@ -686,7 +700,8 @@ private void init() throws IOException {
                         builder = DictionaryGenerator.newDictionaryBuilder(col.getType());
                         builder.init(null, 0, null);
                     }
-                    logger.info("Partition " + taskId + " handling column " + col + ", buildDictInReducer=" + buildDictInReducer);
+                    logger.info("Partition " + taskId + " handling column " + col + ", buildDictInReducer="
+                            + buildDictInReducer);
                 }
 
                 initialized = true;
@@ -760,7 +775,7 @@ private void logAFewRows(String value) {
                             String fileName = col.getIdentity() + "/";
                             result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(
                                     BatchConstants.CFG_OUTPUT_COLUMN, new Tuple3<Writable, Writable, String>(
-                                    NullWritable.get(), new Text(keyBytes), fileName)));
+                                            NullWritable.get(), new Text(keyBytes), fileName)));
                         }
                     }
                 }
@@ -819,22 +834,24 @@ private void outputDimRangeInfo(List<Tuple2<String, Tuple3<Writable, Writable, S
                 String dimRangeFileName = col.getIdentity() + "/" + col.getName() + DIMENSION_COL_INFO_FILE_POSTFIX;
 
                 result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(BatchConstants.CFG_OUTPUT_PARTITION,
-                        new Tuple3<Writable, Writable, String>(NullWritable.get(), new Text(minValue.getBytes()),
-                                dimRangeFileName)));
+                        new Tuple3<Writable, Writable, String>(NullWritable.get(),
+                                new Text(minValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName)));
                 result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(BatchConstants.CFG_OUTPUT_PARTITION,
-                        new Tuple3<Writable, Writable, String>(NullWritable.get(), new Text(maxValue.getBytes()),
-                                dimRangeFileName)));
+                        new Tuple3<Writable, Writable, String>(NullWritable.get(),
+                                new Text(maxValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName)));
                 logger.info("write dimension range info for col : " + col.getName() + "  minValue:" + minValue
                         + " maxValue:" + maxValue);
             }
         }
 
-        private void outputDict(TblColRef col, Dictionary<String> dict, List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
+        private void outputDict(TblColRef col, Dictionary<String> dict,
+                List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
                 throws IOException, InterruptedException {
             // output written to baseDir/colName/colName.rldict-r-00000 (etc)
             String dictFileName = col.getIdentity() + "/" + col.getName() + DICT_FILE_POSTFIX;
 
-            try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream outputStream = new DataOutputStream(baos)) {
+            try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                    DataOutputStream outputStream = new DataOutputStream(baos)) {
                 outputStream.writeUTF(dict.getClass().getName());
                 dict.write(outputStream);
 
@@ -844,10 +861,12 @@ private void outputDict(TblColRef col, Dictionary<String> dict, List<Tuple2<Stri
             }
         }
 
-        private void outputStatistics(List<Long> allCuboids, List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
+        private void outputStatistics(List<Long> allCuboids,
+                List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
                 throws IOException, InterruptedException {
             // output written to baseDir/statistics/statistics-r-00000 (etc)
-            String statisticsFileName = BatchConstants.CFG_OUTPUT_STATISTICS + "/" + BatchConstants.CFG_OUTPUT_STATISTICS;
+            String statisticsFileName = BatchConstants.CFG_OUTPUT_STATISTICS + "/"
+                    + BatchConstants.CFG_OUTPUT_STATISTICS;
 
             // mapper overlap ratio at key -1
             long grandTotal = 0;
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
index 9f8c7dd24e..7e3ca05017 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import java.sql.Date;
@@ -186,7 +187,7 @@ public static Object wrapObject(String value, int sqlType) {
         case Types.BINARY:
         case Types.VARBINARY:
         case Types.LONGVARBINARY:
-            return value.getBytes();
+            return value.getBytes(StandardCharsets.UTF_8);
         case Types.DATE:
             return Date.valueOf(value);
         case Types.TIME:
@@ -217,7 +218,8 @@ private void addHttpHeaders(HttpRequestBase method) {
 
         String username = connProps.getProperty("user");
         String password = connProps.getProperty("password");
-        String basicAuth = DatatypeConverter.printBase64Binary((username + ":" + password).getBytes());
+        String basicAuth = DatatypeConverter
+                .printBase64Binary((username + ":" + password).getBytes(StandardCharsets.UTF_8));
         method.addHeader("Authorization", "Basic " + basicAuth);
     }
 
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java b/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
index dc41773d67..dd84bd6c89 100755
--- a/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
@@ -21,9 +21,13 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.BufferedWriter;
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Set;
 
 import org.apache.kylin.common.util.Dictionary;
@@ -60,7 +64,8 @@ public void after() throws Exception {
     @Test
     public void basic() throws Exception {
         dictMgr = DictionaryManager.getInstance(getTestConfig());
-        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_without_slr_desc");
+        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig())
+                .getCubeDesc("test_kylin_cube_without_slr_desc");
         TblColRef col = cubeDesc.findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
 
         MockDistinctColumnValuesProvider mockupData = new MockDistinctColumnValuesProvider("A", "B", "C");
@@ -105,7 +110,8 @@ public void basic() throws Exception {
 
         public MockDistinctColumnValuesProvider(String... values) throws IOException {
             File tmpFile = File.createTempFile("MockDistinctColumnValuesProvider", ".txt");
-            PrintWriter out = new PrintWriter(tmpFile);
+            PrintWriter out = new PrintWriter(
+                    new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tmpFile), StandardCharsets.UTF_8)));
 
             set = Sets.newTreeSet();
             for (String value : values) {
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java b/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java
index 797d0db86a..1d6d0bc562 100755
--- a/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java
@@ -19,8 +19,10 @@
 package org.apache.kylin.cube.cuboid.algorithm;
 
 import java.io.BufferedReader;
-import java.io.FileReader;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -88,7 +90,8 @@ public double getQueryCostRatio(CuboidStats cuboidStats, List<Long> recommendLis
 
             String sCurrentLine;
 
-            br = new BufferedReader(new FileReader("src/test/resources/statistics.txt"));
+            br = new BufferedReader(new InputStreamReader(new FileInputStream("src/test/resources/statistics.txt"),
+                    StandardCharsets.UTF_8));
 
             while ((sCurrentLine = br.readLine()) != null) {
                 String[] statPair = sCurrentLine.split(" ");
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 16ceedea68..ec5bc35ff8 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -26,6 +26,7 @@
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -115,7 +116,7 @@ public static void main(String[] args) throws Exception {
     public static void beforeClass() throws Exception {
         beforeClass(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
     }
-    
+
     public static void beforeClass(String confDir) throws Exception {
         logger.info("Adding to classpath: " + new File(confDir).getAbsolutePath());
         ClassUtil.addClasspath(new File(confDir).getAbsolutePath());
@@ -138,7 +139,8 @@ public static void beforeClass(String confDir) throws Exception {
         System.setProperty("SPARK_HOME", "/usr/local/spark"); // need manually create and put spark to this folder on Jenkins
         System.setProperty("kylin.hadoop.conf.dir", confDir);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException(
+                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
 
         HBaseMetadataTestCase.staticCreateTestMetadata(confDir);
@@ -153,7 +155,10 @@ public static void beforeClass(String confDir) throws Exception {
                 throw new IOException("mkdir fails");
             }
         } catch (IOException e) {
-            throw new RuntimeException("failed to create kylin.env.hdfs-working-dir, Please make sure the user has right to access " + KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(), e);
+            throw new RuntimeException(
+                    "failed to create kylin.env.hdfs-working-dir, Please make sure the user has right to access "
+                            + KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(),
+                    e);
         }
     }
 
@@ -161,7 +166,7 @@ private static boolean isFastBuildMode() {
         String fastModeStr = System.getProperty("fastBuildMode");
         if (fastModeStr == null)
             fastModeStr = System.getenv("KYLIN_CI_FASTBUILD");
-        
+
         return "true".equalsIgnoreCase(fastModeStr);
     }
 
@@ -289,14 +294,14 @@ protected boolean testModel() throws Exception {
     private boolean testLeftJoinCube() throws Exception {
         String cubeName = "ci_left_join_cube";
         clearSegment(cubeName);
-        
+
         // NOTE: ci_left_join_cube has percentile which isn't supported by Spark engine now
 
         return doBuildAndMergeOnCube(cubeName);
     }
 
     private boolean doBuildAndMergeOnCube(String cubeName) throws ParseException, Exception {
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
         f.setTimeZone(TimeZone.getTimeZone("GMT"));
         long date1 = 0;
         long date2 = f.parse("2012-06-01").getTime();
@@ -307,7 +312,7 @@ private boolean doBuildAndMergeOnCube(String cubeName) throws ParseException, Ex
 
         if (fastBuildMode)
             return buildSegment(cubeName, date1, date4);
-        
+
         if (!buildSegment(cubeName, date1, date2))
             return false;
         checkNormalSegRangeInfo(cubeManager.getCube(cubeName));
@@ -327,14 +332,13 @@ private boolean doBuildAndMergeOnCube(String cubeName) throws ParseException, Ex
             return false;
         checkEmptySegRangeInfo(cubeManager.getCube(cubeName));
 
-
         if (!mergeSegment(cubeName, date2, date4)) // merge 2 normal segments
             return false;
         checkNormalSegRangeInfo(cubeManager.getCube(cubeName));
         if (!mergeSegment(cubeName, date2, date5)) // merge normal and empty
             return false;
         checkNormalSegRangeInfo(cubeManager.getCube(cubeName));
-        
+
         // now have 2 normal segments [date1, date2) [date2, date5) and 1 empty segment [date5, date6)
         return true;
     }
@@ -345,7 +349,7 @@ private boolean testInnerJoinCube() throws Exception {
 
         String cubeName = "ci_inner_join_cube";
         clearSegment(cubeName);
-        
+
         return doBuildAndMergeOnCube(cubeName);
     }
 
@@ -382,7 +386,8 @@ private Boolean optimizeCube(String cubeName) throws Exception {
     }
 
     private Boolean mergeSegment(String cubeName, long startDate, long endDate) throws Exception {
-        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), new TSRange(startDate, endDate), null, true);
+        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), new TSRange(startDate, endDate),
+                null, true);
         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
         jobService.addJob(job);
         ExecutableState state = waitForJob(job.getId());
@@ -494,15 +499,15 @@ private void checkNormalSegRangeInfo(CubeInstance cube) {
             long max_v = DateFormat.stringToMillis(dmRangeInfo.getMax());
             long ts_range_start = segment.getTSRange().start.v;
             long ts_range_end = segment.getTSRange().end.v;
-            if (!(ts_range_start <= min_v && max_v <= ts_range_end -1)) {
-                throw new RuntimeException(String.format(
+            if (!(ts_range_start <= min_v && max_v <= ts_range_end - 1)) {
+                throw new RuntimeException(String.format(Locale.ROOT,
                         "Build cube failed, wrong partition column min/max value."
                                 + " Segment: %s, min value: %s, TsRange.start: %s, max value: %s, TsRange.end: %s",
                         segment, min_v, ts_range_start, max_v, ts_range_end));
             }
         }
     }
-    
+
     private CubeSegment getLastModifiedSegment(CubeInstance cube) {
         return Collections.max(cube.getSegments(), new Comparator<CubeSegment>() {
             @Override
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 97a9c9cb15..80a5521113 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -25,6 +25,7 @@
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.TimeZone;
 import java.util.concurrent.Callable;
@@ -167,7 +168,7 @@ public void build() throws Exception {
         new Thread(new Runnable() {
             @Override
             public void run() {
-                SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+                SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
                 f.setTimeZone(TimeZone.getTimeZone("GMT"));
                 long dateStart = 0;
                 try {
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
index db2b9486d7..ca4b5e1615 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
@@ -20,6 +20,7 @@
 import java.io.UnsupportedEncodingException;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Properties;
 import java.util.Random;
 
@@ -38,7 +39,8 @@
 import kafka.utils.ZkUtils;
 
 public class MockKafka {
-    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
+    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port,
+            String brokerId) {
         Properties properties = new Properties();
         properties.put("port", port);
         properties.put("broker.id", brokerId);
@@ -59,7 +61,8 @@ private static Properties createProperties(ZkConnection zkServerConnection, Stri
     private ZkConnection zkConnection;
 
     public MockKafka(ZkConnection zkServerConnection) {
-        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(), "9092", "1");
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(),
+                "9092", "1");
         start();
     }
 
@@ -69,14 +72,16 @@ private MockKafka(Properties properties) {
     }
 
     public MockKafka(ZkConnection zkServerConnection, int port, int brokerId) {
-        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(), String.valueOf(port), String.valueOf(brokerId));
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(),
+                String.valueOf(port), String.valueOf(brokerId));
         //start();
     }
 
     private MockKafka(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
         this(createProperties(zkServerConnection, logDir, port, brokerId));
         this.zkConnection = zkServerConnection;
-        System.out.println(String.format("Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(), kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
+        System.out.println(String.format(Locale.ROOT, "Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(),
+                kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
     }
 
     public void createTopic(String topic, int partition, int replication) {
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
index d703941ddc..261fe5ab32 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
@@ -27,6 +27,7 @@
 import java.sql.Statement;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.io.IOUtils;
@@ -80,7 +81,7 @@ public void loadAllTables() throws SQLException {
 
     private void loadH2Table(String tableName) throws SQLException {
         TableMetadataManager metaMgr = TableMetadataManager.getInstance(config);
-        TableDesc tableDesc = metaMgr.getTableDesc(tableName.toUpperCase(), project);
+        TableDesc tableDesc = metaMgr.getTableDesc(tableName.toUpperCase(Locale.ROOT), project);
         File tempFile = null;
 
         try {
@@ -162,11 +163,11 @@ private String generateCreateH2TableSql(TableDesc tableDesc, String csvFilePath)
     }
 
     private static String getH2DataType(String javaDataType) {
-        String hiveDataType = javaToH2DataTypeMapping.get(javaDataType.toLowerCase());
+        String hiveDataType = javaToH2DataTypeMapping.get(javaDataType.toLowerCase(Locale.ROOT));
         if (hiveDataType == null) {
             hiveDataType = javaDataType;
         }
-        return hiveDataType.toLowerCase();
+        return hiveDataType.toLowerCase(Locale.ROOT);
     }
 
 }
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index fa49afb49c..43247502df 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -40,6 +40,7 @@
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.logging.LogManager;
@@ -169,7 +170,7 @@ public int compare(File o1, File o2) {
         System.out.println(folder.getAbsolutePath());
         Set<File> set = new TreeSet<>(new FileByNameComparator());
         for (final File fileEntry : folder.listFiles()) {
-            if (fileEntry.getName().toLowerCase().endsWith(fileType.toLowerCase())) {
+            if (fileEntry.getName().toLowerCase(Locale.ROOT).endsWith(fileType.toLowerCase(Locale.ROOT))) {
                 set.add(fileEntry);
             }
         }
@@ -349,7 +350,7 @@ protected static String changeJoinType(String sql, String targetType) {
         for (int i = 0; i < tokens.length - 1; ++i) {
             if ((tokens[i].equalsIgnoreCase("inner") || tokens[i].equalsIgnoreCase("left"))
                     && tokens[i + 1].equalsIgnoreCase("join")) {
-                tokens[i] = targetType.toLowerCase();
+                tokens[i] = targetType.toLowerCase(Locale.ROOT);
             }
         }
 
@@ -508,7 +509,7 @@ protected void execLimitAndValidate(String queryFolder) throws Exception {
             String sql = getTextFromFile(sqlFile);
 
             String sqlWithLimit;
-            if (sql.toLowerCase().contains("limit ")) {
+            if (sql.toLowerCase(Locale.ROOT).contains("limit ")) {
                 sqlWithLimit = sql;
             } else {
                 sqlWithLimit = sql + " limit 5";
diff --git a/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java b/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
index 26a81e3566..04a1f203b7 100644
--- a/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
+++ b/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
@@ -22,6 +22,7 @@
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 
@@ -128,7 +129,7 @@ private void write(RecordKey recordKey, Iterable<HiveProducerRecord> recordItr)
         sb.append(tableLocation);
         for (Map.Entry<String, String> e : recordKey.partition().entrySet()) {
             sb.append("/");
-            sb.append(e.getKey().toLowerCase());
+            sb.append(e.getKey().toLowerCase(Locale.ROOT));
             sb.append("=");
             sb.append(e.getValue());
         }
@@ -145,7 +146,7 @@ private void write(RecordKey recordKey, Iterable<HiveProducerRecord> recordItr)
                 } else {
                     hql.append(",");
                 }
-                hql.append(e.getKey().toLowerCase());
+                hql.append(e.getKey().toLowerCase(Locale.ROOT));
                 hql.append("='" + e.getValue() + "'");
             }
             hql.append(")");
@@ -192,7 +193,7 @@ public HiveProducerRecord parseToHiveProducerRecord(String tableName, Map<String
         List<FieldSchema> fields = tableFieldSchemaCache.get(tableNameSplits).getSecond();
         List<Object> columnValues = Lists.newArrayListWithExpectedSize(fields.size());
         for (FieldSchema fieldSchema : fields) {
-            columnValues.add(rawValue.get(fieldSchema.getName().toUpperCase()));
+            columnValues.add(rawValue.get(fieldSchema.getName().toUpperCase(Locale.ROOT)));
         }
 
         return new HiveProducerRecord(tableNameSplits.getFirst(), tableNameSplits.getSecond(), partitionKVs, columnValues);
diff --git a/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java b/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java
index f5ada16493..2b258f0326 100644
--- a/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java
+++ b/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java
@@ -25,6 +25,7 @@
 import java.sql.Statement;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.calcite.linq4j.Enumerator;
 import org.apache.kylin.common.util.DBUtils;
 import org.apache.kylin.query.relnode.OLAPContext;
@@ -81,7 +82,7 @@ private boolean populateResult() {
             if (hasNext) {
                 List<String> allFields = olapContext.returnTupleInfo.getAllFields();
                 for (int i = 0; i < allFields.size(); i++) {
-                    Object value = rs.getObject(allFields.get(i).toLowerCase());
+                    Object value = rs.getObject(allFields.get(i).toLowerCase(Locale.ROOT));
                     current[i] = value;
                 }
             }
diff --git a/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java b/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java
index a1935fed0c..534b02f5a4 100644
--- a/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java
+++ b/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -147,7 +148,7 @@ private static void createOLAPSchemaFunctions(Map<String, String> definedUdfs, S
         int index = 0;
         out.append("            \"functions\": [\n");
         for (Map.Entry<String, String> udf : udfs.entrySet()) {
-            String udfName = udf.getKey().trim().toUpperCase();
+            String udfName = udf.getKey().trim().toUpperCase(Locale.ROOT);
             String udfClassName = udf.getValue().trim();
             out.append("               {\n");
             out.append("                   name: '" + udfName + "',\n");
diff --git a/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java b/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java
index 0debd6ce8c..39851921be 100644
--- a/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java
+++ b/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java
@@ -21,6 +21,7 @@
 import java.util.Collection;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.query.relnode.OLAPContext;
 
 public abstract class QueryInterceptor {
@@ -40,7 +41,7 @@ private void intercept(List<OLAPContext> contexts, Collection<String> blackList)
 
         Collection<String> queryCols = getQueryIdentifiers(contexts);
         for (String id : blackList) {
-            if (queryCols.contains(id.toUpperCase())) {
+            if (queryCols.contains(id.toUpperCase(Locale.ROOT))) {
                 throw new AccessDeniedException(getIdentifierType() + ":" + id);
             }
         }
diff --git a/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java b/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java
index c1b67b0603..7cc2273f6e 100644
--- a/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java
+++ b/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.kylin.metadata.model.TblColRef;
@@ -30,8 +31,8 @@
     public static void tableFilter(List<OLAPContext> contexts, List<String> tableBlackList) {
         Set<String> tableWithSchema = getTableWithSchema(contexts);
         for (String tbl : tableBlackList) {
-            if (tableWithSchema.contains(tbl.toUpperCase())) {
-//                throw new kylin.AccessDeniedException("table:" + tbl);
+            if (tableWithSchema.contains(tbl.toUpperCase(Locale.ROOT))) {
+                //                throw new kylin.AccessDeniedException("table:" + tbl);
                 System.out.println("Access table:" + tbl + " denied");
             }
         }
@@ -40,8 +41,8 @@ public static void tableFilter(List<OLAPContext> contexts, List<String> tableBla
     public static void columnFilter(List<OLAPContext> contexts, List<String> columnBlackList) {
         List<String> allColWithTblAndSchema = getAllColWithTblAndSchema(contexts);
         for (String tbl : columnBlackList) {
-            if (allColWithTblAndSchema.contains(tbl.toUpperCase())) {
-//                throw new kylin.AccessDeniedException("table:" + tbl);
+            if (allColWithTblAndSchema.contains(tbl.toUpperCase(Locale.ROOT))) {
+                //                throw new kylin.AccessDeniedException("table:" + tbl);
                 System.out.println("Access table:" + tbl + " denied");
             }
         }
diff --git a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
index 424a1725ac..4378221a3c 100644
--- a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.query.util;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -58,7 +59,7 @@ public static String massageSql(String sql, String project, int limit, int offse
         final String suffixPattern = "^.+?\\s(limit\\s\\d+)?\\s(offset\\s\\d+)?\\s*$";
         sql = sql.replaceAll("\\s+", " ");
         Pattern pattern = Pattern.compile(suffixPattern);
-        Matcher matcher = pattern.matcher(sql.toLowerCase() + "  ");
+        Matcher matcher = pattern.matcher(sql.toLowerCase(Locale.ROOT) + "  ");
 
         if (matcher.find()) {
             if (limit > 0 && matcher.group(1) == null) {
@@ -71,7 +72,7 @@ public static String massageSql(String sql, String project, int limit, int offse
 
         // https://issues.apache.org/jira/browse/KYLIN-2649
         if (kylinConfig.getForceLimit() > 0 && limit <= 0 && matcher.group(1) == null
-                && sql1.toLowerCase().matches("^select\\s+\\*\\p{all}*")) {
+                && sql1.toLowerCase(Locale.ROOT).matches("^select\\s+\\*\\p{all}*")) {
             sql1 += ("\nLIMIT " + kylinConfig.getForceLimit());
         }
 
@@ -143,7 +144,7 @@ public static String makeErrorMsgUserFriendly(String errorMsg) {
     }
 
     public static boolean isSelectStatement(String sql) {
-        String sql1 = sql.toLowerCase();
+        String sql1 = sql.toLowerCase(Locale.ROOT);
         sql1 = removeCommentInSql(sql1);
         sql1 = sql1.trim();
         return sql1.startsWith("select") || (sql1.startsWith("with") && sql1.contains("select"))
diff --git a/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java b/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java
index d64c791947..8d42e34814 100644
--- a/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -32,6 +33,7 @@
     private static final String WITH = "WITH";
     private static final String DROP = "DROP";
     private static final String CREATE = "CREATE";
+
     public static Pair<Boolean, String> handleTempStatement(String sql, KylinConfig config) {
         if (!config.isConvertCreateTableToWith()) {
             return new Pair<>(false, sql);
@@ -132,15 +134,15 @@ private static void translateCreateToWith(String sql, KylinConfig config) throws
     }
 
     private static boolean isCreateTable(String sql) {
-        return sql.trim().toUpperCase().startsWith(CREATE);
+        return sql.trim().toUpperCase(Locale.ROOT).startsWith(CREATE);
     }
 
     private static boolean isDropTable(String sql) {
-        return sql.trim().toUpperCase().startsWith(DROP);
+        return sql.trim().toUpperCase(Locale.ROOT).startsWith(DROP);
     }
 
     private static boolean isWith(String sql) {
-        return sql.trim().toUpperCase().startsWith(WITH);
+        return sql.trim().toUpperCase(Locale.ROOT).startsWith(WITH);
     }
 
     private static String appendWith(String sql, KylinConfig config) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java
index 963a945d95..f9c81f79fe 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.controller;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.kylin.common.KylinConfig;
@@ -65,7 +66,7 @@ public GeneralResponse getEnv() {
             envRes.put("env", env);
 
             return envRes;
-        } catch (ConfigurationException e) {
+        } catch (ConfigurationException | UnsupportedEncodingException e) {
             throw new RuntimeException(msg.getGET_ENV_CONFIG_FAIL(), e);
         }
     }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
index 061dd791f8..8607348917 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
@@ -23,6 +23,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Locale;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
@@ -65,7 +66,8 @@ ErrorResponse handleError(HttpServletRequest req, Exception ex) {
         Throwable cause = ex;
         while (cause != null) {
             if (cause.getClass().getPackage().getName().startsWith("org.apache.hadoop.hbase")) {
-                return new ErrorResponse(req.getRequestURL().toString(), new InternalErrorException(String.format(msg.getHBASE_FAIL(), ex.getMessage()), ex));
+                return new ErrorResponse(req.getRequestURL().toString(), new InternalErrorException(
+                        String.format(Locale.ROOT, msg.getHBASE_FAIL(), ex.getMessage()), ex));
             }
             cause = cause.getCause();
         }
@@ -117,7 +119,8 @@ protected void checkRequiredArg(String fieldName, Object fieldValue) {
 
     protected void setDownloadResponse(String downloadFile, final HttpServletResponse response) {
         File file = new File(downloadFile);
-        try (InputStream fileInputStream = new FileInputStream(file); OutputStream output = response.getOutputStream()) {
+        try (InputStream fileInputStream = new FileInputStream(file);
+                OutputStream output = response.getOutputStream()) {
             response.reset();
             response.setContentType("application/octet-stream");
             response.setContentLength((int) (file.length()));
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 3f30ab9031..a78f26a209 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -24,6 +24,7 @@
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -295,14 +296,17 @@ public CubeInstance rebuildLookupSnapshot(@PathVariable String cubeName, @PathVa
      *
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/refresh_lookup", method = { RequestMethod.PUT }, produces = { "application/json" })
+    @RequestMapping(value = "/{cubeName}/refresh_lookup", method = { RequestMethod.PUT }, produces = {
+            "application/json" })
     @ResponseBody
-    public JobInstance reBuildLookupSnapshot(@PathVariable String cubeName, @RequestBody LookupSnapshotBuildRequest request) {
+    public JobInstance reBuildLookupSnapshot(@PathVariable String cubeName,
+            @RequestBody LookupSnapshotBuildRequest request) {
         try {
             final CubeManager cubeMgr = cubeService.getCubeManager();
             final CubeInstance cube = cubeMgr.getCube(cubeName);
             String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
-            return jobService.submitLookupSnapshotJob(cube, request.getLookupTableName(), request.getSegmentIDs(), submitter);
+            return jobService.submitLookupSnapshotJob(cube, request.getLookupTableName(), request.getSegmentIDs(),
+                    submitter);
         } catch (IOException e) {
             logger.error(e.getLocalizedMessage(), e);
             throw new InternalErrorException(e.getLocalizedMessage());
@@ -1007,7 +1011,7 @@ private void checkCubeExists(String cubeName) {
         CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName);
         if (cubeInstance == null) {
             Message msg = MsgPicker.getMsg();
-            throw new NotFoundException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
+            throw new NotFoundException(String.format(Locale.ROOT, msg.getCUBE_NOT_FOUND(), cubeName));
         }
     }
 
@@ -1015,7 +1019,6 @@ private void checkBuildingSegment(CubeInstance cube) {
         checkBuildingSegment(cube, cube.getConfig().getMaxBuildingSegments());
     }
 
-
     private void checkBuildingSegment(CubeInstance cube, int maxBuildingSeg) {
         if (cube.getBuildingSegments().size() >= maxBuildingSeg) {
             throw new TooManyRequestException(
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
index a27e148595..43d67ab85e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
@@ -114,7 +115,8 @@ public ModelRequest saveModelDesc(@RequestBody ModelRequest modelRequest) {
         }
         if (!ValidateUtil.isAlphanumericUnderscore(modelDesc.getName())) {
             throw new BadRequestException(
-                    String.format("Invalid model name %s, only letters, numbers and underscore " + "supported."),
+                    String.format(Locale.ROOT,
+                            "Invalid model name %s, only letters, numbers and underscore " + "supported."),
                     modelDesc.getName());
         }
 
@@ -201,8 +203,8 @@ public ModelRequest cloneModel(@PathVariable String modelName, @RequestBody Mode
             throw new BadRequestException("New model name should not be empty.");
         }
         if (!ValidateUtil.isAlphanumericUnderscore(newModelName)) {
-            throw new BadRequestException(String
-                    .format("Invalid model name %s, only letters, numbers and underscore supported.", newModelName));
+            throw new BadRequestException(String.format(Locale.ROOT,
+                    "Invalid model name %s, only letters, numbers and underscore supported.", newModelName));
         }
 
         DataModelDesc newModelDesc = DataModelDesc.getCopyOf(modelDesc);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
index 44eeffe6fb..ecea55740d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
@@ -22,6 +22,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.JsonUtil;
@@ -128,7 +129,8 @@ public ProjectInstance saveProject(@RequestBody ProjectRequest projectRequest) {
 
         if (!ValidateUtil.isAlphanumericUnderscore(projectDesc.getName())) {
             throw new BadRequestException(
-                    String.format("Invalid Project name %s, only letters, numbers and underscore supported."),
+                    String.format(Locale.ROOT,
+                            "Invalid Project name %s, only letters, numbers and underscore " + "supported."),
                     projectDesc.getName());
         }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
index 828c6749ac..978450cfbe 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
@@ -24,6 +24,7 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeSet;
 
@@ -118,7 +119,8 @@ public void removeQuery(@PathVariable String id) throws IOException {
 
     @RequestMapping(value = "/saved_queries", method = RequestMethod.GET, produces = { "application/json" })
     @ResponseBody
-    public List<Query> getQueries(@RequestParam(value = "project", required = false) String project) throws IOException {
+    public List<Query> getQueries(@RequestParam(value = "project", required = false) String project)
+            throws IOException {
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
         return queryService.getQueries(creator, project);
     }
@@ -137,7 +139,7 @@ public void downloadQueryResult(@PathVariable String format, SQLRequest sqlReque
         SQLResponse result = queryService.doQueryWithCache(sqlRequest);
         response.setContentType("text/" + format + ";charset=utf-8");
 
-        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS");
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS", Locale.ROOT);
         Date now = new Date();
         String nowStr = sdf.format(now);
         response.setHeader("Content-Disposition", "attachment; filename=\"" + nowStr + ".result." + format + "\"");
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
index 66621c7dd6..488b7e0259 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -31,8 +32,8 @@
 import org.apache.kylin.rest.exception.NotFoundException;
 import org.apache.kylin.rest.request.CardinalityRequest;
 import org.apache.kylin.rest.request.HiveTableRequest;
-import org.apache.kylin.rest.service.TableACLService;
 import org.apache.kylin.rest.response.TableSnapshotResponse;
+import org.apache.kylin.rest.service.TableACLService;
 import org.apache.kylin.rest.service.TableService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,7 +75,8 @@
      */
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<TableDesc> getTableDesc(@RequestParam(value = "ext", required = false) boolean withExt, @RequestParam(value = "project", required = true) String project) throws IOException {
+    public List<TableDesc> getTableDesc(@RequestParam(value = "ext", required = false) boolean withExt,
+            @RequestParam(value = "project", required = true) String project) throws IOException {
         try {
             return tableService.getTableDescByProject(project, withExt);
         } catch (IOException e) {
@@ -90,7 +92,8 @@
      * @return Table metadata array
      * @throws IOException
      */
-    @RequestMapping(value = "/{project}/{tableName:.+}", method = { RequestMethod.GET }, produces = { "application/json" })
+    @RequestMapping(value = "/{project}/{tableName:.+}", method = { RequestMethod.GET }, produces = {
+            "application/json" })
     @ResponseBody
     public TableDesc getTableDesc(@PathVariable String tableName, @PathVariable String project) {
         TableDesc table = tableService.getTableDescByName(tableName, false, project);
@@ -101,7 +104,8 @@ public TableDesc getTableDesc(@PathVariable String tableName, @PathVariable Stri
 
     @RequestMapping(value = "/{tables}/{project}", method = { RequestMethod.POST }, produces = { "application/json" })
     @ResponseBody
-    public Map<String, String[]> loadHiveTables(@PathVariable String tables, @PathVariable String project, @RequestBody HiveTableRequest request) throws IOException {
+    public Map<String, String[]> loadHiveTables(@PathVariable String tables, @PathVariable String project,
+            @RequestBody HiveTableRequest request) throws IOException {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         Map<String, String[]> result = new HashMap<String, String[]>();
         String[] tableNames = StringUtil.splitAndTrim(tables, ",");
@@ -159,14 +163,16 @@ public TableDesc getTableDesc(@PathVariable String tableName, @PathVariable Stri
      * @return Table metadata array
      * @throws IOException
      */
-    @RequestMapping(value = "/{project}/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = { "application/json" })
+    @RequestMapping(value = "/{project}/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = {
+            "application/json" })
     @ResponseBody
-    public CardinalityRequest generateCardinality(@PathVariable String tableNames, @RequestBody CardinalityRequest request, @PathVariable String project) throws Exception {
+    public CardinalityRequest generateCardinality(@PathVariable String tableNames,
+            @RequestBody CardinalityRequest request, @PathVariable String project) throws Exception {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         String[] tables = tableNames.split(",");
         try {
             for (String table : tables) {
-                tableService.calculateCardinality(table.trim().toUpperCase(), submitter, project);
+                tableService.calculateCardinality(table.trim().toUpperCase(Locale.ROOT), submitter, project);
             }
         } catch (IOException e) {
             logger.error("Failed to calculate cardinality", e);
@@ -183,7 +189,8 @@ public CardinalityRequest generateCardinality(@PathVariable String tableNames, @
      */
     @RequestMapping(value = "/hive", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    private List<String> showHiveDatabases(@RequestParam(value = "project", required = false) String project) throws IOException {
+    private List<String> showHiveDatabases(@RequestParam(value = "project", required = false) String project)
+            throws IOException {
         try {
             return tableService.getSourceDbNames(project);
         } catch (Throwable e) {
@@ -200,7 +207,8 @@ public CardinalityRequest generateCardinality(@PathVariable String tableNames, @
      */
     @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    private List<String> showHiveTables(@PathVariable String database, @RequestParam(value = "project", required = false) String project) throws IOException {
+    private List<String> showHiveTables(@PathVariable String database,
+            @RequestParam(value = "project", required = false) String project) throws IOException {
         try {
             return tableService.getSourceTableNames(project, database);
         } catch (Throwable e) {
@@ -211,13 +219,15 @@ public CardinalityRequest generateCardinality(@PathVariable String tableNames, @
 
     @RequestMapping(value = "/{project}/{tableName}/{snapshotID}/snapshotLocalCache", method = { RequestMethod.PUT })
     @ResponseBody
-    public void updateSnapshotLocalCache(@PathVariable final String project, @PathVariable final String tableName, @PathVariable final String snapshotID) {
+    public void updateSnapshotLocalCache(@PathVariable final String project, @PathVariable final String tableName,
+            @PathVariable final String snapshotID) {
         tableService.updateSnapshotLocalCache(project, tableName, snapshotID);
     }
 
     @RequestMapping(value = "/{tableName}/{snapshotID}/snapshotLocalCache/state", method = { RequestMethod.GET })
     @ResponseBody
-    public String getSnapshotLocalCacheState(@PathVariable final String tableName, @PathVariable final String snapshotID) {
+    public String getSnapshotLocalCacheState(@PathVariable final String tableName,
+            @PathVariable final String snapshotID) {
         return tableService.getSnapshotLocalCacheState(tableName, snapshotID);
     }
 
@@ -229,7 +239,8 @@ public void removeSnapshotLocalCache(@PathVariable final String tableName, @Path
 
     @RequestMapping(value = "/{project}/{tableName}/snapshots", method = { RequestMethod.GET })
     @ResponseBody
-    public List<TableSnapshotResponse> getTableSnapshots(@PathVariable final String project, @PathVariable final String tableName) throws IOException {
+    public List<TableSnapshotResponse> getTableSnapshots(@PathVariable final String project,
+            @PathVariable final String tableName) throws IOException {
         return tableService.getLookupTableSnapshots(project, tableName);
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
index 40fc5ef0f2..6b99bee3f5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.metrics;
 
 import java.nio.charset.Charset;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -103,8 +104,8 @@ private static void updateMetricsToReservoir(SQLRequest sqlRequest, SQLResponse
             RecordEvent rpcMetricsEvent = new TimedRecordEvent(
                     KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryRpcCall());
             setRPCWrapper(rpcMetricsEvent, //
-                    norm(sqlRequest.getProject()), entry.getRealizationName(),
-                    entry.getRpcServer(), entry.getException());
+                    norm(sqlRequest.getProject()), entry.getRealizationName(), entry.getRpcServer(),
+                    entry.getException());
             setRPCStats(rpcMetricsEvent, //
                     entry.getCallTimeMs(), entry.getSkippedRows(), entry.getScannedRows(), entry.getReturnedRows(),
                     entry.getAggregatedRows());
@@ -117,8 +118,7 @@ private static void updateMetricsToReservoir(SQLRequest sqlRequest, SQLResponse
                     KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQuery());
             setQueryWrapper(queryMetricsEvent, //
                     user, sqlHashCode, sqlResponse.isStorageCacheUsed() ? "CACHE" : contextEntry.getQueryType(),
-                    norm(sqlRequest.getProject()), contextEntry.getRealization(),
-                    contextEntry.getRealizationType(),
+                    norm(sqlRequest.getProject()), contextEntry.getRealization(), contextEntry.getRealizationType(),
                     sqlResponse.getThrowable());
 
             long totalStorageReturnCount = 0L;
@@ -129,9 +129,9 @@ private static void updateMetricsToReservoir(SQLRequest sqlRequest, SQLResponse
                             KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
 
                     setCubeWrapper(cubeSegmentMetricsEvent, //
-                            norm(sqlRequest.getProject()),
-                            segmentEntry.getCubeName(), segmentEntry.getSegmentName(), segmentEntry.getSourceCuboidId(),
-                            segmentEntry.getTargetCuboidId(), segmentEntry.getFilterMask());
+                            norm(sqlRequest.getProject()), segmentEntry.getCubeName(), segmentEntry.getSegmentName(),
+                            segmentEntry.getSourceCuboidId(), segmentEntry.getTargetCuboidId(),
+                            segmentEntry.getFilterMask());
 
                     setCubeStats(cubeSegmentMetricsEvent, //
                             segmentEntry.getCallCount(), segmentEntry.getCallTimeSum(), segmentEntry.getCallTimeMax(),
@@ -152,7 +152,7 @@ private static void updateMetricsToReservoir(SQLRequest sqlRequest, SQLResponse
     }
 
     private static String norm(String project) {
-        return project.toUpperCase();
+        return project.toUpperCase(Locale.ROOT);
     }
 
     private static void setRPCWrapper(RecordEvent metricsEvent, String projectName, String realizationName,
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
index 74eae63a21..1173fe1de9 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.security;
 
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.lang.ArrayUtils;
@@ -49,9 +50,11 @@
      * @param contextSource
      * @param groupSearchBase
      */
-    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole, String defaultRole) {
+    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole,
+            String defaultRole) {
         super(contextSource, groupSearchBase);
-        this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole.toUpperCase()); // spring will convert group names to uppercase by default
+        this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole.toUpperCase(Locale.ROOT)); // spring will
+        // convert group names to uppercase by default
 
         String[] defaultRoles = StringUtils.split(defaultRole, ",");
         if (ArrayUtils.contains(defaultRoles, Constant.ROLE_MODELER)) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java b/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
index b67753758c..dc8ac74845 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
@@ -21,6 +21,7 @@
 import java.io.Serializable;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.persistence.AclEntity;
 import org.apache.kylin.rest.service.AclService;
@@ -88,7 +89,7 @@ private boolean checkExternalPermission(ExternalAclProvider eap, Authentication
             try {
                 p = kylinPermissionFactory.buildFromName(permString);
             } catch (IllegalArgumentException notfound) {
-                p = kylinPermissionFactory.buildFromName(permString.toUpperCase());
+                p = kylinPermissionFactory.buildFromName(permString.toUpperCase(Locale.ROOT));
             }
 
             if (p != null) {
@@ -105,7 +106,7 @@ public boolean hasPermission(Authentication authentication, Serializable targetI
         ExternalAclProvider eap = ExternalAclProvider.getInstance();
         if (eap == null)
             return super.hasPermission(authentication, targetId, targetType, permission);
-        
+
         return checkExternalPermission(eap, authentication, targetType, targetId.toString(), permission);
     }
 }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 9eb9bb7239..47b8027382 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -31,6 +31,7 @@
 package org.apache.kylin.rest.security;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
@@ -51,7 +52,6 @@
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -60,6 +60,7 @@
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.CompareFilter;
@@ -96,9 +97,11 @@
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
-    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<>(
+            Bytes.BYTES_COMPARATOR);
 
-    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
+    private static List<KeyValue> toKeyValue(byte[] row,
+            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
         return toKeyValue(row, rowdata, 0, Long.MAX_VALUE, maxVersions);
     }
 
@@ -163,7 +166,9 @@ public Result append(Append append) throws IOException {
         throw new RuntimeException(this.getClass() + " does NOT implement this method.");
     }
 
-    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart, long timestampEnd, int maxVersions) {
+    private static List<KeyValue> toKeyValue(byte[] row,
+            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart,
+            long timestampEnd, int maxVersions) {
         List<KeyValue> ret = new ArrayList<KeyValue>();
         for (byte[] family : rowdata.keySet())
             for (byte[] qualifier : rowdata.get(family).keySet()) {
@@ -238,12 +243,14 @@ public void batch(List<? extends Row> actions, Object[] results) throws IOExcept
     }
 
     @Override
-    public <R> void batchCallback(List<? extends Row> actions, Object[] results, Batch.Callback<R> callback) throws IOException, InterruptedException {
+    public <R> void batchCallback(List<? extends Row> actions, Object[] results, Batch.Callback<R> callback)
+            throws IOException, InterruptedException {
 
     }
 
     @Override
-    public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback) throws IOException, InterruptedException {
+    public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback)
+            throws IOException, InterruptedException {
         return new Object[0];
     }
 
@@ -267,11 +274,13 @@ public Result get(Get get) throws IOException {
                     qualifiers = data.get(row).get(family).navigableKeySet();
                 for (byte[] qualifier : qualifiers) {
                     if (qualifier == null)
-                        qualifier = "".getBytes();
-                    if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier) || data.get(row).get(family).get(qualifier).isEmpty())
+                        qualifier = "".getBytes(StandardCharsets.UTF_8);
+                    if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier)
+                            || data.get(row).get(family).get(qualifier).isEmpty())
                         continue;
                     Map.Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
-                    kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
+                    kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(),
+                            timestampAndValue.getValue()));
                 }
             }
         }
@@ -320,7 +329,8 @@ public ResultScanner getScanner(Scan scan) throws IOException {
 
             List<KeyValue> kvs = null;
             if (!scan.hasFamilies()) {
-                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(), scan.getMaxVersions());
+                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(),
+                        scan.getMaxVersions());
             } else {
                 kvs = new ArrayList<KeyValue>();
                 for (byte[] family : scan.getFamilyMap().keySet()) {
@@ -482,16 +492,19 @@ public ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOExcept
     @Override
     public void put(Put put) throws IOException {
         byte[] row = put.getRow();
-        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row, new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
+        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row,
+                new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
         for (byte[] family : put.getFamilyMap().keySet()) {
-            if (columnFamilies.contains(new String(family)) == false) {
-                throw new RuntimeException("Not Exists columnFamily : " + new String(family));
+            if (columnFamilies.contains(new String(family, StandardCharsets.UTF_8)) == false) {
+                throw new RuntimeException("Not Exists columnFamily : " + new String(family, StandardCharsets.UTF_8));
             }
-            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
+            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family,
+                    new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
             for (KeyValue kv : put.getFamilyMap().get(family)) {
                 kv.updateLatestStamp(Bytes.toBytes(System.currentTimeMillis()));
                 byte[] qualifier = kv.getQualifier();
-                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier, new TreeMap<Long, byte[]>());
+                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier,
+                        new TreeMap<Long, byte[]>());
                 qualifierData.put(kv.getTimestamp(), kv.getValue());
             }
         }
@@ -510,9 +523,13 @@ public void put(List<Put> puts) throws IOException {
 
     private boolean check(byte[] row, byte[] family, byte[] qualifier, byte[] value) {
         if (value == null || value.length == 0)
-            return !data.containsKey(row) || !data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier);
+            return !data.containsKey(row) || !data.get(row).containsKey(family)
+                    || !data.get(row).get(family).containsKey(qualifier);
         else
-            return data.containsKey(row) && data.get(row).containsKey(family) && data.get(row).get(family).containsKey(qualifier) && !data.get(row).get(family).get(qualifier).isEmpty() && Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
+            return data.containsKey(row) && data.get(row).containsKey(family)
+                    && data.get(row).get(family).containsKey(qualifier)
+                    && !data.get(row).get(family).get(qualifier).isEmpty()
+                    && Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
     }
 
     /**
@@ -528,7 +545,8 @@ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] v
     }
 
     @Override
-    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
+    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp,
+            byte[] bytes3, Put put) throws IOException {
         return false;
     }
 
@@ -581,7 +599,8 @@ public void delete(List<Delete> deletes) throws IOException {
      * {@inheritDoc}
      */
     @Override
-    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException {
+    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete)
+            throws IOException {
         if (check(row, family, qualifier, value)) {
             delete(delete);
             return true;
@@ -590,7 +609,8 @@ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[
     }
 
     @Override
-    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
+    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp,
+            byte[] bytes3, Delete delete) throws IOException {
         return false;
     }
 
@@ -611,7 +631,8 @@ public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, lo
     }
 
     @Override
-    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, Durability durability) throws IOException {
+    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, Durability durability)
+            throws IOException {
         return 0;
     }
 
@@ -629,13 +650,15 @@ public CoprocessorRpcChannel coprocessorService(byte[] row) {
     }
 
     @Override
-    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable) throws ServiceException, Throwable {
+    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service, byte[] startKey, byte[] endKey,
+            Batch.Call<T, R> callable) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     @Override
-    public <T extends Service, R> void coprocessorService(Class<T> service, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback) throws ServiceException, Throwable {
+    public <T extends Service, R> void coprocessorService(Class<T> service, byte[] startKey, byte[] endKey,
+            Batch.Call<T, R> callable, Batch.Callback<R> callback) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
@@ -658,19 +681,23 @@ public void setWriteBufferSize(long writeBufferSize) throws IOException {
     }
 
     @Override
-    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
+    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
+            Message request, byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     @Override
-    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey, R responsePrototype, Batch.Callback<R> callback) throws ServiceException, Throwable {
+    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
+            Message request, byte[] startKey, byte[] endKey, R responsePrototype, Batch.Callback<R> callback)
+            throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     //@Override  (only since 0.98.8)
-    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException {
+    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp,
+            byte[] value, RowMutations mutation) throws IOException {
         throw new NotImplementedException();
 
     }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
index 0a8e84745a..566e1cde36 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
@@ -24,6 +24,7 @@
 import java.io.StringWriter;
 import java.lang.reflect.Method;
 import java.nio.charset.Charset;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.commons.io.IOUtils;
@@ -72,7 +73,7 @@ public Properties getAllKylinProperties() {
     }
 
     protected String resolvePlaceholder(String placeholder, Properties props) {
-        if (placeholder.toLowerCase().contains("password")) {
+        if (placeholder.toLowerCase(Locale.ROOT).contains("password")) {
             return EncryptUtil.decrypt(props.getProperty(placeholder));
         } else {
             return props.getProperty(placeholder);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 4f439fe8fa..aa0d549f2e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -24,6 +24,7 @@
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import javax.annotation.Nullable;
@@ -99,7 +100,8 @@ protected AclRecord initEntityAfterReload(AclRecord acl, String resourceName) {
 
     @Override
     public void afterPropertiesSet() throws Exception {
-        Broadcaster.getInstance(KylinConfig.getInstanceFromEnv()).registerStaticListener(new AclRecordSyncListener(), "acl");
+        Broadcaster.getInstance(KylinConfig.getInstanceFromEnv()).registerStaticListener(new AclRecordSyncListener(),
+                "acl");
     }
 
     private class AclRecordSyncListener extends Broadcaster.Listener {
@@ -156,7 +158,7 @@ public Acl readAclById(ObjectIdentity object, List<Sid> sids) throws NotFoundExc
         Message msg = MsgPicker.getMsg();
         Map<ObjectIdentity, Acl> aclsMap = readAclsById(Arrays.asList(object), sids);
         if (!aclsMap.containsKey(object)) {
-            throw new BadRequestException(String.format(msg.getNO_ACL_ENTRY(), object));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getNO_ACL_ENTRY(), object));
         }
         return aclsMap.get(object);
     }
@@ -173,7 +175,7 @@ public Acl readAclById(ObjectIdentity object, List<Sid> sids) throws NotFoundExc
             AclRecord record = getAclRecordByCache(objID(oid));
             if (record == null) {
                 Message msg = MsgPicker.getMsg();
-                throw new NotFoundException(String.format(msg.getACL_INFO_NOT_FOUND(), oid));
+                throw new NotFoundException(String.format(Locale.ROOT, msg.getACL_INFO_NOT_FOUND(), oid));
             }
 
             Acl parentAcl = null;
@@ -209,7 +211,8 @@ public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) thr
             List<ObjectIdentity> children = findChildren(objectIdentity);
             if (!deleteChildren && children.size() > 0) {
                 Message msg = MsgPicker.getMsg();
-                throw new BadRequestException(String.format(msg.getIDENTITY_EXIST_CHILDREN(), objectIdentity));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getIDENTITY_EXIST_CHILDREN(), objectIdentity));
             }
             for (ObjectIdentity oid : children) {
                 deleteAcl(oid, deleteChildren);
@@ -272,7 +275,7 @@ private AclRecord getAclRecordByCache(String id) {
                 return aclMap.get(id);
             }
         }
-        
+
         try (AutoLock l = lock.lockForWrite()) {
             crud.reloadAll();
             return aclMap.get(id);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
index 33957abc94..33033ccfc3 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.service;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -198,8 +199,8 @@ private void convertToResourceStore(KylinConfig kylinConfig, String tableName, R
 
     private ObjectIdentityImpl getDomainObjectInfoFromRs(Result result) {
         String type = new String(result.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY),
-                Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_TYPE_COLUMN)));
-        String id = new String(result.getRow());
+                Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_TYPE_COLUMN)), StandardCharsets.UTF_8);
+        String id = new String(result.getRow(), StandardCharsets.UTF_8);
         ObjectIdentityImpl newInfo = new ObjectIdentityImpl(type, id);
         return newInfo;
     }
@@ -228,7 +229,7 @@ private SidInfo getOwnerSidInfo(Result result) throws IOException {
 
         if (familyMap != null && !familyMap.isEmpty()) {
             for (Map.Entry<byte[], byte[]> entry : familyMap.entrySet()) {
-                String sid = new String(entry.getKey());
+                String sid = new String(entry.getKey(), StandardCharsets.UTF_8);
                 LegacyAceInfo aceInfo = aceSerializer.deserialize(entry.getValue());
                 if (null != aceInfo) {
                     allAceInfoMap.put(sid, aceInfo);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
index f7881f141c..23d523e866 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
@@ -20,13 +20,13 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Properties;
 import java.util.TreeMap;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang3.StringUtils;
@@ -39,6 +39,8 @@
 import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 
+import com.google.common.collect.Lists;
+
 /**
  */
 @Component("adminService")
@@ -49,7 +51,7 @@
      * Get Java Env info as string
      */
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
-    public String getEnv() throws ConfigurationException {
+    public String getEnv() throws ConfigurationException, UnsupportedEncodingException {
         PropertiesConfiguration tempConfig = new PropertiesConfiguration();
         OrderedProperties orderedProperties = new OrderedProperties(new TreeMap<String, String>());
         // Add Java Env
@@ -76,7 +78,7 @@ public String getEnv() throws ConfigurationException {
 
         // do save
         tempConfig.save(baos);
-        content = baos.toString();
+        content = baos.toString("UTF-8");
         return content;
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 58a3b2f7af..96d60c71ff 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -167,8 +167,10 @@ public boolean isCubeNameVaildate(final String cubeName) {
         List<CubeInstance> filterCubes = new ArrayList<CubeInstance>();
         for (CubeInstance cubeInstance : filterModelCubes) {
             boolean isCubeMatch = (null == cubeName)
-                    || (!exactMatch && cubeInstance.getName().toLowerCase().contains(cubeName.toLowerCase()))
-                    || (exactMatch && cubeInstance.getName().toLowerCase().equals(cubeName.toLowerCase()));
+                    || (!exactMatch && cubeInstance.getName().toLowerCase(Locale.ROOT)
+                            .contains(cubeName.toLowerCase(Locale.ROOT)))
+                    || (exactMatch && cubeInstance.getName().toLowerCase(Locale.ROOT)
+                            .equals(cubeName.toLowerCase(Locale.ROOT)));
 
             if (isCubeMatch) {
                 filterCubes.add(cubeInstance);
@@ -200,11 +202,11 @@ public CubeInstance createCubeAndDesc(ProjectInstance project, CubeDesc desc) th
         String cubeName = desc.getName();
 
         if (getCubeManager().getCube(cubeName) != null) {
-            throw new BadRequestException(String.format(msg.getCUBE_ALREADY_EXIST(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getCUBE_ALREADY_EXIST(), cubeName));
         }
 
         if (getCubeDescManager().getCubeDesc(desc.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getCUBE_DESC_ALREADY_EXIST(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getCUBE_DESC_ALREADY_EXIST(), desc.getName()));
         }
 
         String owner = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -273,12 +275,12 @@ public CubeDesc updateCubeAndDesc(CubeInstance cube, CubeDesc desc, String newPr
         final List<CubingJob> cubingJobs = jobService.listJobsByRealizationName(cube.getName(), null,
                 EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
         if (!cubingJobs.isEmpty()) {
-            throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
 
         //double check again
         if (!forceUpdate && !cube.getDescriptor().consistentWith(desc)) {
-            throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
         }
 
         CubeDesc updatedCubeDesc = getCubeDescManager().updateCubeDesc(desc);
@@ -302,7 +304,7 @@ public void deleteCube(CubeInstance cube) throws IOException {
         final List<CubingJob> cubingJobs = jobService.listJobsByRealizationName(cube.getName(), null,
                 EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR));
         if (!cubingJobs.isEmpty()) {
-            throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
 
         try {
@@ -322,20 +324,21 @@ public void deleteCube(CubeInstance cube) throws IOException {
                 List<RealizationEntry> cubeRealizationEntries = instance.getRealizationEntries();
 
                 boolean needUpdateHybrid = false;
-                for (RealizationEntry cubeRealizationEntry : cubeRealizationEntries){
-                    if (cube.getName().equals(cubeRealizationEntry.getRealization())){
+                for (RealizationEntry cubeRealizationEntry : cubeRealizationEntries) {
+                    if (cube.getName().equals(cubeRealizationEntry.getRealization())) {
                         needUpdateHybrid = true;
                         cubeRealizationEntries.remove(cubeRealizationEntry);
                         break;
                     }
                 }
 
-                if (needUpdateHybrid){
+                if (needUpdateHybrid) {
                     String[] cubeNames = new String[cubeRealizationEntries.size()];
-                    for (int i = 0; i < cubeRealizationEntries.size(); i++){
+                    for (int i = 0; i < cubeRealizationEntries.size(); i++) {
                         cubeNames[i] = cubeRealizationEntries.get(i).getRealization();
                     }
-                    hybridService.updateHybridCubeNoCheck(instance.getName(), projectInstance.getName(), cube.getModel().getName(), cubeNames);
+                    hybridService.updateHybridCubeNoCheck(instance.getName(), projectInstance.getName(),
+                            cube.getModel().getName(), cubeNames);
                 }
             }
         }
@@ -361,12 +364,13 @@ public CubeInstance purgeCube(CubeInstance cube) throws IOException {
         final List<CubingJob> cubingJobs = jobService.listJobsByRealizationName(cubeName, null, EnumSet
                 .of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR, ExecutableState.STOPPED));
         if (!cubingJobs.isEmpty()) {
-            throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDISCARD_JOB_FIRST(), cubeName));
         }
 
         RealizationStatusEnum ostatus = cube.getStatus();
         if (null != ostatus && !RealizationStatusEnum.DISABLED.equals(ostatus)) {
-            throw new BadRequestException(String.format(msg.getPURGE_NOT_DISABLED_CUBE(), cubeName, ostatus));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getPURGE_NOT_DISABLED_CUBE(), cubeName, ostatus));
         }
 
         this.releaseAllSegments(cube);
@@ -389,7 +393,8 @@ public CubeInstance disableCube(CubeInstance cube) throws IOException {
 
         RealizationStatusEnum ostatus = cube.getStatus();
         if (null != ostatus && !RealizationStatusEnum.READY.equals(ostatus)) {
-            throw new BadRequestException(String.format(msg.getDISABLE_NOT_READY_CUBE(), cubeName, ostatus));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getDISABLE_NOT_READY_CUBE(), cubeName, ostatus));
         }
 
         return getCubeManager().updateCubeStatus(cube, RealizationStatusEnum.DISABLED);
@@ -403,16 +408,17 @@ public void checkEnableCubeCondition(CubeInstance cube) {
         RealizationStatusEnum ostatus = cube.getStatus();
 
         if (!cube.getStatus().equals(RealizationStatusEnum.DISABLED)) {
-            throw new BadRequestException(String.format(msg.getENABLE_NOT_DISABLED_CUBE(), cubeName, ostatus));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getENABLE_NOT_DISABLED_CUBE(), cubeName, ostatus));
         }
 
         if (cube.getSegments(SegmentStatusEnum.READY).size() == 0) {
-            throw new BadRequestException(String.format(msg.getNO_READY_SEGMENT(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getNO_READY_SEGMENT(), cubeName));
         }
 
         if (!cube.getDescriptor().checkSignature()) {
             throw new BadRequestException(
-                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+                    String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
         }
     }
 
@@ -519,29 +525,32 @@ public CubeInstance deleteSegment(CubeInstance cube, String segmentName) throws
         }
 
         if (toDelete == null) {
-            throw new BadRequestException(String.format(msg.getSEG_NOT_FOUND(), segmentName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getSEG_NOT_FOUND(), segmentName));
         }
 
         if (toDelete.getStatus() != SegmentStatusEnum.READY) {
             if (toDelete.getStatus() == SegmentStatusEnum.NEW) {
                 if (!isOrphonSegment(cube, toDelete.getUuid())) {
-                    throw new BadRequestException(String.format(msg.getDELETE_NOT_READY_SEG(), segmentName));
+                    throw new BadRequestException(
+                            String.format(Locale.ROOT, msg.getDELETE_NOT_READY_SEG(), segmentName));
                 }
             } else {
-                throw new BadRequestException(String.format(msg.getDELETE_NOT_READY_SEG(), segmentName));
+                throw new BadRequestException(String.format(Locale.ROOT, msg.getDELETE_NOT_READY_SEG(), segmentName));
             }
         }
 
         if (!segmentName.equals(cube.getSegments().get(0).getName())
                 && !segmentName.equals(cube.getSegments().get(cube.getSegments().size() - 1).getName())) {
-            logger.warn(String.format(msg.getDELETE_SEGMENT_CAUSE_GAPS(), cube.getName(), segmentName));
+            logger.warn(String.format(Locale.ROOT, msg.getDELETE_SEGMENT_CAUSE_GAPS(), cube.getName(), segmentName));
         }
 
         return CubeManager.getInstance(getConfig()).updateCubeDropSegments(cube, toDelete);
     }
 
     public boolean isOrphonSegment(CubeInstance cube, String segId) {
-        List<JobInstance> jobInstances = jobService.searchJobsByCubeName(cube.getName(), cube.getProject(), Lists.newArrayList(JobStatusEnum.NEW, JobStatusEnum.PENDING, JobStatusEnum.RUNNING, JobStatusEnum.ERROR, JobStatusEnum.STOPPED),
+        List<JobInstance> jobInstances = jobService.searchJobsByCubeName(cube.getName(),
+                cube.getProject(), Lists.newArrayList(JobStatusEnum.NEW, JobStatusEnum.PENDING, JobStatusEnum.RUNNING,
+                        JobStatusEnum.ERROR, JobStatusEnum.STOPPED),
                 JobTimeFilterEnum.ALL, JobService.JobSearchMode.CUBING_ONLY);
         for (JobInstance jobInstance : jobInstances) {
             // if there are segment related jobs, can not delete this segment.
@@ -575,15 +584,15 @@ private void releaseAllSegments(CubeInstance cube) throws IOException {
         update.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
         update.setCuboids(Maps.<Long, Long> newHashMap());
         update.setCuboidsRecommend(Sets.<Long> newHashSet());
-        update.setUpdateTableSnapshotPath(Maps.<String, String>newHashMap());
+        update.setUpdateTableSnapshotPath(Maps.<String, String> newHashMap());
         CubeManager.getInstance(getConfig()).updateCube(update);
     }
 
     public void updateOnNewSegmentReady(String cubeName) {
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase())
-                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase(Locale.ROOT))
+                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase(Locale.ROOT))) {
             CubeInstance cube = getCubeManager().getCube(cubeName);
             if (cube != null) {
                 CubeSegment seg = cube.getLatestBuiltSegment();
@@ -666,18 +675,20 @@ public void validateCubeDesc(CubeDesc desc, boolean isDraft) {
         }
         if (!ValidateUtil.isAlphanumericUnderscore(cubeName)) {
             logger.info("Invalid Cube name {}, only letters, numbers and underscore supported.", cubeName);
-            throw new BadRequestException(String.format(msg.getINVALID_CUBE_NAME(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getINVALID_CUBE_NAME(), cubeName));
         }
 
         if (!isDraft) {
             DataModelDesc modelDesc = modelService.getDataModelManager().getDataModelDesc(desc.getModelName());
             if (modelDesc == null) {
-                throw new BadRequestException(String.format(msg.getMODEL_NOT_FOUND(), desc.getModelName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getMODEL_NOT_FOUND(), desc.getModelName()));
             }
 
             if (modelDesc.isDraft()) {
                 logger.info("Cannot use draft model.");
-                throw new BadRequestException(String.format(msg.getUSE_DRAFT_MODEL(), desc.getModelName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getUSE_DRAFT_MODEL(), desc.getModelName()));
             }
         }
     }
@@ -739,7 +750,8 @@ public CubeDesc updateCube(CubeInstance cube, CubeDesc desc, ProjectInstance pro
 
         try {
             if (cube.getSegments().size() != 0 && !cube.getDescriptor().consistentWith(desc)) {
-                throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
             }
 
             desc = updateCubeAndDesc(cube, desc, projectName, true);
@@ -774,9 +786,13 @@ public Draft getCubeDraft(String cubeName, String projectName) throws IOExceptio
             RootPersistentEntity e = d.getEntity();
             if (e instanceof CubeDesc) {
                 CubeDesc c = (CubeDesc) e;
-                if ((cubeName == null || (exactMatch && cubeName.toLowerCase().equals(c.getName().toLowerCase()))
-                        || (!exactMatch && c.getName().toLowerCase().contains(cubeName.toLowerCase())))
-                        && (modelName == null || modelName.toLowerCase().equals(c.getModelName().toLowerCase()))) {
+                if ((cubeName == null
+                        || (exactMatch
+                                && cubeName.toLowerCase(Locale.ROOT).equals(c.getName().toLowerCase(Locale.ROOT)))
+                        || (!exactMatch
+                                && c.getName().toLowerCase(Locale.ROOT).contains(cubeName.toLowerCase(Locale.ROOT))))
+                        && (modelName == null || modelName.toLowerCase(Locale.ROOT)
+                                .equals(c.getModelName().toLowerCase(Locale.ROOT)))) {
                     // backward compability for percentile
                     if (c.getMeasures() != null) {
                         for (MeasureDesc m : c.getMeasures()) {
@@ -923,8 +939,8 @@ public void migrateCube(CubeInstance cube, String projectName) {
                 "Destination configuration should not be empty.");
 
         String stringBuilder = ("%s/bin/kylin.sh org.apache.kylin.tool.CubeMigrationCLI %s %s %s %s %s %s true true");
-        String cmd = String.format(stringBuilder, KylinConfig.getKylinHome(), srcCfgUri, dstCfgUri, cube.getName(),
-                projectName, config.isAutoMigrateCubeCopyAcl(), config.isAutoMigrateCubePurge());
+        String cmd = String.format(Locale.ROOT, stringBuilder, KylinConfig.getKylinHome(), srcCfgUri, dstCfgUri,
+                cube.getName(), projectName, config.isAutoMigrateCubeCopyAcl(), config.isAutoMigrateCubePurge());
 
         logger.info("One click migration cmd: " + cmd);
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
index e548693301..ec395e024e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
@@ -16,11 +16,11 @@
  * limitations under the License.
 */
 
-
 package org.apache.kylin.rest.service;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
@@ -53,14 +53,14 @@
     @Autowired
     private CubeService cubeService;
 
-    private enum CategoryEnum {QUERY, JOB}
+    private enum CategoryEnum {
+        QUERY, JOB
+    }
 
     private enum QueryDimensionEnum {
-        PROJECT(QueryPropertyEnum.PROJECT.toString()),
-        CUBE(QueryPropertyEnum.REALIZATION.toString()),
-        DAY(TimePropertyEnum.DAY_DATE.toString()),
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()),
-        MONTH(TimePropertyEnum.MONTH.toString());
+        PROJECT(QueryPropertyEnum.PROJECT.toString()), CUBE(QueryPropertyEnum.REALIZATION.toString()), DAY(
+                TimePropertyEnum.DAY_DATE.toString()), WEEK(
+                        TimePropertyEnum.WEEK_BEGIN_DATE.toString()), MONTH(TimePropertyEnum.MONTH.toString());
         private final String sql;
 
         QueryDimensionEnum(String sql) {
@@ -73,11 +73,9 @@ public String toSQL() {
     };
 
     private enum JobDimensionEnum {
-        PROJECT(JobPropertyEnum.PROJECT.toString()),
-        CUBE(JobPropertyEnum.CUBE.toString()),
-        DAY(TimePropertyEnum.DAY_DATE.toString()),
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()),
-        MONTH(TimePropertyEnum.MONTH.toString());
+        PROJECT(JobPropertyEnum.PROJECT.toString()), CUBE(JobPropertyEnum.CUBE.toString()), DAY(
+                TimePropertyEnum.DAY_DATE.toString()), WEEK(
+                        TimePropertyEnum.WEEK_BEGIN_DATE.toString()), MONTH(TimePropertyEnum.MONTH.toString());
         private final String sql;
 
         JobDimensionEnum(String sql) {
@@ -90,10 +88,10 @@ public String toSQL() {
     };
 
     private enum QueryMetricEnum {
-        QUERY_COUNT("count(*)"),
-        AVG_QUERY_LATENCY("sum(" + QueryPropertyEnum.TIME_COST.toString() + ")/(count(" + QueryPropertyEnum.TIME_COST.toString() + "))"),
-        MAX_QUERY_LATENCY("max(" + QueryPropertyEnum.TIME_COST.toString() + ")"),
-        MIN_QUERY_LATENCY("min(" + QueryPropertyEnum.TIME_COST.toString() + ")");
+        QUERY_COUNT("count(*)"), AVG_QUERY_LATENCY("sum(" + QueryPropertyEnum.TIME_COST.toString() + ")/(count("
+                + QueryPropertyEnum.TIME_COST.toString() + "))"), MAX_QUERY_LATENCY(
+                        "max(" + QueryPropertyEnum.TIME_COST.toString() + ")"), MIN_QUERY_LATENCY(
+                                "min(" + QueryPropertyEnum.TIME_COST.toString() + ")");
 
         private final String sql;
 
@@ -107,10 +105,10 @@ public String toSQL() {
     }
 
     private enum JobMetricEnum {
-        JOB_COUNT("count(*)"),
-        AVG_JOB_BUILD_TIME("sum(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")/count(" + JobPropertyEnum.PER_BYTES_TIME_COST + ")"),
-        MAX_JOB_BUILD_TIME("max(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"),
-        MIN_JOB_BUILD_TIME("min(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
+        JOB_COUNT("count(*)"), AVG_JOB_BUILD_TIME("sum(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")/count("
+                + JobPropertyEnum.PER_BYTES_TIME_COST + ")"), MAX_JOB_BUILD_TIME(
+                        "max(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), MIN_JOB_BUILD_TIME(
+                                "min(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
 
         private final String sql;
 
@@ -133,7 +131,7 @@ public MetricsResponse getCubeMetrics(String projectName, String cubeName) {
             totalCube += getHybridManager().listHybridInstances().size();
         } else {
             ProjectInstance project = getProjectManager().getProject(projectName);
-            totalCube +=  project.getRealizationCount(RealizationType.HYBRID);
+            totalCube += project.getRealizationCount(RealizationType.HYBRID);
         }
         Float minCubeExpansion = Float.POSITIVE_INFINITY;
         Float maxCubeExpansion = Float.NEGATIVE_INFINITY;
@@ -142,7 +140,8 @@ public MetricsResponse getCubeMetrics(String projectName, String cubeName) {
             if (cubeInstance.getInputRecordSizeBytes() > 0) {
                 totalCubeSize += cubeInstance.getSizeKB();
                 totalRecoadSize += cubeInstance.getInputRecordSizeBytes();
-                Float cubeExpansion = new Float(cubeInstance.getSizeKB()) * 1024 / cubeInstance.getInputRecordSizeBytes();
+                Float cubeExpansion = new Float(cubeInstance.getSizeKB()) * 1024
+                        / cubeInstance.getInputRecordSizeBytes();
                 if (cubeExpansion > maxCubeExpansion) {
                     maxCubeExpansion = cubeExpansion;
                 }
@@ -178,33 +177,41 @@ public MetricsResponse getCubeMetrics(String projectName, String cubeName) {
     }
 
     public String getQueryMetricsSQL(String startTime, String endTime, String projectName, String cubeName) {
-        String[] metrics = new String[] {QueryMetricEnum.QUERY_COUNT.toSQL(), QueryMetricEnum.AVG_QUERY_LATENCY.toSQL(), QueryMetricEnum.MAX_QUERY_LATENCY.toSQL(), QueryMetricEnum.MIN_QUERY_LATENCY.toSQL()};
+        String[] metrics = new String[] { QueryMetricEnum.QUERY_COUNT.toSQL(),
+                QueryMetricEnum.AVG_QUERY_LATENCY.toSQL(), QueryMetricEnum.MAX_QUERY_LATENCY.toSQL(),
+                QueryMetricEnum.MIN_QUERY_LATENCY.toSQL() };
         List<String> filters = getBaseFilters(CategoryEnum.QUERY, projectName, startTime, endTime);
         filters = addCubeFilter(filters, CategoryEnum.QUERY, cubeName);
-        return createSql(null, metrics, getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()), filters.toArray(new String[filters.size()]));
+        return createSql(null, metrics,
+                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()),
+                filters.toArray(new String[filters.size()]));
     }
 
     public String getJobMetricsSQL(String startTime, String endTime, String projectName, String cubeName) {
-        String[] metrics = new String[] {JobMetricEnum.JOB_COUNT.toSQL(), JobMetricEnum.AVG_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MAX_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MIN_JOB_BUILD_TIME.toSQL()};
+        String[] metrics = new String[] { JobMetricEnum.JOB_COUNT.toSQL(), JobMetricEnum.AVG_JOB_BUILD_TIME.toSQL(),
+                JobMetricEnum.MAX_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MIN_JOB_BUILD_TIME.toSQL() };
         List<String> filters = getBaseFilters(CategoryEnum.JOB, projectName, startTime, endTime);
         filters = addCubeFilter(filters, CategoryEnum.JOB, cubeName);
-        return createSql(null, metrics, getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()), filters.toArray(new String[filters.size()]));
+        return createSql(null, metrics,
+                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()),
+                filters.toArray(new String[filters.size()]));
     }
 
-    public String getChartSQL(String startTime, String endTime, String projectName, String cubeName, String dimension, String metric, String category) {
-        try{
+    public String getChartSQL(String startTime, String endTime, String projectName, String cubeName, String dimension,
+            String metric, String category) {
+        try {
             CategoryEnum categoryEnum = CategoryEnum.valueOf(category);
             String table = "";
             String[] dimensionSQL = null;
             String[] metricSQL = null;
 
-            if(categoryEnum == CategoryEnum.QUERY) {
-                dimensionSQL = new String[] {QueryDimensionEnum.valueOf(dimension).toSQL()};
-                metricSQL = new String[] {QueryMetricEnum.valueOf(metric).toSQL()};
+            if (categoryEnum == CategoryEnum.QUERY) {
+                dimensionSQL = new String[] { QueryDimensionEnum.valueOf(dimension).toSQL() };
+                metricSQL = new String[] { QueryMetricEnum.valueOf(metric).toSQL() };
                 table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery());
             } else if (categoryEnum == CategoryEnum.JOB) {
-                dimensionSQL = new String[] {JobDimensionEnum.valueOf(dimension).toSQL()};
-                metricSQL = new String[] {JobMetricEnum.valueOf(metric).toSQL()};
+                dimensionSQL = new String[] { JobDimensionEnum.valueOf(dimension).toSQL() };
+                metricSQL = new String[] { JobMetricEnum.valueOf(metric).toSQL() };
                 table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob());
             }
 
@@ -220,17 +227,17 @@ public String getChartSQL(String startTime, String endTime, String projectName,
     }
 
     public MetricsResponse transformChartData(SQLResponse sqlResponse) {
-        if(!sqlResponse.getIsException()){
+        if (!sqlResponse.getIsException()) {
             MetricsResponse metrics = new MetricsResponse();
             List<List<String>> results = sqlResponse.getResults();
             for (List<String> result : results) {
                 String dimension = result.get(0);
-                if (dimension !=null && !dimension.isEmpty()) {
+                if (dimension != null && !dimension.isEmpty()) {
                     String metric = result.get(1);
                     metrics.increase(dimension, getMetricValue(metric));
                 }
             }
-            return  metrics;
+            return metrics;
         }
         return null;
     }
@@ -248,7 +255,7 @@ public void checkAuthorization(ProjectInstance project) throws AccessDeniedExcep
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
-    public void checkAuthorization() throws AccessDeniedException{
+    public void checkAuthorization() throws AccessDeniedException {
     }
 
     private List<String> getBaseFilters(CategoryEnum category, String projectName, String startTime, String endTime) {
@@ -262,7 +269,7 @@ public void checkAuthorization() throws AccessDeniedException{
         filters.add(TimePropertyEnum.DAY_DATE.toString() + " >= '" + startTime + "'");
         filters.add(TimePropertyEnum.DAY_DATE.toString() + " <= '" + endTime + "'");
         if (!Strings.isNullOrEmpty(projectName)) {
-            filters.add(project + " ='" + projectName.toUpperCase() + "'");
+            filters.add(project + " ='" + projectName.toUpperCase(Locale.ROOT) + "'");
         } else {
             filters.add(project + " <> '" + MetricsManager.SYSTEM_PROJECT + "'");
         }
@@ -279,7 +286,7 @@ public void checkAuthorization() throws AccessDeniedException{
             HybridInstance hybridInstance = getHybridManager().getHybridInstance(cubeName);
             if (null != hybridInstance) {
                 StringBuffer cubeNames = new StringBuffer();
-                for (CubeInstance cube:getCubeByHybrid(hybridInstance)) {
+                for (CubeInstance cube : getCubeByHybrid(hybridInstance)) {
                     cubeNames.append(",'" + cube.getName() + "'");
                 }
                 baseFilter.add(JobPropertyEnum.CUBE.toString() + " IN (" + cubeNames.substring(1) + ")");
@@ -320,7 +327,7 @@ private String createSql(String[] dimensions, String[] metrics, String category,
         if (filters != null && filters.length > 0) {
             StringBuffer filterSQL = new StringBuffer(" where ");
             filterSQL.append(filters[0]);
-            for(int i = 1; i < filters.length; i++) {
+            for (int i = 1; i < filters.length; i++) {
                 filterSQL.append(" and ");
                 filterSQL.append(filters[i]);
             }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
index fb6fbf171c..57900ebef8 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang3.StringUtils;
@@ -64,7 +65,7 @@ private String getDiagnosisPackageName(File destDir) {
         File[] files = destDir.listFiles();
         if (files == null) {
             throw new BadRequestException(
-                    String.format(msg.getDIAG_PACKAGE_NOT_AVAILABLE(), destDir.getAbsolutePath()));
+                    String.format(Locale.ROOT, msg.getDIAG_PACKAGE_NOT_AVAILABLE(), destDir.getAbsolutePath()));
         }
         for (File subDir : files) {
             if (subDir.isDirectory()) {
@@ -75,7 +76,8 @@ private String getDiagnosisPackageName(File destDir) {
                 }
             }
         }
-        throw new BadRequestException(String.format(msg.getDIAG_PACKAGE_NOT_FOUND(), destDir.getAbsolutePath()));
+        throw new BadRequestException(
+                String.format(Locale.ROOT, msg.getDIAG_PACKAGE_NOT_FOUND(), destDir.getAbsolutePath()));
     }
 
     public BadQueryHistory getProjectBadQueryHistory(String project) throws IOException {
@@ -108,7 +110,8 @@ private void runDiagnosisCLI(String[] args) throws IOException {
         logger.debug("DiagnosisInfoCLI args: " + Arrays.toString(args));
         File script = new File(KylinConfig.getKylinHome() + File.separator + "bin", "diag.sh");
         if (!script.exists()) {
-            throw new BadRequestException(String.format(msg.getDIAG_NOT_FOUND(), script.getAbsolutePath()));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getDIAG_NOT_FOUND(), script.getAbsolutePath()));
         }
 
         String diagCmd = script.getAbsolutePath() + " " + StringUtils.join(args, " ");
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
index f3742debb0..7775d66d9a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.service;
 
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.dimension.BooleanDimEnc;
 import org.apache.kylin.dimension.DateDimEnc;
@@ -42,16 +43,19 @@
         Message msg = MsgPicker.getMsg();
 
         if (dataType.isIntegerFamily()) {
-            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
+            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME,
+                    DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
         } else if (dataType.isNumberFamily()) { //numbers include integers
             return Lists.newArrayList(DictionaryDimEnc.ENCODING_NAME);
         } else if (dataType.isDateTimeFamily()) {
-            return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME);
+            return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME,
+                    DictionaryDimEnc.ENCODING_NAME);
         } else if (dataType.isStringFamily()) {
-            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, FixedLenDimEnc.ENCODING_NAME, //
+            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME,
+                    FixedLenDimEnc.ENCODING_NAME, //
                     FixedLenHexDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
         } else {
-            throw new BadRequestException(String.format(msg.getVALID_ENCODING_NOT_AVAILABLE(), dataType));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getVALID_ENCODING_NOT_AVAILABLE(), dataType));
         }
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java
index 5bd6a81504..5d2fa68006 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.metadata.model.ExternalFilterDesc;
 import org.apache.kylin.rest.constant.Constant;
@@ -41,7 +42,7 @@ public void saveExternalFilter(ExternalFilterDesc desc) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         if (getTableManager().getExtFilterDesc(desc.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getFILTER_ALREADY_EXIST(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getFILTER_ALREADY_EXIST(), desc.getName()));
         }
         getTableManager().saveExternalFilter(desc);
     }
@@ -51,7 +52,7 @@ public void updateExternalFilter(ExternalFilterDesc desc) throws IOException {
         Message msg = MsgPicker.getMsg();
 
         if (getTableManager().getExtFilterDesc(desc.getName()) == null) {
-            throw new BadRequestException(String.format(msg.getFILTER_NOT_FOUND(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getFILTER_NOT_FOUND(), desc.getName()));
         }
         getTableManager().saveExternalFilter(desc);
     }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index f3b0c62f21..d8aa7111e6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -181,7 +181,7 @@ private ExecutableState parseToExecutableState(JobStatusEnum status) {
         case STOPPED:
             return ExecutableState.STOPPED;
         default:
-            throw new BadRequestException(String.format(msg.getILLEGAL_EXECUTABLE_STATE(), status));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_EXECUTABLE_STATE(), status));
         }
     }
 
@@ -204,7 +204,7 @@ private long getTimeStartInMillis(Calendar calendar, JobTimeFilterEnum timeFilte
         case ALL:
             return 0;
         default:
-            throw new BadRequestException(String.format(msg.getILLEGAL_TIME_FILTER(), timeFilter));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_TIME_FILTER(), timeFilter));
         }
     }
 
@@ -224,7 +224,7 @@ public JobInstance submitJobInternal(CubeInstance cube, TSRange tsRange, Segment
         Message msg = MsgPicker.getMsg();
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
-            throw new BadRequestException(String.format(msg.getBUILD_BROKEN_CUBE(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getBUILD_BROKEN_CUBE(), cube.getName()));
         }
 
         checkCubeDescSignature(cube);
@@ -252,7 +252,7 @@ public JobInstance submitJobInternal(CubeInstance cube, TSRange tsRange, Segment
                 newSeg = getCubeManager().refreshSegment(cube, tsRange, segRange);
                 job = EngineFactory.createBatchCubingJob(newSeg, submitter);
             } else {
-                throw new BadRequestException(String.format(msg.getINVALID_BUILD_TYPE(), buildType));
+                throw new BadRequestException(String.format(Locale.ROOT, msg.getINVALID_BUILD_TYPE(), buildType));
             }
 
             getExecutableManager().addJob(job);
@@ -289,7 +289,7 @@ public JobInstance submitJobInternal(CubeInstance cube, TSRange tsRange, Segment
         Message msg = MsgPicker.getMsg();
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
-            throw new BadRequestException(String.format(msg.getBUILD_BROKEN_CUBE(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getBUILD_BROKEN_CUBE(), cube.getName()));
         }
 
         checkCubeDescSignature(cube);
@@ -414,7 +414,7 @@ private void checkCubeDescSignature(CubeInstance cube) {
 
         if (!cube.getDescriptor().checkSignature())
             throw new BadRequestException(
-                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+                    String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
     }
 
     private void checkAllowBuilding(CubeInstance cube) {
@@ -476,7 +476,7 @@ protected JobInstance getSingleJobInstance(AbstractExecutable job) {
             return null;
         }
         if (!(job instanceof CubingJob)) {
-            throw new BadRequestException(String.format(msg.getILLEGAL_JOB_TYPE(), job.getId()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_JOB_TYPE(), job.getId()));
         }
 
         CubingJob cubeJob = (CubingJob) job;
@@ -536,7 +536,7 @@ protected JobInstance getCheckpointJobInstance(AbstractExecutable job) {
             return null;
         }
         if (!(job instanceof CheckpointExecutable)) {
-            throw new BadRequestException(String.format(msg.getILLEGAL_JOB_TYPE(), job.getId()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_JOB_TYPE(), job.getId()));
         }
 
         CheckpointExecutable checkpointExecutable = (CheckpointExecutable) job;
@@ -736,7 +736,7 @@ public void dropJob(JobInstance job) {
             aclEvaluate.checkProjectOperationPermission(projectName);
         }
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -786,7 +786,8 @@ public boolean apply(AbstractExecutable executable) {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -835,7 +836,8 @@ public boolean apply(@Nullable CubingJob cubeJob) {
                                 if (nameExactMatch) {
                                     return cubeJob.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return cubeJob.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return cubeJob.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
@@ -846,7 +848,7 @@ public boolean apply(@Nullable CubingJob cubeJob) {
             final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
         // TODO: use cache of jobs for this method
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -884,7 +886,8 @@ public boolean apply(AbstractExecutable executable) {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -933,7 +936,8 @@ public boolean apply(@Nullable CheckpointExecutable checkpointExecutable) {
                                 if (nameExactMatch) {
                                     return checkpointExecutable.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return checkpointExecutable.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return checkpointExecutable.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
@@ -1006,7 +1010,7 @@ public boolean apply(@Nullable CheckpointExecutable checkpointExecutable) {
             aclEvaluate.checkProjectOperationPermission(projectName);
         }
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -1038,7 +1042,7 @@ public boolean apply(@Nullable JobSearchResult input) {
             aclEvaluate.checkProjectOperationPermission(projectName);
         }
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -1087,7 +1091,8 @@ public boolean apply(AbstractExecutable executable) {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -1133,7 +1138,8 @@ public boolean apply(@Nullable CubingJob cubeJob) {
                                 if (nameExactMatch) {
                                     return cubeJob.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return cubeJob.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return cubeJob.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
@@ -1160,7 +1166,8 @@ public boolean apply(AbstractExecutable executable) {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -1206,7 +1213,8 @@ public boolean apply(@Nullable CheckpointExecutable checkpointExecutable) {
                                 if (nameExactMatch) {
                                     return checkpointExecutable.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return checkpointExecutable.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return checkpointExecutable.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java b/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
index 32c7339ba2..ad0de2e141 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.msg.Message;
@@ -52,7 +53,8 @@
         return kafkaConfigs;
     }
 
-    public List<KafkaConfig> getKafkaConfigs(final String kafkaConfigName, final String project, final Integer limit, final Integer offset) throws IOException {
+    public List<KafkaConfig> getKafkaConfigs(final String kafkaConfigName, final String project, final Integer limit,
+            final Integer offset) throws IOException {
         aclEvaluate.checkProjectWritePermission(project);
         List<KafkaConfig> kafkaConfigs;
         kafkaConfigs = listAllKafkaConfigs(kafkaConfigName);
@@ -73,7 +75,8 @@ public KafkaConfig createKafkaConfig(KafkaConfig config, String project) throws
         Message msg = MsgPicker.getMsg();
 
         if (getKafkaManager().getKafkaConfig(config.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getKAFKA_CONFIG_ALREADY_EXIST(), config.getName()));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getKAFKA_CONFIG_ALREADY_EXIST(), config.getName()));
         }
         getKafkaManager().createKafkaConfig(config);
         return config;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
index 0cc48a9593..eea8cd7096 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import javax.annotation.PostConstruct;
 
@@ -118,7 +119,7 @@ public UserDetails loadUserByUsername(String userName) throws UsernameNotFoundEx
         Message msg = MsgPicker.getMsg();
         ManagedUser managedUser = getKylinUserManager().get(userName);
         if (managedUser == null) {
-            throw new UsernameNotFoundException(String.format(msg.getUSER_NOT_FOUND(), userName));
+            throw new UsernameNotFoundException(String.format(Locale.ROOT, msg.getUSER_NOT_FOUND(), userName));
         }
         logger.trace("load user : {}", userName);
         return managedUser;
@@ -130,7 +131,7 @@ public UserDetails loadUserByUsername(String userName) throws UsernameNotFoundEx
     }
 
     @Override
-    public List<String> listAdminUsers() throws IOException{
+    public List<String> listAdminUsers() throws IOException {
         List<String> adminUsers = new ArrayList<>();
         for (ManagedUser managedUser : listUsers()) {
             if (managedUser.getAuthorities().contains(new SimpleGrantedAuthority(Constant.ROLE_ADMIN))) {
@@ -141,7 +142,7 @@ public UserDetails loadUserByUsername(String userName) throws UsernameNotFoundEx
     }
 
     @Override
-    public void completeUserInfo(ManagedUser user){
+    public void completeUserInfo(ManagedUser user) {
     }
 
     public static String getId(String userName) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
index b3f6e2d4dd..f3281e2182 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -23,6 +23,7 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -97,8 +98,10 @@ public boolean isModelNameValidate(final String modelName) {
         List<DataModelDesc> filterModels = new ArrayList<DataModelDesc>();
         for (DataModelDesc modelDesc : models) {
             boolean isModelMatch = (null == modelName) || modelName.length() == 0
-                    || (exactMatch && modelDesc.getName().toLowerCase().equals(modelName.toLowerCase()))
-                    || (!exactMatch && modelDesc.getName().toLowerCase().contains(modelName.toLowerCase()));
+                    || (exactMatch
+                            && modelDesc.getName().toLowerCase(Locale.ROOT).equals(modelName.toLowerCase(Locale.ROOT)))
+                    || (!exactMatch && modelDesc.getName().toLowerCase(Locale.ROOT)
+                            .contains(modelName.toLowerCase(Locale.ROOT)));
 
             if (isModelMatch) {
                 filterModels.add(modelDesc);
@@ -130,7 +133,7 @@ public DataModelDesc createModelDesc(String projectName, DataModelDesc desc) thr
         aclEvaluate.checkProjectWritePermission(projectName);
         Message msg = MsgPicker.getMsg();
         if (getDataModelManager().getDataModelDesc(desc.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getDUPLICATE_MODEL_NAME(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDUPLICATE_MODEL_NAME(), desc.getName()));
         }
 
         String factTableName = desc.getRootFactTableName();
@@ -159,7 +162,8 @@ public void dropModel(DataModelDesc desc) throws IOException {
         List<CubeDesc> cubeDescs = getCubeDescManager().listAllDesc();
         for (CubeDesc cubeDesc : cubeDescs) {
             if (cubeDesc.getModelName().equals(desc.getName())) {
-                throw new BadRequestException(String.format(msg.getDROP_REFERENCED_MODEL(), cubeDesc.getName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getDROP_REFERENCED_MODEL(), cubeDesc.getName()));
             }
         }
 
@@ -338,7 +342,7 @@ public void primaryCheck(DataModelDesc modelDesc) {
         }
         if (!ValidateUtil.isAlphanumericUnderscore(modelName)) {
             logger.info("Invalid model name {}, only letters, numbers and underscore supported.", modelDesc.getName());
-            throw new BadRequestException(String.format(msg.getINVALID_MODEL_NAME(), modelName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getINVALID_MODEL_NAME(), modelName));
         }
     }
 
@@ -368,7 +372,7 @@ public DataModelDesc updateModelToResourceStore(DataModelDesc modelDesc, String
         }
 
         if (!modelDesc.getError().isEmpty()) {
-            throw new BadRequestException(String.format(msg.getBROKEN_MODEL_DESC(), modelDesc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getBROKEN_MODEL_DESC(), modelDesc.getName()));
         }
 
         return modelDesc;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index 22ee95e0b2..a7fec44a81 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -23,6 +23,8 @@
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
+
 import java.util.Set;
 import javax.annotation.Nullable;
 import org.apache.directory.api.util.Strings;
@@ -81,7 +83,7 @@ public ProjectInstance createProject(ProjectInstance newProject) throws IOExcept
         ProjectInstance currentProject = getProjectManager().getProject(projectName);
 
         if (currentProject != null) {
-            throw new BadRequestException(String.format(msg.getPROJECT_ALREADY_EXIST(), projectName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getPROJECT_ALREADY_EXIST(), projectName));
         }
         String owner = SecurityContextHolder.getContext().getAuthentication().getName();
         ProjectInstance createdProject = getProjectManager().createProject(projectName, owner, description,
@@ -138,6 +140,7 @@ public void deleteProject(String projectName, ProjectInstance project) throws IO
             tableService.unloadHiveTable(table, projectName);
         }
         getProjectManager().dropProject(projectName);
+
         accessService.clean(project, true);
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index f94ba26bdb..7bef0f22d5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -41,16 +41,13 @@
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
 
 import javax.annotation.PostConstruct;
 
-import net.sf.ehcache.Cache;
-import net.sf.ehcache.CacheManager;
-import net.sf.ehcache.Element;
-
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.calcite.config.CalciteConnectionConfig;
 import org.apache.calcite.jdbc.CalcitePrepare;
@@ -128,6 +125,10 @@
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
+import net.sf.ehcache.Cache;
+import net.sf.ehcache.CacheManager;
+import net.sf.ehcache.Element;
+
 /**
  * @author xduo
  */
@@ -171,7 +172,8 @@ public QueryService() {
         config.setMaxTotal(kylinConfig.getQueryMaxCacheStatementNum());
         config.setBlockWhenExhausted(false);
         config.setMinEvictableIdleTimeMillis(10 * 60 * 1000L); // cached statement will be evict if idle for 10 minutes
-        GenericKeyedObjectPool<PreparedContextKey, PreparedContext> pool = new GenericKeyedObjectPool<>(factory, config);
+        GenericKeyedObjectPool<PreparedContextKey, PreparedContext> pool = new GenericKeyedObjectPool<>(factory,
+                config);
         return pool;
     }
 
@@ -357,9 +359,9 @@ public SQLResponse doQueryWithCache(SQLRequest sqlRequest, boolean isQueryInspec
 
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (!(Constant.SERVER_MODE_QUERY.equals(serverMode.toLowerCase())
-                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase()))) {
-            throw new BadRequestException(String.format(msg.getQUERY_NOT_ALLOWED(), serverMode));
+        if (!(Constant.SERVER_MODE_QUERY.equals(serverMode.toLowerCase(Locale.ROOT))
+                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase(Locale.ROOT)))) {
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getQUERY_NOT_ALLOWED(), serverMode));
         }
         if (StringUtils.isBlank(sqlRequest.getProject())) {
             throw new BadRequestException(msg.getEMPTY_PROJECT_NAME());
@@ -537,7 +539,7 @@ private SQLResponse queryWithSqlMassage(SQLRequest sqlRequest) throws Exception
         boolean borrowPrepareContext = false;
         PreparedContextKey preparedContextKey = null;
         PreparedContext preparedContext = null;
-        
+
         try {
             conn = QueryConnection.getConnection(sqlRequest.getProject());
             String userInfo = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -573,7 +575,7 @@ private SQLResponse queryWithSqlMassage(SQLRequest sqlRequest) throws Exception
             OLAPContext.setParameters(parameters);
             // force clear the query context before a new query
             OLAPContext.clearThreadLocalContexts();
-            
+
             // special case for prepare query.
             List<List<String>> results = Lists.newArrayList();
             List<SelectedColumnMeta> columnMetas = Lists.newArrayList();
@@ -683,7 +685,7 @@ private void resetRealizationInContext(OLAPContext olapContext) {
                         columnMeta.getString(23));
 
                 if (!"metadata".equalsIgnoreCase(colmnMeta.getTABLE_SCHEM())
-                        && !colmnMeta.getCOLUMN_NAME().toUpperCase().startsWith("_KY_")) {
+                        && !colmnMeta.getCOLUMN_NAME().toUpperCase(Locale.ROOT).startsWith("_KY_")) {
                     tableMap.get(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME()).addColumn(colmnMeta);
                 }
             }
@@ -759,7 +761,7 @@ private void resetRealizationInContext(OLAPContext olapContext) {
                         columnMeta.getString(23));
 
                 if (!"metadata".equalsIgnoreCase(colmnMeta.getTABLE_SCHEM())
-                        && !colmnMeta.getCOLUMN_NAME().toUpperCase().startsWith("_KY_")) {
+                        && !colmnMeta.getCOLUMN_NAME().toUpperCase(Locale.ROOT).startsWith("_KY_")) {
                     tableMap.get(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME()).addColumn(colmnMeta);
                     columnMap.put(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME() + "#"
                             + colmnMeta.getCOLUMN_NAME(), colmnMeta);
@@ -880,7 +882,7 @@ private SQLResponse executeRequest(String correctedSql, SQLRequest sqlRequest, C
             processStatementAttr(stat, sqlRequest);
             resultSet = stat.executeQuery(correctedSql);
 
-            r = createResponseFromResultSet(resultSet); 
+            r = createResponseFromResultSet(resultSet);
 
         } catch (SQLException sqlException) {
             r = pushDownQuery(sqlRequest, correctedSql, conn, sqlException);
@@ -895,8 +897,8 @@ private SQLResponse executeRequest(String correctedSql, SQLRequest sqlRequest, C
         return buildSqlResponse(isPushDown, r.getFirst(), r.getSecond());
     }
 
-    private SQLResponse executePrepareRequest(String correctedSql, PrepareSqlRequest sqlRequest, PreparedContext preparedContext)
-            throws Exception {
+    private SQLResponse executePrepareRequest(String correctedSql, PrepareSqlRequest sqlRequest,
+            PreparedContext preparedContext) throws Exception {
         ResultSet resultSet = null;
         boolean isPushDown = false;
 
@@ -923,7 +925,8 @@ private SQLResponse executePrepareRequest(String correctedSql, PrepareSqlRequest
         return buildSqlResponse(isPushDown, r.getFirst(), r.getSecond());
     }
 
-    private Pair<List<List<String>>, List<SelectedColumnMeta>> pushDownQuery(SQLRequest sqlRequest, String correctedSql, Connection conn, SQLException sqlException) throws Exception{
+    private Pair<List<List<String>>, List<SelectedColumnMeta>> pushDownQuery(SQLRequest sqlRequest, String correctedSql,
+            Connection conn, SQLException sqlException) throws Exception {
         try {
             return PushDownUtil.tryPushDownSelectQuery(sqlRequest.getProject(), correctedSql, conn.getSchema(),
                     sqlException, BackdoorToggles.getPrepareOnly());
@@ -944,12 +947,13 @@ private SQLResponse executePrepareRequest(String correctedSql, PrepareSqlRequest
 
         // Fill in selected column meta
         for (int i = 1; i <= columnCount; ++i) {
-            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), metaData
-                    .isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i), metaData
-                    .getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i), metaData
-                    .getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i), metaData.getPrecision(i),
-                    metaData.getScale(i), metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData
-                            .isReadOnly(i), metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
+            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
+                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i),
+                    metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i),
+                    metaData.getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i),
+                    metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
+                    metaData.getColumnTypeName(i), metaData.isReadOnly(i), metaData.isWritable(i),
+                    metaData.isDefinitelyWritable(i)));
         }
 
         // fill in results
@@ -1147,7 +1151,7 @@ public void setCacheManager(CacheManager cacheManager) {
         this.cacheManager = cacheManager;
     }
 
-    private static PreparedContext createPreparedContext(String project, String sql) throws Exception{
+    private static PreparedContext createPreparedContext(String project, String sql) throws Exception {
         Connection conn = QueryConnection.getConnection(project);
         PreparedStatement preparedStatement = conn.prepareStatement(sql);
         Collection<OLAPContext> olapContexts = OLAPContext.getThreadLocalContexts();
@@ -1179,8 +1183,8 @@ public QueryRecord deserialize(DataInputStream in) throws IOException {
         }
     }
 
-    private static class PreparedContextFactory extends
-            BaseKeyedPooledObjectFactory<PreparedContextKey, PreparedContext> {
+    private static class PreparedContextFactory
+            extends BaseKeyedPooledObjectFactory<PreparedContextKey, PreparedContext> {
 
         @Override
         public PreparedContext create(PreparedContextKey key) throws Exception {
@@ -1217,13 +1221,17 @@ public PreparedContextKey(String project, long prjLastModifyTime, String sql) {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o)
+                return true;
+            if (o == null || getClass() != o.getClass())
+                return false;
 
             PreparedContextKey that = (PreparedContextKey) o;
 
-            if (prjLastModifyTime != that.prjLastModifyTime) return false;
-            if (project != null ? !project.equals(that.project) : that.project != null) return false;
+            if (prjLastModifyTime != that.prjLastModifyTime)
+                return false;
+            if (project != null ? !project.equals(that.project) : that.project != null)
+                return false;
             return sql != null ? sql.equals(that.sql) : that.sql == null;
 
         }
@@ -1243,7 +1251,7 @@ public int hashCode() {
         private Collection<OLAPContext> olapContexts;
 
         public PreparedContext(Connection conn, PreparedStatement preparedStatement,
-                               Collection<OLAPContext> olapContexts) {
+                Collection<OLAPContext> olapContexts) {
             this.conn = conn;
             this.preparedStatement = preparedStatement;
             this.olapContexts = olapContexts;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index d4d7cc71f1..09acb58d04 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.metadata.streaming.StreamingConfig;
@@ -50,7 +51,8 @@
         return streamingConfigs;
     }
 
-    public List<StreamingConfig> getStreamingConfigs(final String table, final String project, final Integer limit, final Integer offset) throws IOException {
+    public List<StreamingConfig> getStreamingConfigs(final String table, final String project, final Integer limit,
+            final Integer offset) throws IOException {
         aclEvaluate.checkProjectWritePermission(project);
         List<StreamingConfig> streamingConfigs;
         streamingConfigs = listAllStreamingConfigs(table);
@@ -71,7 +73,8 @@ public StreamingConfig createStreamingConfig(StreamingConfig config, String proj
         Message msg = MsgPicker.getMsg();
 
         if (getStreamingManager().getStreamingConfig(config.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getSTREAMING_CONFIG_ALREADY_EXIST(), config.getName()));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getSTREAMING_CONFIG_ALREADY_EXIST(), config.getName()));
         }
         StreamingConfig streamingConfig = getStreamingManager().createStreamingConfig(config);
         return streamingConfig;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java b/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
index cffe36dd62..89a505a256 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
@@ -22,6 +22,7 @@
 import static java.lang.String.format;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import javax.annotation.Nullable;
@@ -60,15 +61,17 @@ void raiseExceptionWhenInvalid() {
         }
 
         static CheckResult validOnFirstLoad(String tableName) {
-            return new CheckResult(true, format("Table '%s' hasn't been loaded before", tableName));
+            return new CheckResult(true, format(Locale.ROOT, "Table '%s' hasn't been loaded before", tableName));
         }
 
         static CheckResult validOnCompatibleSchema(String tableName) {
-            return new CheckResult(true, format("Table '%s' is compatible with all existing cubes", tableName));
+            return new CheckResult(true,
+                    format(Locale.ROOT, "Table '%s' is compatible with all existing cubes", tableName));
         }
 
         static CheckResult invalidOnFetchSchema(String tableName, Exception e) {
-            return new CheckResult(false, format("Failed to fetch metadata of '%s': %s", tableName, e.getMessage()));
+            return new CheckResult(false,
+                    format(Locale.ROOT, "Failed to fetch metadata of '%s': %s", tableName, e.getMessage()));
         }
 
         static CheckResult invalidOnIncompatibleSchema(String tableName, List<String> reasons) {
@@ -77,7 +80,10 @@ static CheckResult invalidOnIncompatibleSchema(String tableName, List<String> re
                 buf.append("- ").append(reason).append("\n");
             }
 
-            return new CheckResult(false, format("Found %d issue(s) with '%s':%n%s Please disable and purge related cube(s) first", reasons.size(), tableName, buf.toString()));
+            return new CheckResult(false,
+                    format(Locale.ROOT,
+                            "Found %d issue(s) with '%s':%n%s Please disable and " + "purge related " + "cube(s) first",
+                            reasons.size(), tableName, buf.toString()));
         }
     }
 
@@ -87,18 +93,19 @@ static CheckResult invalidOnIncompatibleSchema(String tableName, List<String> re
     }
 
     private List<CubeInstance> findCubeByTable(final TableDesc table) {
-        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(), new Predicate<CubeInstance>() {
-            @Override
-            public boolean apply(@Nullable CubeInstance cube) {
-                if (cube == null || cube.allowBrokenDescriptor()) {
-                    return false;
-                }
-                DataModelDesc model = cube.getModel();
-                if (model == null)
-                    return false;
-                return model.containsTable(table);
-            }
-        });
+        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(),
+                new Predicate<CubeInstance>() {
+                    @Override
+                    public boolean apply(@Nullable CubeInstance cube) {
+                        if (cube == null || cube.allowBrokenDescriptor()) {
+                            return false;
+                        }
+                        DataModelDesc model = cube.getModel();
+                        if (model == null)
+                            return false;
+                        return model.containsTable(table);
+                    }
+                });
 
         return ImmutableList.copyOf(relatedCubes);
     }
@@ -185,7 +192,8 @@ public CheckResult allowReload(TableDesc newTableDesc, String prj) {
                 TableDesc factTable = cube.getModel().findFirstTable(fullTableName).getTableDesc();
                 List<String> violateColumns = checkAllColumnsInCube(cube, factTable, newTableDesc);
                 if (!violateColumns.isEmpty()) {
-                    issues.add(format("Column %s used in cube[%s] and model[%s], but changed in hive", violateColumns, cube.getName(), modelName));
+                    issues.add(format(Locale.ROOT, "Column %s used in cube[%s] and model[%s], but changed " + "in hive",
+                            violateColumns, cube.getName(), modelName));
                 }
             }
 
@@ -194,7 +202,8 @@ public CheckResult allowReload(TableDesc newTableDesc, String prj) {
             if (cube.getModel().isLookupTable(fullTableName)) {
                 TableDesc lookupTable = cube.getModel().findFirstTable(fullTableName).getTableDesc();
                 if (!checkAllColumnsInTableDesc(lookupTable, newTableDesc)) {
-                    issues.add(format("Table '%s' is used as Lookup Table in cube[%s] and model[%s], but changed in hive", lookupTable.getIdentity(), cube.getName(), modelName));
+                    issues.add(format(Locale.ROOT, "Table '%s' is used as Lookup Table in cube[%s] and model[%s], but "
+                            + "changed in " + "hive", lookupTable.getIdentity(), cube.getName(), modelName));
                 }
             }
         }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java b/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
index 1bb03e4b2f..4009fc9851 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
@@ -25,6 +25,7 @@
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.UUID;
 
@@ -138,7 +139,8 @@ public TableDesc getTableDescByName(String tableName, boolean withExt, String pr
         return loadTablesToProject(Lists.newArrayList(Pair.newPair(tableDesc, extDesc)), project);
     }
 
-    private String[] loadTablesToProject(List<Pair<TableDesc, TableExtDesc>> allMeta, String project) throws IOException {
+    private String[] loadTablesToProject(List<Pair<TableDesc, TableExtDesc>> allMeta, String project)
+            throws IOException {
         aclEvaluate.checkProjectAdminPermission(project);
         // do schema check
         TableMetadataManager metaMgr = getTableManager();
@@ -187,7 +189,7 @@ public TableDesc getTableDescByName(String tableName, boolean withExt, String pr
         return result;
     }
 
-    public List<Pair<TableDesc, TableExtDesc>> extractHiveTableMeta(String[] tables, String project) throws Exception {        // de-dup
+    public List<Pair<TableDesc, TableExtDesc>> extractHiveTableMeta(String[] tables, String project) throws Exception { // de-dup
         SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
         for (String fullTableName : tables) {
             String[] parts = HadoopUtil.parseHiveTableName(fullTableName);
@@ -201,10 +203,10 @@ public TableDesc getTableDescByName(String tableName, boolean withExt, String pr
         for (Map.Entry<String, String> entry : db2tables.entries()) {
             Pair<TableDesc, TableExtDesc> pair = explr.loadTableMetadata(entry.getKey(), entry.getValue(), project);
             TableDesc tableDesc = pair.getFirst();
-            Preconditions.checkState(tableDesc.getDatabase().equals(entry.getKey().toUpperCase()));
-            Preconditions.checkState(tableDesc.getName().equals(entry.getValue().toUpperCase()));
+            Preconditions.checkState(tableDesc.getDatabase().equals(entry.getKey().toUpperCase(Locale.ROOT)));
+            Preconditions.checkState(tableDesc.getName().equals(entry.getValue().toUpperCase(Locale.ROOT)));
             Preconditions.checkState(tableDesc.getIdentity()
-                    .equals(entry.getKey().toUpperCase() + "." + entry.getValue().toUpperCase()));
+                    .equals(entry.getKey().toUpperCase(Locale.ROOT) + "." + entry.getValue().toUpperCase(Locale.ROOT)));
             TableExtDesc extDesc = pair.getSecond();
             Preconditions.checkState(tableDesc.getIdentity().equals(extDesc.getIdentity()));
             allMeta.add(pair);
@@ -250,7 +252,7 @@ public boolean unloadHiveTable(String tableName, String project) throws IOExcept
             rtn = true;
         } else {
             List<String> models = modelService.getModelsUsingTable(desc, project);
-            throw new BadRequestException(String.format(msg.getTABLE_IN_USE_BY_MODEL(), models));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getTABLE_IN_USE_BY_MODEL(), models));
         }
 
         // it is a project local table, ready to remove since no model is using it within the project
@@ -347,7 +349,8 @@ public void updateSnapshotLocalCache(String project, String tableName, String sn
         ExtTableSnapshotInfo extTableSnapshotInfo = snapshotInfoManager.getSnapshot(tableName, snapshotID);
         TableDesc tableDesc = getTableManager().getTableDesc(tableName, project);
         if (extTableSnapshotInfo == null) {
-            throw new IllegalArgumentException("cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
+            throw new IllegalArgumentException(
+                    "cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
         }
         LookupProviderFactory.rebuildLocalCache(tableDesc, extTableSnapshotInfo);
     }
@@ -356,7 +359,8 @@ public void removeSnapshotLocalCache(String tableName, String snapshotID) {
         ExtTableSnapshotInfoManager snapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(getConfig());
         ExtTableSnapshotInfo extTableSnapshotInfo = snapshotInfoManager.getSnapshot(tableName, snapshotID);
         if (extTableSnapshotInfo == null) {
-            throw new IllegalArgumentException("cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
+            throw new IllegalArgumentException(
+                    "cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
         }
         LookupProviderFactory.removeLocalCache(extTableSnapshotInfo);
     }
@@ -365,7 +369,8 @@ public String getSnapshotLocalCacheState(String tableName, String snapshotID) {
         ExtTableSnapshotInfoManager snapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(getConfig());
         ExtTableSnapshotInfo extTableSnapshotInfo = snapshotInfoManager.getSnapshot(tableName, snapshotID);
         if (extTableSnapshotInfo == null) {
-            throw new IllegalArgumentException("cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
+            throw new IllegalArgumentException(
+                    "cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
         }
         CacheState cacheState = LookupProviderFactory.getCacheState(extTableSnapshotInfo);
         return cacheState.name();
@@ -378,7 +383,8 @@ public String getSnapshotLocalCacheState(String tableName, String snapshotID) {
         return internalGetLookupTableSnapshots(tableName, signature);
     }
 
-    List<TableSnapshotResponse> internalGetLookupTableSnapshots(String tableName, TableSignature signature) throws IOException {
+    List<TableSnapshotResponse> internalGetLookupTableSnapshots(String tableName, TableSignature signature)
+            throws IOException {
         ExtTableSnapshotInfoManager extSnapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(getConfig());
         SnapshotManager snapshotManager = SnapshotManager.getInstance(getConfig());
         List<ExtTableSnapshotInfo> extTableSnapshots = extSnapshotInfoManager.getSnapshots(tableName);
@@ -459,7 +465,8 @@ public void calculateCardinality(String tableName, String submitter, String prj)
         TableDesc table = getTableManager().getTableDesc(tableName, prj);
         final TableExtDesc tableExt = getTableManager().getTableExt(tableName, prj);
         if (table == null) {
-            BadRequestException e = new BadRequestException(String.format(msg.getTABLE_DESC_NOT_FOUND(), tableName));
+            BadRequestException e = new BadRequestException(
+                    String.format(Locale.ROOT, msg.getTABLE_DESC_NOT_FOUND(), tableName));
             logger.error("Cannot find table descriptor " + tableName, e);
             throw e;
         }
@@ -495,6 +502,6 @@ public void calculateCardinality(String tableName, String submitter, String prj)
 
     public String normalizeHiveTableName(String tableName) {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
-        return (dbTableName[0] + "." + dbTableName[1]).toUpperCase();
+        return (dbTableName[0] + "." + dbTableName[1]).toUpperCase(Locale.ROOT);
     }
 }
diff --git a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
index 08010e4c9d..a7aea99915 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
@@ -28,6 +28,7 @@
 import java.lang.reflect.Proxy;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -42,7 +43,8 @@
     /**
      * Tests the get/set methods of the specified class.
      */
-    public static <T> void validateAccssor(final Class<T> clazz, final String... skipThese) throws IntrospectionException {
+    public static <T> void validateAccssor(final Class<T> clazz, final String... skipThese)
+            throws IntrospectionException {
         final PropertyDescriptor[] props = Introspector.getBeanInfo(clazz).getPropertyDescriptors();
         for (PropertyDescriptor prop : props) {
 
@@ -69,18 +71,23 @@
 
                         setter.invoke(bean, value);
 
-                        Assert.assertEquals(String.format("Failed while testing property %s", prop.getName()), value, getter.invoke(bean));
+                        Assert.assertEquals(
+                                String.format(Locale.ROOT, "Failed while testing property %s", prop.getName()), value,
+                                getter.invoke(bean));
 
                     } catch (Exception ex) {
                         ex.printStackTrace();
-                        System.err.println(String.format("An exception was thrown while testing the property %s: %s", prop.getName(), ex.toString()));
+                        System.err.println(
+                                String.format(Locale.ROOT, "An exception was thrown while testing the property %s: %s",
+                                        prop.getName(), ex.toString()));
                     }
                 }
             }
         }
     }
 
-    private static Object buildValue(Class<?> clazz) throws InstantiationException, IllegalAccessException, IllegalArgumentException, SecurityException, InvocationTargetException {
+    private static Object buildValue(Class<?> clazz) throws InstantiationException, IllegalAccessException,
+            IllegalArgumentException, SecurityException, InvocationTargetException {
 
         final Constructor<?>[] ctrs = clazz.getConstructors();
         for (Constructor<?> ctr : ctrs) {
@@ -117,30 +124,33 @@ private static Object buildValue(Class<?> clazz) throws InstantiationException,
         } else if (clazz.isEnum()) {
             return clazz.getEnumConstants()[0];
         } else if (clazz.isInterface()) {
-            return Proxy.newProxyInstance(clazz.getClassLoader(), new java.lang.Class[] { clazz }, new java.lang.reflect.InvocationHandler() {
-                @Override
-                public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
-                    if (Object.class.getMethod("equals", Object.class).equals(method)) {
-                        return proxy == args[0];
-                    }
-                    if (Object.class.getMethod("hashCode", Object.class).equals(method)) {
-                        return Integer.valueOf(System.identityHashCode(proxy));
-                    }
-                    if (Object.class.getMethod("toString", Object.class).equals(method)) {
-                        return "Bean " + getMockedType(proxy);
-                    }
-
-                    return null;
-                }
-
-            });
+            return Proxy.newProxyInstance(clazz.getClassLoader(), new java.lang.Class[] { clazz },
+                    new java.lang.reflect.InvocationHandler() {
+                        @Override
+                        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+                            if (Object.class.getMethod("equals", Object.class).equals(method)) {
+                                return proxy == args[0];
+                            }
+                            if (Object.class.getMethod("hashCode", Object.class).equals(method)) {
+                                return Integer.valueOf(System.identityHashCode(proxy));
+                            }
+                            if (Object.class.getMethod("toString", Object.class).equals(method)) {
+                                return "Bean " + getMockedType(proxy);
+                            }
+
+                            return null;
+                        }
+
+                    });
         } else {
-            System.err.println("Unable to build an instance of class " + clazz.getName() + ", please add some code to the " + BeanValidator.class.getName() + " class to do this.");
+            System.err.println("Unable to build an instance of class " + clazz.getName()
+                    + ", please add some code to the " + BeanValidator.class.getName() + " class to do this.");
             return null;
         }
     }
 
-    public static <T> void findBooleanIsMethods(Class<T> clazz, PropertyDescriptor descriptor) throws IntrospectionException {
+    public static <T> void findBooleanIsMethods(Class<T> clazz, PropertyDescriptor descriptor)
+            throws IntrospectionException {
         if (descriptor.getReadMethod() == null && descriptor.getPropertyType() == Boolean.class) {
             try {
                 PropertyDescriptor pd = new PropertyDescriptor(descriptor.getName(), clazz);
diff --git a/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java b/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java
index 171d22e5f2..bbbecf602d 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java
@@ -18,35 +18,36 @@
 
 package org.apache.kylin.rest.controller;
 
+import java.io.IOException;
+import java.util.Locale;
+
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.junit.Assert;
 import org.junit.Test;
 
-import java.io.IOException;
-
 public class StreamingControllerTest {
 
     @Test
     public void testReadTableDesc() throws IOException {
-        String requestTableData = "{\"name\":\"my_table_name\",\"source_type\":1,\"columns\":[{\"id\":1,\"name\":" +
-                "\"amount\",\"datatype\":\"decimal\"},{\"id\":2,\"name\":\"category\",\"datatype\":\"varchar(256)\"}," +
-                "{\"id\":3,\"name\":\"order_time\",\"datatype\":\"timestamp\"},{\"id\":4,\"name\":\"device\"," +
-                "\"datatype\":\"varchar(256)\"},{\"id\":5,\"name\":\"qty\",\"datatype\":\"int\"},{\"id\":6,\"name\":" +
-                "\"user_id\",\"datatype\":\"varchar(256)\"},{\"id\":7,\"name\":\"user_age\",\"datatype\":\"int\"}," +
-                "{\"id\":8,\"name\":\"user_gender\",\"datatype\":\"varchar(256)\"},{\"id\":9,\"name\":\"currency\"," +
-                "\"datatype\":\"varchar(256)\"},{\"id\":10,\"name\":\"country\",\"datatype\":\"varchar(256)\"}," +
-                "{\"id\":11,\"name\":\"year_start\",\"datatype\":\"date\"},{\"id\":12,\"name\":\"quarter_start\"," +
-                "\"datatype\":\"date\"},{\"id\":13,\"name\":\"month_start\",\"datatype\":\"date\"},{\"id\":14," +
-                "\"name\":\"week_start\",\"datatype\":\"date\"},{\"id\":15,\"name\":\"day_start\",\"datatype\":" +
-                "\"date\"},{\"id\":16,\"name\":\"hour_start\",\"datatype\":\"timestamp\"},{\"id\":17,\"name\":" +
-                "\"minute_start\",\"datatype\":\"timestamp\"}],\"database\":\"my_database_name\"}";
+        String requestTableData = "{\"name\":\"my_table_name\",\"source_type\":1,\"columns\":[{\"id\":1,\"name\":"
+                + "\"amount\",\"datatype\":\"decimal\"},{\"id\":2,\"name\":\"category\",\"datatype\":\"varchar(256)\"},"
+                + "{\"id\":3,\"name\":\"order_time\",\"datatype\":\"timestamp\"},{\"id\":4,\"name\":\"device\","
+                + "\"datatype\":\"varchar(256)\"},{\"id\":5,\"name\":\"qty\",\"datatype\":\"int\"},{\"id\":6,\"name\":"
+                + "\"user_id\",\"datatype\":\"varchar(256)\"},{\"id\":7,\"name\":\"user_age\",\"datatype\":\"int\"},"
+                + "{\"id\":8,\"name\":\"user_gender\",\"datatype\":\"varchar(256)\"},{\"id\":9,\"name\":\"currency\","
+                + "\"datatype\":\"varchar(256)\"},{\"id\":10,\"name\":\"country\",\"datatype\":\"varchar(256)\"},"
+                + "{\"id\":11,\"name\":\"year_start\",\"datatype\":\"date\"},{\"id\":12,\"name\":\"quarter_start\","
+                + "\"datatype\":\"date\"},{\"id\":13,\"name\":\"month_start\",\"datatype\":\"date\"},{\"id\":14,"
+                + "\"name\":\"week_start\",\"datatype\":\"date\"},{\"id\":15,\"name\":\"day_start\",\"datatype\":"
+                + "\"date\"},{\"id\":16,\"name\":\"hour_start\",\"datatype\":\"timestamp\"},{\"id\":17,\"name\":"
+                + "\"minute_start\",\"datatype\":\"timestamp\"}],\"database\":\"my_database_name\"}";
         TableDesc desc = JsonUtil.readValue(requestTableData, TableDesc.class);
         String[] dbTable = HadoopUtil.parseHiveTableName(desc.getIdentity());
         desc.setName(dbTable[1]);
         desc.setDatabase(dbTable[0]);
-        Assert.assertEquals("my_table_name".toUpperCase(), desc.getName());
-        Assert.assertEquals("my_database_name".toUpperCase(), desc.getDatabase());
+        Assert.assertEquals("my_table_name".toUpperCase(Locale.ROOT), desc.getName());
+        Assert.assertEquals("my_database_name".toUpperCase(Locale.ROOT), desc.getDatabase());
     }
 }
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java b/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java
index fd9bfa97ad..3460d5c23e 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.source.hive;
 
+import java.util.Locale;
+
 import org.apache.commons.configuration.PropertiesConfiguration;
 
 public class DBConnConf {
@@ -49,7 +51,7 @@ public DBConnConf(String driver, String url, String user, String pass) {
     }
 
     public String toString() {
-        return String.format("%s,%s,%s,%s", driver, url, user, pass);
+        return String.format(Locale.ROOT, "%s,%s,%s,%s", driver, url, user, pass);
     }
 
     public String getDriver() {
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java
index 94c1a02d0a..2c998dfb08 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.hadoop.fs.FileSystem;
@@ -54,7 +55,7 @@ protected static String getTableNameForHCat(TableDesc table, String uuid) {
         String tableName = (table.isView()) ? table.getMaterializedName(uuid) : table.getName();
         String database = (table.isView()) ? KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable()
                 : table.getDatabase();
-        return String.format("%s.%s", database, tableName).toUpperCase();
+        return String.format(Locale.ROOT, "%s.%s", database, tableName).toUpperCase(Locale.ROOT);
     }
 
     protected void addStepPhase1_DoCreateFlatTable(DefaultChainedExecutable jobFlow, String hdfsWorkingDir,
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java
index 42df327aca..061d5d46f1 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java
@@ -18,9 +18,11 @@
 
 package org.apache.kylin.source.hive;
 
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.commons.lang3.StringUtils;
@@ -69,8 +71,8 @@
         // make a new TableDesc instance, don't modify the one in use
         if (tableDesc == null) {
             tableDesc = new TableDesc();
-            tableDesc.setDatabase(database.toUpperCase());
-            tableDesc.setName(tableName.toUpperCase());
+            tableDesc.setDatabase(database.toUpperCase(Locale.ROOT));
+            tableDesc.setName(tableName.toUpperCase(Locale.ROOT));
             tableDesc.setUuid(RandomUtil.randomUUID().toString());
             tableDesc.setLastModified(0);
         } else {
@@ -87,7 +89,7 @@
         for (int i = 0, n = hiveTableMeta.partitionColumns.size(); i < n; i++) {
             if (i > 0)
                 partitionColumnString.append(", ");
-            partitionColumnString.append(hiveTableMeta.partitionColumns.get(i).name.toUpperCase());
+            partitionColumnString.append(hiveTableMeta.partitionColumns.get(i).name.toUpperCase(Locale.ROOT));
         }
 
         TableExtDesc tableExtDesc = new TableExtDesc();
@@ -120,7 +122,7 @@ public void createSampleDatabase(String database) throws Exception {
     }
 
     private String generateCreateSchemaSql(String schemaName) {
-        return String.format("CREATE DATABASE IF NOT EXISTS %s", schemaName);
+        return String.format(Locale.ROOT, "CREATE DATABASE IF NOT EXISTS %s", schemaName);
     }
 
     @Override
@@ -177,10 +179,10 @@ public void createWrapperView(String origTableName, String viewName) throws Exce
     }
 
     private static String getHiveDataType(String javaDataType) {
-        String hiveDataType = javaDataType.toLowerCase().startsWith("varchar") ? "string" : javaDataType;
-        hiveDataType = javaDataType.toLowerCase().startsWith("integer") ? "int" : hiveDataType;
+        String hiveDataType = javaDataType.toLowerCase(Locale.ROOT).startsWith("varchar") ? "string" : javaDataType;
+        hiveDataType = javaDataType.toLowerCase(Locale.ROOT).startsWith("integer") ? "int" : hiveDataType;
 
-        return hiveDataType.toLowerCase();
+        return hiveDataType.toLowerCase(Locale.ROOT);
     }
 
     @Override
@@ -188,14 +190,15 @@ private static String getHiveDataType(String javaDataType) {
         if (StringUtils.isEmpty(query)) {
             throw new RuntimeException("Evaluate query shall not be empty.");
         }
-        
+
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         String tmpDatabase = config.getHiveDatabaseForIntermediateTable();
-        String tmpView = "kylin_eval_query_" + UUID.nameUUIDFromBytes(query.getBytes()).toString().replace("-", "");
-        
+        String tmpView = "kylin_eval_query_"
+                + UUID.nameUUIDFromBytes(query.getBytes(StandardCharsets.UTF_8)).toString().replace("-", "");
+
         String dropViewSql = "DROP VIEW IF EXISTS " + tmpDatabase + "." + tmpView;
         String evalViewSql = "CREATE VIEW " + tmpDatabase + "." + tmpView + " as " + query;
-        
+
         try {
             logger.debug("Removing duplicate view {}", tmpView);
             hiveClient.executeHQL(dropViewSql);
@@ -223,7 +226,7 @@ private static String getHiveDataType(String javaDataType) {
         for (int i = 0; i < columnNumber; i++) {
             HiveTableMeta.HiveTableColumnMeta field = hiveTableMeta.allColumns.get(i);
             ColumnDesc cdesc = new ColumnDesc();
-            cdesc.setName(field.name.toUpperCase());
+            cdesc.setName(field.name.toUpperCase(Locale.ROOT));
 
             // use "double" in kylin for "float"
             if ("float".equalsIgnoreCase(field.dataType)) {
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
index 5ffa2f676a..eb32756806 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
@@ -24,6 +24,7 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
@@ -55,7 +56,8 @@
     public static final String JOB_TITLE = "Kylin Hive Column Cardinality Update Job";
 
     @SuppressWarnings("static-access")
-    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true).withDescription("The hive table name").create("table");
+    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true)
+            .withDescription("The hive table name").create("table");
 
     public HiveColumnCardinalityUpdateJob() {
 
@@ -74,15 +76,15 @@ public int run(String[] args) throws Exception {
             parseOptions(options, args);
 
             String project = getOptionValue(OPTION_PROJECT);
-            String table = getOptionValue(OPTION_TABLE).toUpperCase();
-            
+            String table = getOptionValue(OPTION_TABLE).toUpperCase(Locale.ROOT);
+
             // start job
             String jobName = JOB_TITLE + getOptionsAsString();
             logger.info("Starting: " + jobName);
             Configuration conf = getConf();
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
 
-            updateKylinTableExd(table.toUpperCase(), output.toString(), conf, project);
+            updateKylinTableExd(table.toUpperCase(Locale.ROOT), output.toString(), conf, project);
             return 0;
         } catch (Exception e) {
             printUsage(options);
@@ -91,7 +93,8 @@ public int run(String[] args) throws Exception {
 
     }
 
-    public void updateKylinTableExd(String tableName, String outPath, Configuration config, String prj) throws IOException {
+    public void updateKylinTableExd(String tableName, String outPath, Configuration config, String prj)
+            throws IOException {
         List<String> columns = null;
         try {
             columns = readLines(new Path(outPath), config);
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java
index 09895f92a5..287019beab 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java
@@ -18,6 +18,7 @@
 package org.apache.kylin.source.jdbc;
 
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
@@ -45,7 +46,7 @@ public CmdStep() {
 
     protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
         String cmd = getParam("cmd");
-        stepLogger.log(String.format("exe cmd:%s", cmd));
+        stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", cmd));
         Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
         getManager().addJobInfo(getId(), stepLogger.getInfo());
         if (response.getFirst() != 0) {
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
index bba20084d6..1c31e91dd7 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.source.jdbc;
 
+import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
@@ -25,6 +26,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.commons.lang3.StringUtils;
@@ -78,8 +80,8 @@ public JdbcExplorer() {
     public Pair<TableDesc, TableExtDesc> loadTableMetadata(String database, String table, String prj)
             throws SQLException {
         TableDesc tableDesc = new TableDesc();
-        tableDesc.setDatabase(database.toUpperCase());
-        tableDesc.setName(table.toUpperCase());
+        tableDesc.setDatabase(database.toUpperCase(Locale.ROOT));
+        tableDesc.setName(table.toUpperCase(Locale.ROOT));
         tableDesc.setUuid(RandomUtil.randomUUID().toString());
         tableDesc.setLastModified(0);
         tableDesc.setSourceType(ISourceAware.ID_JDBC);
@@ -95,7 +97,8 @@ public JdbcExplorer() {
             if (tableType != null) {
                 tableDesc.setTableType(tableType);
             } else {
-                throw new RuntimeException(String.format("table %s not found in schema:%s", table, database));
+                throw new RuntimeException(
+                        String.format(Locale.ROOT, "table %s not found in schema:%s", table, database));
             }
         }
 
@@ -116,12 +119,12 @@ public JdbcExplorer() {
 
     private String getSqlDataType(String javaDataType) {
         if (JdbcDialect.DIALECT_VERTICA.equals(dialect) || JdbcDialect.DIALECT_MSSQL.equals(dialect)) {
-            if (javaDataType.toLowerCase().equals("double")) {
+            if (javaDataType.toLowerCase(Locale.ROOT).equals("double")) {
                 return "float";
             }
         }
 
-        return javaDataType.toLowerCase();
+        return javaDataType.toLowerCase(Locale.ROOT);
     }
 
     @Override
@@ -131,10 +134,12 @@ public void createSampleDatabase(String database) throws Exception {
 
     private String generateCreateSchemaSql(String schemaName) {
         if (JdbcDialect.DIALECT_VERTICA.equals(dialect) || JdbcDialect.DIALECT_MYSQL.equals(dialect)) {
-            return String.format("CREATE schema IF NOT EXISTS %s", schemaName);
+            return String.format(Locale.ROOT, "CREATE schema IF NOT EXISTS %s", schemaName);
         } else if (JdbcDialect.DIALECT_MSSQL.equals(dialect)) {
-            return String.format("IF NOT EXISTS (SELECT name FROM sys.schemas WHERE name = N'%s') EXEC('CREATE SCHEMA"
-                    + " [%s] AUTHORIZATION [dbo]')", schemaName, schemaName);
+            return String.format(Locale.ROOT,
+                    "IF NOT EXISTS (SELECT name FROM sys.schemas WHERE name = N'%s') EXEC('CREATE SCHEMA"
+                            + " [%s] AUTHORIZATION [dbo]')",
+                    schemaName, schemaName);
         } else {
             logger.error("unsupported dialect {}.", dialect);
             return null;
@@ -148,14 +153,14 @@ public void loadSampleData(String tableName, String tmpDataDir) throws Exception
 
     private String generateLoadDataSql(String tableName, String tableFileDir) {
         if (JdbcDialect.DIALECT_VERTICA.equals(dialect)) {
-            return String.format("copy %s from local '%s/%s.csv' delimiter as ',';", tableName, tableFileDir,
-                    tableName);
+            return String.format(Locale.ROOT, "copy %s from local '%s/%s.csv' delimiter as ',';", tableName,
+                    tableFileDir, tableName);
         } else if (JdbcDialect.DIALECT_MYSQL.equals(dialect)) {
-            return String.format("LOAD DATA INFILE '%s/%s.csv' INTO %s FIELDS TERMINATED BY ',';", tableFileDir,
-                    tableName, tableName);
+            return String.format(Locale.ROOT, "LOAD DATA INFILE '%s/%s.csv' INTO %s FIELDS TERMINATED BY ',';",
+                    tableFileDir, tableName, tableName);
         } else if (JdbcDialect.DIALECT_MSSQL.equals(dialect)) {
-            return String.format("BULK INSERT %s FROM '%s/%s.csv' WITH(FIELDTERMINATOR = ',')", tableName, tableFileDir,
-                    tableName);
+            return String.format(Locale.ROOT, "BULK INSERT %s FROM '%s/%s.csv' WITH(FIELDTERMINATOR = ',')", tableName,
+                    tableFileDir, tableName);
         } else {
             logger.error("unsupported dialect {}.", dialect);
             return null;
@@ -169,8 +174,9 @@ public void createSampleTable(TableDesc table) throws Exception {
 
     private String[] generateCreateTableSql(TableDesc tableDesc) {
         logger.info("Generate create table sql: {}", tableDesc);
-        String tableIdentity = String.format("%s.%s", tableDesc.getDatabase().toUpperCase(), tableDesc.getName())
-                .toUpperCase();
+        String tableIdentity = String
+                .format(Locale.ROOT, "%s.%s", tableDesc.getDatabase().toUpperCase(Locale.ROOT), tableDesc.getName())
+                .toUpperCase(Locale.ROOT);
         String dropsql = "DROP TABLE IF EXISTS " + tableIdentity;
         String dropsql2 = "DROP VIEW IF EXISTS " + tableIdentity;
 
@@ -239,7 +245,7 @@ private void executeSQL(String[] sqls) throws SQLException {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         String tmpDatabase = config.getHiveDatabaseForIntermediateTable();
         String tmpView = tmpDatabase + ".kylin_eval_query_"
-                + UUID.nameUUIDFromBytes(query.getBytes()).toString().replaceAll("-", "");
+                + UUID.nameUUIDFromBytes(query.getBytes(StandardCharsets.UTF_8)).toString().replaceAll("-", "");
 
         String dropViewSql = "DROP VIEW IF EXISTS " + tmpView;
         String evalViewSql = "CREATE VIEW " + tmpView + " as " + query;
@@ -283,7 +289,7 @@ private void executeSQL(String[] sqls) throws SQLException {
             String remarks = meta.getString("REMARKS");
 
             ColumnDesc cdesc = new ColumnDesc();
-            cdesc.setName(cname.toUpperCase());
+            cdesc.setName(cname.toUpperCase(Locale.ROOT));
 
             String kylinType = SqlUtil.jdbcTypeToKylinDataType(type);
             int precision = (SqlUtil.isPrecisionApplicable(kylinType) && csize > 0) ? csize : -1;
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
index 284aac5697..b5069a1d2c 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.source.jdbc;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.KylinConfig;
@@ -170,8 +171,8 @@ private AbstractExecutable createSqoopToFlatHiveStep(String jobWorkingDir, Strin
             String filedDelimiter = config.getJdbcSourceFieldDelimiter();
             int mapperNum = config.getSqoopMapperNum();
 
-            String bquery = String.format("SELECT min(%s), max(%s) FROM \"%s\".%s as %s", splitColumn, splitColumn,
-                    splitDatabase, splitTable, splitTableAlias);
+            String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM \"%s\".%s as %s", splitColumn,
+                    splitColumn, splitDatabase, splitTable, splitTableAlias);
             if (partitionDesc.isPartitioned()) {
                 SegmentRange segRange = flatDesc.getSegRange();
                 if (segRange != null && !segRange.isInfinite()) {
@@ -184,12 +185,14 @@ private AbstractExecutable createSqoopToFlatHiveStep(String jobWorkingDir, Strin
                 }
             }
 
-            String cmd = String.format("%s/bin/sqoop import" + generateSqoopConfigArgString()
-                    + "--connect \"%s\" --driver %s --username %s --password %s --query \"%s AND \\$CONDITIONS\" "
-                    + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '' "
-                    + "--fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl, driverClass, jdbcUser,
-                    jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery, filedDelimiter, mapperNum);
-            logger.debug(String.format("sqoop cmd:%s", cmd));
+            String cmd = String.format(Locale.ROOT,
+                    "%s/bin/sqoop import" + generateSqoopConfigArgString()
+                            + "--connect \"%s\" --driver %s --username %s --password %s --query \"%s AND \\$CONDITIONS\" "
+                            + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '' "
+                            + "--fields-terminated-by '%s' --num-mappers %d",
+                    sqoopHome, connectionUrl, driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable,
+                    splitColumn, bquery, filedDelimiter, mapperNum);
+            logger.debug(String.format(Locale.ROOT, "sqoop cmd:%s", cmd));
             CmdStep step = new CmdStep();
             step.setCmd(cmd);
             step.setName(ExecutableConstants.STEP_NAME_SQOOP_TO_FLAT_HIVE_TABLE);
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java
index 4313862c5e..e880a3d557 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java
@@ -19,7 +19,7 @@
 package org.apache.kylin.source.jdbc;
 
 import java.io.IOException;
-
+import java.util.Locale;
 
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.source.IReadableTable;
@@ -35,7 +35,6 @@
     final private String database;
     final private String tableName;
 
-
     public JdbcTable(TableDesc tableDesc) {
         this.database = tableDesc.getDatabase();
         this.tableName = tableDesc.getName();
@@ -48,12 +47,12 @@ public TableReader getReader() throws IOException {
 
     @Override
     public TableSignature getSignature() throws IOException {
-        String path = String.format("%s.%s", database, tableName);
+        String path = String.format(Locale.ROOT, "%s.%s", database, tableName);
         long lastModified = System.currentTimeMillis(); // assume table is ever changing
-        int size=0;
+        int size = 0;
         return new TableSignature(path, size, lastModified);
     }
-    
+
     @Override
     public boolean exists() {
         return true;
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java
index e2616b7f24..2e9974892e 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java
@@ -23,6 +23,7 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.source.IReadableTable.TableReader;
@@ -61,13 +62,13 @@ public JdbcTableReader(String dbName, String tableName) throws IOException {
         String jdbcPass = config.getJdbcSourcePass();
         dbconf = new DBConnConf(driverClass, connectionUrl, jdbcUser, jdbcPass);
         jdbcCon = SqlUtil.getConnection(dbconf);
-        String sql = String.format("select * from %s.%s", dbName, tableName);
+        String sql = String.format(Locale.ROOT, "select * from %s.%s", dbName, tableName);
         try {
             statement = jdbcCon.createStatement();
             rs = statement.executeQuery(sql);
             colCount = rs.getMetaData().getColumnCount();
         } catch (SQLException e) {
-            throw new IOException(String.format("error while exec %s", sql), e);
+            throw new IOException(String.format(Locale.ROOT, "error while exec %s", sql), e);
         }
     }
 
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java
index f4ffc235a2..08421990ca 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java
@@ -24,6 +24,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.source.hive.DBConnConf;
 import org.apache.kylin.source.jdbc.SqlUtil;
 import org.slf4j.Logger;
@@ -44,7 +45,7 @@ public DefaultJdbcMetadata(DBConnConf dbConnConf) {
             while (rs.next()) {
                 String schema = rs.getString("TABLE_SCHEM");
                 String catalog = rs.getString("TABLE_CATALOG");
-                logger.info(String.format("%s,%s", schema, catalog));
+                logger.info(String.format(Locale.ROOT, "%s,%s", schema, catalog));
                 ret.add(schema);
             }
         }
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java
index 4100f795a4..ae4c0ff245 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java
@@ -17,12 +17,13 @@
 */
 package org.apache.kylin.source.jdbc.metadata;
 
+import java.util.Locale;
 import org.apache.kylin.source.hive.DBConnConf;
 import org.apache.kylin.source.jdbc.JdbcDialect;
 
 public abstract class JdbcMetadataFactory {
     public static IJdbcMetadata getJdbcMetadata(String dialect, final DBConnConf dbConnConf) {
-        String jdbcDialect = (null == dialect) ? "" : dialect.toLowerCase();
+        String jdbcDialect = (null == dialect) ? "" : dialect.toLowerCase(Locale.ROOT);
         switch (jdbcDialect) {
         case (JdbcDialect.DIALECT_MSSQL):
             return new SQLServerJdbcMetadata(dbConnConf);
diff --git a/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java b/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java
index b2693296ee..a0df4f462e 100644
--- a/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java
+++ b/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java
@@ -32,6 +32,7 @@
 import java.sql.Types;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -138,13 +139,13 @@ public void testLoadTableMetadata() throws SQLException {
         TableDesc tableDesc = result.getFirst();
         ColumnDesc columnDesc = tableDesc.getColumns()[1];
 
-        Assert.assertEquals(databaseName.toUpperCase(), tableDesc.getDatabase());
+        Assert.assertEquals(databaseName.toUpperCase(Locale.ROOT), tableDesc.getDatabase());
         Assert.assertEquals(3, tableDesc.getColumnCount());
         Assert.assertEquals("TABLE", tableDesc.getTableType());
         Assert.assertEquals("COL2", columnDesc.getName());
         Assert.assertEquals("integer", columnDesc.getTypeName());
         Assert.assertEquals("comment2", columnDesc.getComment());
-        Assert.assertEquals(databaseName.toUpperCase() + "." + tableName.toUpperCase(),
+        Assert.assertEquals(databaseName.toUpperCase(Locale.ROOT) + "." + tableName.toUpperCase(Locale.ROOT),
                 result.getSecond().getIdentity());
     }
 
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
index 73b224e0e3..1c94f9c7c7 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
@@ -21,6 +21,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -143,10 +144,11 @@ public void addStepPhase1_CreateFlatTable(DefaultChainedExecutable jobFlow) {
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), tableLocation, seg));
                 intermediatePaths.add(tableLocation);
             } else {
-                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeName.toLowerCase()
-                        + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
+                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX
+                        + cubeName.toLowerCase(Locale.ROOT) + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), baseLocation + "/" + mockFactTableName, seg));
-                jobFlow.addTask(createFlatTable(hiveTableDatabase, mockFactTableName, baseLocation, cubeName, cubeDesc, flatDesc, intermediateTables, intermediatePaths));
+                jobFlow.addTask(createFlatTable(hiveTableDatabase, mockFactTableName, baseLocation, cubeName, cubeDesc,
+                        flatDesc, intermediateTables, intermediatePaths));
             }
         }
 
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java
index 7600329d76..7db6c32661 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java
@@ -18,6 +18,7 @@
 package org.apache.kylin.source.kafka;
 
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeSegment;
@@ -84,8 +85,8 @@ public void addStepPhase1_CreateFlatTable(DefaultChainedExecutable jobFlow) {
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), tableLocation, seg));
                 intermediatePaths.add(tableLocation);
             } else {
-                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeName.toLowerCase()
-                        + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
+                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX
+                        + cubeName.toLowerCase(Locale.ROOT) + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), baseLocation + "/" + mockFactTableName, seg));
                 jobFlow.addTask(createFlatTable(hiveTableDatabase, mockFactTableName, baseLocation, cubeName, cubeDesc,
                         flatDesc, intermediateTables, intermediatePaths));
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index 3618ba6a63..5aa60c9179 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -23,6 +23,7 @@
 import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.ArrayList;
+import java.util.Locale;
 import java.util.Map;
 import java.util.HashMap;
 import java.util.TreeMap;
@@ -116,7 +117,7 @@ public TimedJsonStreamParser(List<TblColRef> allColumns, Map<String, String> pro
             ArrayList<String> result = Lists.newArrayList();
 
             for (TblColRef column : allColumns) {
-                final String columnName = column.getName().toLowerCase();
+                final String columnName = column.getName().toLowerCase(Locale.ROOT);
                 if (populateDerivedTimeColumns(columnName, result, t) == false) {
                     result.add(getValueByKey(column, root));
                 }
@@ -138,15 +139,15 @@ public boolean filter(StreamingMessageRow streamingMessageRow) {
     }
 
     public String[] getEmbeddedPropertyNames(TblColRef column) {
-        final String colName = column.getName().toLowerCase();
+        final String colName = column.getName().toLowerCase(Locale.ROOT);
         String[] names = nameMap.get(colName);
         if (names == null) {
             String comment = column.getColumnDesc().getComment(); // use comment to parse the structure
             if (!StringUtils.isEmpty(comment) && comment.contains(EMBEDDED_PROPERTY_SEPARATOR)) {
-                names = comment.toLowerCase().split("\\" + EMBEDDED_PROPERTY_SEPARATOR);
+                names = comment.toLowerCase(Locale.ROOT).split("\\" + EMBEDDED_PROPERTY_SEPARATOR);
                 nameMap.put(colName, names);
             } else if (colName.contains(separator)) { // deprecated, just be compitable for old version
-                names = colName.toLowerCase().split(separator);
+                names = colName.toLowerCase(Locale.ROOT).split(separator);
                 nameMap.put(colName, names);
             }
         }
@@ -155,7 +156,7 @@ public boolean filter(StreamingMessageRow streamingMessageRow) {
     }
 
     protected String getValueByKey(TblColRef column, Map<String, Object> rootMap) throws IOException {
-        final String key = column.getName().toLowerCase();
+        final String key = column.getName().toLowerCase(Locale.ROOT);
         if (rootMap.containsKey(key)) {
             return objToString(rootMap.get(key));
         }
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
index c22c72fca1..0902866ab2 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Iterator;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
@@ -89,13 +90,15 @@ public void initialize(InputSplit split, Configuration conf) throws IOException,
 
         Properties kafkaProperties = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);
 
-        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup, kafkaProperties);
+        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup,
+                kafkaProperties);
 
         earliestOffset = this.split.getOffsetStart();
         latestOffset = this.split.getOffsetEnd();
         TopicPartition topicPartition = new TopicPartition(topic, partition);
         consumer.assign(Arrays.asList(topicPartition));
-        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}", new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
+        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}",
+                new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
     }
 
     @Override
@@ -120,7 +123,9 @@ public boolean nextKeyValue() throws IOException, InterruptedException {
             iterator = messages.iterator();
             if (!iterator.hasNext()) {
                 log.info("No more messages, stop");
-                throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
+                throw new IOException(String.format(Locale.ROOT,
+                        "Unexpected ending of stream, expected ending offset " + "%d, but end at %d", latestOffset,
+                        watermark));
             }
         }
 
@@ -139,7 +144,8 @@ public boolean nextKeyValue() throws IOException, InterruptedException {
         }
 
         log.error("Unexpected iterator end.");
-        throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
+        throw new IOException(String.format(Locale.ROOT,
+                "Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
     }
 
     @Override
@@ -162,7 +168,8 @@ public float getProgress() throws IOException, InterruptedException {
 
     @Override
     public void close() throws IOException {
-        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition, numProcessedMessages);
+        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition,
+                numProcessedMessages);
         consumer.close();
     }
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 16b8db2d08..911c8d51ba 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -23,6 +23,7 @@
 import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.ExecutorService;
 import java.util.zip.DataFormatException;
 
@@ -90,14 +91,18 @@ public CubeHBaseEndpointRPC(ISegment segment, Cuboid cuboid, GTInfo fullGTInfo,
 
         if (shardNum == totalShards) {
             //all shards
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
+            return Lists.newArrayList(
+                    Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
         } else if (baseShard + shardNum <= totalShards) {
             //endpoint end key is inclusive, so no need to append 0 or anything
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (baseShard + shardNum - 1))));
+            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard),
+                    getByteArrayForShort((short) (baseShard + shardNum - 1))));
         } else {
             //0,1,2,3,4 wants 4,0
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
-                    Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
+            return Lists.newArrayList(
+                    Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
+                    Pair.newPair(getByteArrayForShort((short) 0),
+                            getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
         }
     }
 
@@ -149,14 +154,18 @@ public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOExcepti
 
         final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(queryContext, shardNum, coprocessorTimeout);
 
-        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(), rawScanByteString.size());
+        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(),
+                rawScanByteString.size());
 
-        logger.info("The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0", Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
+        logger.info(
+                "The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0",
+                Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
         for (RawScan rs : rawScans) {
             logScan(rs, cubeSeg.getStorageLocationIdentifier());
         }
 
-        logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum, cuboidBaseShard, rawScans.size());
+        logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum,
+                cuboidBaseShard, rawScans.size());
 
         // KylinConfig: use env instance instead of CubeSegment, because KylinConfig will share among queries
         // for different cubes until redeployment of coprocessor jar.
@@ -177,8 +186,8 @@ public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOExcepti
         builder.setMaxScanBytes(cubeSeg.getConfig().getPartitionMaxScanBytes());
         builder.setIsExactAggregate(storageContext.isExactAggregation());
 
-        final String logHeader = String.format("<sub-thread for Query %s GTScanRequest %s>", queryContext.getQueryId(),
-                Integer.toHexString(System.identityHashCode(scanRequest)));
+        final String logHeader = String.format(Locale.ROOT, "<sub-thread for Query %s GTScanRequest %s>",
+                queryContext.getQueryId(), Integer.toHexString(System.identityHashCode(scanRequest)));
         for (final Pair<byte[], byte[]> epRange : getEPKeyRanges(cuboidBaseShard, shardNum, totalShards)) {
             executorService.submit(new Runnable() {
                 @Override
@@ -370,7 +379,8 @@ private ByteString serializeRawScans(List<RawScan> rawScans) {
                     RawScan.serializer.serialize(rs, rawScanBuffer);
                 }
                 rawScanBuffer.flip();
-                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(), rawScanBuffer.limit());
+                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(),
+                        rawScanBuffer.limit());
                 break;
             } catch (BufferOverflowException boe) {
                 logger.info("Buffer size {} cannot hold the raw scans, resizing to 4 times", rawScanBufferSize);
@@ -385,13 +395,17 @@ private String getStatsString(byte[] region, CubeVisitResponse result) {
         Stats stats = result.getStats();
         byte[] compressedRows = HBaseZeroCopyByteString.zeroCopyGetBytes(result.getCompressedRows());
 
-        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ").append(BytesUtil.toHex(region)).append(" on host: ").append(stats.getHostname()).append(".");
+        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ")
+                .append(BytesUtil.toHex(region)).append(" on host: ").append(stats.getHostname()).append(".");
         sb.append("Total scanned row: ").append(stats.getScannedRowCount()).append(". ");
         sb.append("Total scanned bytes: ").append(stats.getScannedBytes()).append(". ");
         sb.append("Total filtered row: ").append(stats.getFilteredRowCount()).append(". ");
         sb.append("Total aggred row: ").append(stats.getAggregatedRowCount()).append(". ");
-        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime()).append("(ms). ");
-        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ").append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:").append(stats.getFreeSwapSpaceSize()).append(".");
+        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime())
+                .append("(ms). ");
+        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ")
+                .append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:")
+                .append(stats.getFreeSwapSpaceSize()).append(".");
         sb.append("Etc message: ").append(stats.getEtcMsg()).append(".");
         sb.append("Normal Complete: ").append(stats.getNormalComplete() == 1).append(".");
         sb.append("Compressed row size: ").append(compressedRows.length);
@@ -401,7 +415,8 @@ private String getStatsString(byte[] region, CubeVisitResponse result) {
 
     private RuntimeException getCoprocessorException(CubeVisitResponse response) {
         if (!response.hasErrorInfo()) {
-            return new RuntimeException("Coprocessor aborts due to scan timeout or other reasons, please re-deploy coprocessor to see concrete error message");
+            return new RuntimeException(
+                    "Coprocessor aborts due to scan timeout or other reasons, please re-deploy coprocessor to see concrete error message");
         }
 
         CubeVisitResponse.ErrorInfo errorInfo = response.getErrorInfo();
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
index 199a1fe1ff..0135a2204f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.storage.hbase.lookup;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Random;
 
 import org.apache.commons.cli.Options;
@@ -87,7 +88,7 @@ public int run(String[] args) throws Exception {
             parseOptions(options, args);
 
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String tableName = getOptionValue(OPTION_TABLE_NAME);
             String lookupSnapshotID = getOptionValue(OPTION_LOOKUP_SNAPSHOT_ID);
             String jobId = getOptionValue(OPTION_CUBING_JOB_ID);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 37e36acc0c..7205802388 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -22,6 +22,7 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -87,7 +88,7 @@ public int run(String[] args) throws Exception {
 
         partitionFilePath = new Path(getOptionValue(OPTION_PARTITION_FILE_PATH));
 
-        String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+        String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         cube = cubeMgr.getCube(cubeName);
         cubeDesc = cube.getDescriptor();
@@ -226,13 +227,14 @@ private void exportHBaseConfiguration(String hbaseTableName) throws Exception {
                 }
 
                 if (shardNum > nRegion) {
-                    logger.info(
-                            String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions, reduce to %d",
-                                    cuboidId, estimatedSize, shardNum, nRegion));
+                    logger.info(String.format(Locale.ROOT,
+                            "Cuboid %d 's estimated size %.2f MB will generate %d regions, " + "reduce to %d", cuboidId,
+                            estimatedSize, shardNum, nRegion));
                     shardNum = nRegion;
                 } else {
-                    logger.info(String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions", cuboidId,
-                            estimatedSize, shardNum));
+                    logger.info(
+                            String.format(Locale.ROOT, "Cuboid %d 's estimated size %.2f MB will generate %d regions",
+                                    cuboidId, estimatedSize, shardNum));
                 }
 
                 cuboidShards.put(cuboidId, (short) shardNum);
@@ -245,8 +247,9 @@ private void exportHBaseConfiguration(String hbaseTableName) throws Exception {
             }
 
             for (int i = 0; i < nRegion; ++i) {
-                logger.info(String.format("Region %d's estimated size is %.2f MB, accounting for %.2f percent", i,
-                        regionSizes[i], 100.0 * regionSizes[i] / totalSizeInM));
+                logger.info(
+                        String.format(Locale.ROOT, "Region %d's estimated size is %.2f MB, accounting for %.2f percent",
+                                i, regionSizes[i], 100.0 * regionSizes[i] / totalSizeInM));
             }
 
             CuboidShardUtil.saveCuboidShards(cubeSegment, cuboidShards, nRegion);
@@ -341,7 +344,8 @@ protected static void saveHFileSplits(final List<HashMap<Long, Double>> innerReg
             for (Long cuboid : allCuboids) {
 
                 if (accumulatedSize >= hfileSizeMB) {
-                    logger.info(String.format("Region %d's hfile %d size is %.2f mb", i, j, accumulatedSize));
+                    logger.info(
+                            String.format(Locale.ROOT, "Region %d's hfile %d size is %.2f mb", i, j, accumulatedSize));
                     byte[] split = new byte[RowConstants.ROWKEY_SHARD_AND_CUBOID_LEN];
                     BytesUtil.writeUnsigned(i, split, 0, RowConstants.ROWKEY_SHARDID_LEN);
                     System.arraycopy(Bytes.toBytes(cuboid), 0, split, RowConstants.ROWKEY_SHARDID_LEN,
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index f006adb748..d06c993be3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -161,7 +162,7 @@ public static HColumnDescriptor createColumnFamily(KylinConfig kylinConfig, Stri
             cf.setBlocksize(kylinConfig.getHbaseSmallFamilyBlockSize());
         }
 
-        String hbaseDefaultCC = kylinConfig.getHbaseDefaultCompressionCodec().toLowerCase();
+        String hbaseDefaultCC = kylinConfig.getHbaseDefaultCompressionCodec().toLowerCase(Locale.ROOT);
         switch (hbaseDefaultCC) {
         case "snappy": {
             logger.info("hbase will use snappy to compress data");
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
index e58a00e1ff..1f7566092c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
@@ -21,6 +21,7 @@
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -94,55 +95,45 @@
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HFileOutputFormat3
-        extends FileOutputFormat<ImmutableBytesWritable, Cell> {
+public class HFileOutputFormat3 extends FileOutputFormat<ImmutableBytesWritable, Cell> {
     static Log LOG = LogFactory.getLog(HFileOutputFormat3.class);
 
     // The following constants are private since these are used by
     // HFileOutputFormat2 to internally transfer data between job setup and
     // reducer run using conf.
     // These should not be changed by the client.
-    private static final String COMPRESSION_FAMILIES_CONF_KEY =
-            "hbase.hfileoutputformat.families.compression";
-    private static final String BLOOM_TYPE_FAMILIES_CONF_KEY =
-            "hbase.hfileoutputformat.families.bloomtype";
-    private static final String BLOCK_SIZE_FAMILIES_CONF_KEY =
-            "hbase.mapreduce.hfileoutputformat.blocksize";
-    private static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY =
-            "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";
+    private static final String COMPRESSION_FAMILIES_CONF_KEY = "hbase.hfileoutputformat.families.compression";
+    private static final String BLOOM_TYPE_FAMILIES_CONF_KEY = "hbase.hfileoutputformat.families.bloomtype";
+    private static final String BLOCK_SIZE_FAMILIES_CONF_KEY = "hbase.mapreduce.hfileoutputformat.blocksize";
+    private static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY = "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";
 
     // This constant is public since the client can modify this when setting
     // up their conf object and thus refer to this symbol.
     // It is present for backwards compatibility reasons. Use it only to
     // override the auto-detection of datablock encoding.
-    public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY =
-            "hbase.mapreduce.hfileoutputformat.datablock.encoding";
+    public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY = "hbase.mapreduce.hfileoutputformat.datablock.encoding";
 
     @Override
-    public RecordWriter<ImmutableBytesWritable, Cell> getRecordWriter(
-            final TaskAttemptContext context) throws IOException, InterruptedException {
+    public RecordWriter<ImmutableBytesWritable, Cell> getRecordWriter(final TaskAttemptContext context)
+            throws IOException, InterruptedException {
         return createRecordWriter(context, this.getOutputCommitter(context));
     }
 
-    static <V extends Cell> RecordWriter<ImmutableBytesWritable, V>
-    createRecordWriter(final TaskAttemptContext context, final OutputCommitter committer)
-            throws IOException, InterruptedException {
+    static <V extends Cell> RecordWriter<ImmutableBytesWritable, V> createRecordWriter(final TaskAttemptContext context,
+            final OutputCommitter committer) throws IOException, InterruptedException {
 
         // Get the path of the temporary output file
-        final Path outputdir = ((FileOutputCommitter)committer).getWorkPath();
+        final Path outputdir = ((FileOutputCommitter) committer).getWorkPath();
         final Configuration conf = context.getConfiguration();
         LOG.debug("Task output path: " + outputdir);
         final FileSystem fs = outputdir.getFileSystem(conf);
         // These configs. are from hbase-*.xml
-        final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
-                HConstants.DEFAULT_MAX_FILE_SIZE);
+        final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
         // Invented config.  Add to hbase-*.xml if other than default compression.
-        final String defaultCompressionStr = conf.get("hfile.compression",
-                Compression.Algorithm.NONE.getName());
-        final Algorithm defaultCompression = AbstractHFileWriter
-                .compressionByName(defaultCompressionStr);
-        final boolean compactionExclude = conf.getBoolean(
-                "hbase.mapreduce.hfileoutputformat.compaction.exclude", false);
+        final String defaultCompressionStr = conf.get("hfile.compression", Compression.Algorithm.NONE.getName());
+        final Algorithm defaultCompression = AbstractHFileWriter.compressionByName(defaultCompressionStr);
+        final boolean compactionExclude = conf.getBoolean("hbase.mapreduce.hfileoutputformat.compaction.exclude",
+                false);
 
         // create a map from column family to the compression algorithm
         final Map<byte[], Algorithm> compressionMap = createFamilyCompressionMap(conf);
@@ -150,8 +141,7 @@
         final Map<byte[], Integer> blockSizeMap = createFamilyBlockSizeMap(conf);
 
         String dataBlockEncodingStr = conf.get(DATABLOCK_ENCODING_OVERRIDE_CONF_KEY);
-        final Map<byte[], DataBlockEncoding> datablockEncodingMap
-                = createFamilyDataBlockEncodingMap(conf);
+        final Map<byte[], DataBlockEncoding> datablockEncodingMap = createFamilyDataBlockEncodingMap(conf);
         final DataBlockEncoding overriddenEncoding;
         if (dataBlockEncodingStr != null) {
             overriddenEncoding = DataBlockEncoding.valueOf(dataBlockEncodingStr);
@@ -161,23 +151,21 @@
 
         return new RecordWriter<ImmutableBytesWritable, V>() {
             // Map of families to writers and how much has been output on the writer.
-            private final Map<byte [], WriterLength> writers =
-                    new TreeMap<byte [], WriterLength>(Bytes.BYTES_COMPARATOR);
-            private byte [] previousRow = HConstants.EMPTY_BYTE_ARRAY;
-            private final byte [] now = Bytes.toBytes(System.currentTimeMillis());
+            private final Map<byte[], WriterLength> writers = new TreeMap<byte[], WriterLength>(Bytes.BYTES_COMPARATOR);
+            private byte[] previousRow = HConstants.EMPTY_BYTE_ARRAY;
+            private final byte[] now = Bytes.toBytes(System.currentTimeMillis());
             private boolean rollRequested = false;
 
             @Override
-            public void write(ImmutableBytesWritable row, V cell)
-                    throws IOException {
+            public void write(ImmutableBytesWritable row, V cell) throws IOException {
                 KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
                 if (row == null && kv == null) {
                     rollWriters();
                     return;
                 }
-                byte [] rowKey = CellUtil.cloneRow(kv);
+                byte[] rowKey = CellUtil.cloneRow(kv);
                 long length = kv.getLength();
-                byte [] family = CellUtil.cloneFamily(kv);
+                byte[] family = CellUtil.cloneFamily(kv);
                 WriterLength wl = this.writers.get(family);
                 if (wl == null) {
                     fs.mkdirs(new Path(outputdir, Bytes.toString(family)));
@@ -200,8 +188,7 @@ public void write(ImmutableBytesWritable row, V cell)
             private void rollWriters() throws IOException {
                 for (WriterLength wl : this.writers.values()) {
                     if (wl.writer != null) {
-                        LOG.info("Writer=" + wl.writer.getPath() +
-                                ((wl.written == 0)? "": ", wrote=" + wl.written));
+                        LOG.info("Writer=" + wl.writer.getPath() + ((wl.written == 0) ? "" : ", wrote=" + wl.written));
                         close(wl.writer);
                     }
                     wl.writer = null;
@@ -210,10 +197,8 @@ private void rollWriters() throws IOException {
                 this.rollRequested = false;
             }
 
-            @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="BX_UNBOXING_IMMEDIATELY_REBOXED",
-                    justification="Not important")
-            private WriterLength getNewWriter(byte[] family, Configuration conf)
-                    throws IOException {
+            @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "BX_UNBOXING_IMMEDIATELY_REBOXED", justification = "Not important")
+            private WriterLength getNewWriter(byte[] family, Configuration conf) throws IOException {
                 WriterLength wl = new WriterLength();
                 Path familydir = new Path(outputdir, Bytes.toString(family));
                 Algorithm compression = compressionMap.get(family);
@@ -227,18 +212,15 @@ private WriterLength getNewWriter(byte[] family, Configuration conf)
                 encoding = encoding == null ? DataBlockEncoding.NONE : encoding;
                 Configuration tempConf = new Configuration(conf);
                 tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
-                HFileContextBuilder contextBuilder = new HFileContextBuilder()
-                        .withCompression(compression)
+                HFileContextBuilder contextBuilder = new HFileContextBuilder().withCompression(compression)
                         .withChecksumType(HStore.getChecksumType(conf))
-                        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
-                        .withBlockSize(blockSize);
+                        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)).withBlockSize(blockSize);
                 contextBuilder.withDataBlockEncoding(encoding);
                 HFileContext hFileContext = contextBuilder.build();
 
-                wl.writer = new StoreFile.WriterBuilder(conf, new CacheConfig(tempConf), fs)
-                        .withOutputDir(familydir).withBloomType(bloomType)
-                        .withComparator(KeyValue.COMPARATOR)
-                        .withFileContext(hFileContext).build();
+                wl.writer = new StoreFile.WriterBuilder(conf, new CacheConfig(tempConf), fs).withOutputDir(familydir)
+                        .withBloomType(bloomType).withComparator(KeyValue.COMPARATOR).withFileContext(hFileContext)
+                        .build();
 
                 this.writers.put(family, wl);
                 return wl;
@@ -246,23 +228,18 @@ private WriterLength getNewWriter(byte[] family, Configuration conf)
 
             private void close(final StoreFile.Writer w) throws IOException {
                 if (w != null) {
-                    w.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY,
-                            Bytes.toBytes(System.currentTimeMillis()));
-                    w.appendFileInfo(StoreFile.BULKLOAD_TASK_KEY,
-                            Bytes.toBytes(context.getTaskAttemptID().toString()));
-                    w.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY,
-                            Bytes.toBytes(true));
-                    w.appendFileInfo(StoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY,
-                            Bytes.toBytes(compactionExclude));
+                    w.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
+                    w.appendFileInfo(StoreFile.BULKLOAD_TASK_KEY, Bytes.toBytes(context.getTaskAttemptID().toString()));
+                    w.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY, Bytes.toBytes(true));
+                    w.appendFileInfo(StoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY, Bytes.toBytes(compactionExclude));
                     w.appendTrackedTimestampsToMetadata();
                     w.close();
                 }
             }
 
             @Override
-            public void close(TaskAttemptContext c)
-                    throws IOException, InterruptedException {
-                for (WriterLength wl: this.writers.values()) {
+            public void close(TaskAttemptContext c) throws IOException, InterruptedException {
+                for (WriterLength wl : this.writers.values()) {
                     close(wl.writer);
                 }
             }
@@ -281,11 +258,9 @@ public void close(TaskAttemptContext c)
      * Return the start keys of all of the regions in this table,
      * as a list of ImmutableBytesWritable.
      */
-    private static List<ImmutableBytesWritable> getRegionStartKeys(RegionLocator table)
-            throws IOException {
+    private static List<ImmutableBytesWritable> getRegionStartKeys(RegionLocator table) throws IOException {
         byte[][] byteKeys = table.getStartKeys();
-        ArrayList<ImmutableBytesWritable> ret =
-                new ArrayList<ImmutableBytesWritable>(byteKeys.length);
+        ArrayList<ImmutableBytesWritable> ret = new ArrayList<ImmutableBytesWritable>(byteKeys.length);
         for (byte[] byteKey : byteKeys) {
             ret.add(new ImmutableBytesWritable(byteKey));
         }
@@ -297,8 +272,8 @@ public void close(TaskAttemptContext c)
      * {@link TotalOrderPartitioner} that contains the split points in startKeys.
      */
     @SuppressWarnings("deprecation")
-    private static void writePartitions(Configuration conf, Path partitionsPath,
-                                        List<ImmutableBytesWritable> startKeys) throws IOException {
+    private static void writePartitions(Configuration conf, Path partitionsPath, List<ImmutableBytesWritable> startKeys)
+            throws IOException {
         LOG.info("Writing partition information to " + partitionsPath);
         if (startKeys.isEmpty()) {
             throw new IllegalArgumentException("No regions passed");
@@ -308,21 +283,18 @@ private static void writePartitions(Configuration conf, Path partitionsPath,
         // have keys < the first region (which has an empty start key)
         // so we need to remove it. Otherwise we would end up with an
         // empty reducer with index 0
-        TreeSet<ImmutableBytesWritable> sorted =
-                new TreeSet<ImmutableBytesWritable>(startKeys);
+        TreeSet<ImmutableBytesWritable> sorted = new TreeSet<ImmutableBytesWritable>(startKeys);
 
         ImmutableBytesWritable first = sorted.first();
         if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {
-            throw new IllegalArgumentException(
-                    "First region of table should have empty start key. Instead has: "
-                            + Bytes.toStringBinary(first.get()));
+            throw new IllegalArgumentException("First region of table should have empty start key. Instead has: "
+                    + Bytes.toStringBinary(first.get()));
         }
         sorted.remove(first);
 
         // Write the actual file
         FileSystem fs = partitionsPath.getFileSystem(conf);
-        SequenceFile.Writer writer = SequenceFile.createWriter(
-                fs, conf, partitionsPath, ImmutableBytesWritable.class,
+        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, partitionsPath, ImmutableBytesWritable.class,
                 NullWritable.class);
 
         try {
@@ -351,8 +323,7 @@ private static void writePartitions(Configuration conf, Path partitionsPath,
      * @deprecated Use {@link #configureIncrementalLoad(Job, Table, RegionLocator)} instead.
      */
     @Deprecated
-    public static void configureIncrementalLoad(Job job, HTable table)
-            throws IOException {
+    public static void configureIncrementalLoad(Job job, HTable table) throws IOException {
         configureIncrementalLoad(job, table.getTableDescriptor(), table.getRegionLocator());
     }
 
@@ -370,8 +341,7 @@ public static void configureIncrementalLoad(Job job, HTable table)
      * The user should be sure to set the map output value class to either KeyValue or Put before
      * running this function.
      */
-    public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)
-            throws IOException {
+    public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator) throws IOException {
         configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
     }
 
@@ -389,14 +359,13 @@ public static void configureIncrementalLoad(Job job, Table table, RegionLocator
      * The user should be sure to set the map output value class to either KeyValue or Put before
      * running this function.
      */
-    public static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor,
-                                                RegionLocator regionLocator) throws IOException {
+    public static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor, RegionLocator regionLocator)
+            throws IOException {
         configureIncrementalLoad(job, tableDescriptor, regionLocator, HFileOutputFormat3.class);
     }
 
-    static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor,
-                                         RegionLocator regionLocator, Class<? extends OutputFormat<?, ?>> cls) throws IOException,
-            UnsupportedEncodingException {
+    static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor, RegionLocator regionLocator,
+            Class<? extends OutputFormat<?, ?>> cls) throws IOException, UnsupportedEncodingException {
         Configuration conf = job.getConfiguration();
         job.setOutputKeyClass(ImmutableBytesWritable.class);
         job.setOutputValueClass(KeyValue.class);
@@ -415,15 +384,13 @@ static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor,
             LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());
         }
 
-        conf.setStrings("io.serializations", conf.get("io.serializations"),
-                MutationSerialization.class.getName(), ResultSerialization.class.getName(),
-                KeyValueSerialization.class.getName());
+        conf.setStrings("io.serializations", conf.get("io.serializations"), MutationSerialization.class.getName(),
+                ResultSerialization.class.getName(), KeyValueSerialization.class.getName());
 
         // Use table's region boundaries for TOP split points.
         LOG.info("Looking up current regions for table " + tableDescriptor.getTableName());
         List<ImmutableBytesWritable> startKeys = getRegionStartKeys(regionLocator);
-        LOG.info("Configuring " + startKeys.size() + " reduce partitions " +
-                "to match current region count");
+        LOG.info("Configuring " + startKeys.size() + " reduce partitions " + "to match current region count");
         job.setNumReduceTasks(startKeys.size());
 
         configurePartitioner(job, startKeys);
@@ -465,12 +432,9 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
      * @return a map from column family to the configured compression algorithm
      */
     @VisibleForTesting
-    static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration
-                                                                     conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                COMPRESSION_FAMILIES_CONF_KEY);
-        Map<byte[], Algorithm> compressionMap = new TreeMap<byte[],
-                Algorithm>(Bytes.BYTES_COMPARATOR);
+    static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, COMPRESSION_FAMILIES_CONF_KEY);
+        Map<byte[], Algorithm> compressionMap = new TreeMap<byte[], Algorithm>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
             compressionMap.put(e.getKey(), algorithm);
@@ -487,10 +451,8 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
      */
     @VisibleForTesting
     static Map<byte[], BloomType> createFamilyBloomTypeMap(Configuration conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                BLOOM_TYPE_FAMILIES_CONF_KEY);
-        Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[],
-                BloomType>(Bytes.BYTES_COMPARATOR);
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, BLOOM_TYPE_FAMILIES_CONF_KEY);
+        Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[], BloomType>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             BloomType bloomType = BloomType.valueOf(e.getValue());
             bloomTypeMap.put(e.getKey(), bloomType);
@@ -507,10 +469,8 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
      */
     @VisibleForTesting
     static Map<byte[], Integer> createFamilyBlockSizeMap(Configuration conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                BLOCK_SIZE_FAMILIES_CONF_KEY);
-        Map<byte[], Integer> blockSizeMap = new TreeMap<byte[],
-                Integer>(Bytes.BYTES_COMPARATOR);
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, BLOCK_SIZE_FAMILIES_CONF_KEY);
+        Map<byte[], Integer> blockSizeMap = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             Integer blockSize = Integer.parseInt(e.getValue());
             blockSizeMap.put(e.getKey(), blockSize);
@@ -527,19 +487,15 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
      *         configured data block type for the family
      */
     @VisibleForTesting
-    static Map<byte[], DataBlockEncoding> createFamilyDataBlockEncodingMap(
-            Configuration conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                DATABLOCK_ENCODING_FAMILIES_CONF_KEY);
-        Map<byte[], DataBlockEncoding> encoderMap = new TreeMap<byte[],
-                DataBlockEncoding>(Bytes.BYTES_COMPARATOR);
+    static Map<byte[], DataBlockEncoding> createFamilyDataBlockEncodingMap(Configuration conf) {
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, DATABLOCK_ENCODING_FAMILIES_CONF_KEY);
+        Map<byte[], DataBlockEncoding> encoderMap = new TreeMap<byte[], DataBlockEncoding>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));
         }
         return encoderMap;
     }
 
-
     /**
      * Run inside the task to deserialize column family to given conf value map.
      *
@@ -547,8 +503,7 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
      * @param confName conf key to read from the configuration
      * @return a map of column family to the given configuration value
      */
-    private static Map<byte[], String> createFamilyConfValueMap(
-            Configuration conf, String confName) {
+    private static Map<byte[], String> createFamilyConfValueMap(Configuration conf, String confName) {
         Map<byte[], String> confValMap = new TreeMap<byte[], String>(Bytes.BYTES_COMPARATOR);
         String confVal = conf.get(confName, "");
         for (String familyConf : confVal.split("&")) {
@@ -557,7 +512,7 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
                 continue;
             }
             try {
-                confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(),
+                confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(StandardCharsets.UTF_8),
                         URLDecoder.decode(familySplit[1], "UTF-8"));
             } catch (UnsupportedEncodingException e) {
                 // will not happen with UTF-8 encoding
@@ -571,8 +526,7 @@ public static void configureIncrementalLoadMap(Job job, Table table) throws IOEx
      * Configure <code>job</code> with a TotalOrderPartitioner, partitioning against
      * <code>splitPoints</code>. Cleans up the partitions file after job exists.
      */
-    static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoints)
-            throws IOException {
+    static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoints) throws IOException {
         Configuration conf = job.getConfiguration();
         // create the partitions file
         FileSystem fs = FileSystem.get(conf);
@@ -595,13 +549,12 @@ static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoin
      * @throws IOException
      *           on failure to read column family descriptors
      */
-    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
-            value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
+    @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
     @VisibleForTesting
     static void configureCompression(Configuration conf, HTableDescriptor tableDescriptor)
             throws UnsupportedEncodingException {
         StringBuilder compressionConfigValue = new StringBuilder();
-        if(tableDescriptor == null){
+        if (tableDescriptor == null) {
             // could happen with mock table instance
             return;
         }
@@ -611,11 +564,9 @@ static void configureCompression(Configuration conf, HTableDescriptor tableDescr
             if (i++ > 0) {
                 compressionConfigValue.append('&');
             }
-            compressionConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getNameAsString(), "UTF-8"));
+            compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             compressionConfigValue.append('=');
-            compressionConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getCompression().getName(), "UTF-8"));
+            compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getCompression().getName(), "UTF-8"));
         }
         // Get rid of the last ampersand
         conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
@@ -644,11 +595,9 @@ static void configureBlockSize(HTableDescriptor tableDescriptor, Configuration c
             if (i++ > 0) {
                 blockSizeConfigValue.append('&');
             }
-            blockSizeConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getNameAsString(), "UTF-8"));
+            blockSizeConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             blockSizeConfigValue.append('=');
-            blockSizeConfigValue.append(URLEncoder.encode(
-                    String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
+            blockSizeConfigValue.append(URLEncoder.encode(String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
         }
         // Get rid of the last ampersand
         conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, blockSizeConfigValue.toString());
@@ -677,8 +626,7 @@ static void configureBloomType(HTableDescriptor tableDescriptor, Configuration c
             if (i++ > 0) {
                 bloomTypeConfigValue.append('&');
             }
-            bloomTypeConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getNameAsString(), "UTF-8"));
+            bloomTypeConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             bloomTypeConfigValue.append('=');
             String bloomType = familyDescriptor.getBloomFilterType().toString();
             if (bloomType == null) {
@@ -699,8 +647,8 @@ static void configureBloomType(HTableDescriptor tableDescriptor, Configuration c
      *           on failure to read column family descriptors
      */
     @VisibleForTesting
-    static void configureDataBlockEncoding(HTableDescriptor tableDescriptor,
-                                           Configuration conf) throws UnsupportedEncodingException {
+    static void configureDataBlockEncoding(HTableDescriptor tableDescriptor, Configuration conf)
+            throws UnsupportedEncodingException {
         if (tableDescriptor == null) {
             // could happen with mock table instance
             return;
@@ -712,17 +660,14 @@ static void configureDataBlockEncoding(HTableDescriptor tableDescriptor,
             if (i++ > 0) {
                 dataBlockEncodingConfigValue.append('&');
             }
-            dataBlockEncodingConfigValue.append(
-                    URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
+            dataBlockEncodingConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             dataBlockEncodingConfigValue.append('=');
             DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();
             if (encoding == null) {
                 encoding = DataBlockEncoding.NONE;
             }
-            dataBlockEncodingConfigValue.append(URLEncoder.encode(encoding.toString(),
-                    "UTF-8"));
+            dataBlockEncodingConfigValue.append(URLEncoder.encode(encoding.toString(), "UTF-8"));
         }
-        conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,
-                dataBlockEncodingConfigValue.toString());
+        conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY, dataBlockEncodingConfigValue.toString());
     }
 }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 6e03b7e819..f1b46576cf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -20,6 +20,7 @@
 
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
@@ -44,7 +45,7 @@ private void clean() throws IOException {
         Admin hbaseAdmin = conn.getAdmin();
 
         for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
-            String name = descriptor.getNameAsString().toLowerCase();
+            String name = descriptor.getNameAsString().toLowerCase(Locale.ROOT);
             if (name.startsWith("kylin") || name.startsWith("_kylin")) {
                 String x = descriptor.getValue(IRealizationConstants.HTableTag);
                 System.out.println("table name " + descriptor.getNameAsString() + " host: " + x);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 292d9d6009..00635baa6a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -22,6 +22,7 @@
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -336,7 +337,7 @@ private static void doOpt(Opt opt) throws IOException, InterruptedException {
         case COPY_DICT_OR_SNAPSHOT: {
             String item = (String) opt.params[0];
 
-            if (item.toLowerCase().endsWith(".dict")) {
+            if (item.toLowerCase(Locale.ROOT).endsWith(".dict")) {
                 DictionaryManager dstDictMgr = DictionaryManager.getInstance(dstConfig);
                 DictionaryManager srcDicMgr = DictionaryManager.getInstance(srcConfig);
                 DictionaryInfo dictSrc = srcDicMgr.getDictionaryInfo(item);
@@ -368,7 +369,7 @@ private static void doOpt(Opt opt) throws IOException, InterruptedException {
                     logger.info("Item " + item + " is dup, instead " + dictSaved.getResourcePath() + " is reused");
                 }
 
-            } else if (item.toLowerCase().endsWith(".snapshot")) {
+            } else if (item.toLowerCase(Locale.ROOT).endsWith(".snapshot")) {
                 SnapshotManager dstSnapMgr = SnapshotManager.getInstance(dstConfig);
                 SnapshotManager srcSnapMgr = SnapshotManager.getInstance(srcConfig);
                 SnapshotTable snapSrc = srcSnapMgr.getSnapshotTable(item);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 23ec77ffda..6cd29d2ad2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -26,6 +26,7 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
@@ -115,7 +116,7 @@ public static void main(String[] args) throws IOException {
             List<String> tableNames = getHTableNames(kylinConfig);
             logger.info("Identify tables " + tableNames);
 
-            String filterType = args[curIdx++].toLowerCase();
+            String filterType = args[curIdx++].toLowerCase(Locale.ROOT);
             if (filterType.equals("-table")) {
                 tableNames = filterByTables(tableNames, Arrays.asList(args).subList(curIdx, args.length));
             } else if (filterType.equals("-cube")) {
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index a3171101ad..b7e97a1d8f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Random;
 
@@ -46,8 +47,8 @@
 public class GridTableHBaseBenchmark {
 
     private static final String TEST_TABLE = "GridTableTest";
-    private static final byte[] CF = "F".getBytes();
-    private static final byte[] QN = "C".getBytes();
+    private static final byte[] CF = "F".getBytes(StandardCharsets.UTF_8);
+    private static final byte[] QN = "C".getBytes(StandardCharsets.UTF_8);
     private static final int N_ROWS = 10000;
     private static final int CELL_SIZE = 128 * 1024; // 128 KB
     private static final double DFT_HIT_RATIO = 0.3;
@@ -83,7 +84,8 @@ public static void testGridTable(double hitRatio, double indexRatio) throws IOEx
         Hits hits = new Hits(N_ROWS, hitRatio, indexRatio);
 
         for (int i = 0; i < ROUND; i++) {
-            System.out.println("==================================== ROUND " + (i + 1) + " ========================================");
+            System.out.println("==================================== ROUND " + (i + 1)
+                    + " ========================================");
             testRowScanWithIndex(conn, hits.getHitsForRowScanWithIndex());
             testRowScanNoIndexFullScan(conn, hits.getHitsForRowScanNoIndex());
             testRowScanNoIndexSkipScan(conn, hits.getHitsForRowScanNoIndex());
@@ -386,7 +388,8 @@ public void markStart() {
         public void markEnd() {
             endTime = System.currentTimeMillis();
             System.out.println();
-            System.out.println(name + " ends, " + (endTime - startTime) + " ms, " + rowsRead + " rows read, " + bytesRead + " bytes read");
+            System.out.println(name + " ends, " + (endTime - startTime) + " ms, " + rowsRead + " rows read, "
+                    + bytesRead + " bytes read");
         }
     }
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 8dd2164a39..47f4c586f2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -19,12 +19,15 @@
 package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Calendar;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
+import java.util.TimeZone;
 import java.util.concurrent.Semaphore;
 
 import org.apache.commons.io.IOUtils;
@@ -54,8 +57,8 @@
     private static final Logger logger = LoggerFactory.getLogger(HbaseStreamingInput.class);
 
     private static final int CELL_SIZE = 128 * 1024; // 128 KB
-    private static final byte[] CF = "F".getBytes();
-    private static final byte[] QN = "C".getBytes();
+    private static final byte[] CF = "F".getBytes(StandardCharsets.UTF_8);
+    private static final byte[] QN = "C".getBytes(StandardCharsets.UTF_8);
 
     public static void createTable(String tableName) throws IOException {
         Connection conn = getConnection();
@@ -197,7 +200,8 @@ public void run() {
                         logger.error("value size invalid!!!!!");
                     }
 
-                    hash += Arrays.hashCode(Arrays.copyOfRange(value, cell.getValueOffset(), cell.getValueLength() + cell.getValueOffset()));
+                    hash += Arrays.hashCode(Arrays.copyOfRange(value, cell.getValueOffset(),
+                            cell.getValueLength() + cell.getValueOffset()));
                     rowCount++;
                 }
                 scanner.close();
@@ -231,8 +235,8 @@ private static Connection getConnection() throws IOException {
     }
 
     private static String formatTime(long time) {
-        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
-        Calendar cal = Calendar.getInstance();
+        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.ROOT);
+        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         cal.setTimeInMillis(time);
         return dateFormat.format(cal.getTime());
     }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 42a54c8caf..3f290ac5bf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -69,11 +70,11 @@ public static void main(String[] args) throws Exception {
         }
 
         List<String> tableNames = getHTableNames(KylinConfig.getInstanceFromEnv());
-        if (!args[0].toLowerCase().equals("-from")) {
+        if (!args[0].toLowerCase(Locale.ROOT).equals("-from")) {
             printUsageAndExit();
         }
-        String oldHostValue = args[1].toLowerCase();
-        String filterType = args[2].toLowerCase();
+        String oldHostValue = args[1].toLowerCase(Locale.ROOT);
+        String filterType = args[2].toLowerCase(Locale.ROOT);
         if (filterType.equals("-table")) {
             tableNames = filterByTables(tableNames, Arrays.asList(args).subList(3, args.length));
         } else if (filterType.equals("-cube")) {
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
index d2b3488dc4..0b83af4a9c 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
@@ -19,8 +19,11 @@
 package org.apache.kylin.storage.hbase.common;
 
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
@@ -53,9 +56,9 @@ public void setup() {
 
         if (!testFile.exists()) {
 
-            FileWriter writer;
+            Writer writer;
             try {
-                writer = new FileWriter(testFile);
+                writer = new OutputStreamWriter(new FileOutputStream(testFile), StandardCharsets.UTF_8);
                 writer.write("1 a\n");
                 writer.write("2 b\n");
 
@@ -86,7 +89,8 @@ public void testConnectToHive() throws SQLException {
         Statement stmt = con.createStatement();
         String tableName = "testHiveDriverTable";
         stmt.execute("drop table if exists " + tableName);
-        stmt.execute("create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
+        stmt.execute(
+                "create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
         // show tables
         String sql = "show tables '" + tableName + "'";
         System.out.println("Running: " + sql);
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
index 291072fd7f..c08faeb2e3 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
@@ -25,8 +25,10 @@
 import java.util.Collection;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 
+import java.util.TimeZone;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.metadata.filter.CaseTupleFilter;
 import org.apache.kylin.metadata.filter.ColumnTupleFilter;
@@ -101,12 +103,12 @@ protected CompareTupleFilter buildINCompareFilter(TblColRef dateColumn) throws P
         compareFilter.addChild(columnFilter);
 
         List<String> inValues = Lists.newArrayList();
-        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
         Date startDate = simpleDateFormat.parse("1970-01-01");
         Date endDate = simpleDateFormat.parse("2100-01-01");
-        Calendar start = Calendar.getInstance();
+        Calendar start = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         start.setTime(startDate);
-        Calendar end = Calendar.getInstance();
+        Calendar end = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         end.setTime(endDate);
         for (Date date = start.getTime(); start.before(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
             inValues.add(simpleDateFormat.format(date));
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java
index eba4a3793c..8aeeca498d 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java
@@ -21,6 +21,7 @@
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.apache.hadoop.hbase.KeyValue;
@@ -68,13 +69,13 @@ public void testMapper2() throws IOException {
         Pair<RowKeyWritable, KeyValue> p2 = result.get(1);
 
         assertEquals(key, p1.getFirst());
-        assertEquals("cf1", new String(p1.getSecond().getFamily()));
-        assertEquals("usd_amt", new String(p1.getSecond().getQualifier()));
-        assertEquals("35.43", new String(p1.getSecond().getValue()));
+        assertEquals("cf1", new String(p1.getSecond().getFamily(), StandardCharsets.UTF_8));
+        assertEquals("usd_amt", new String(p1.getSecond().getQualifier(), StandardCharsets.UTF_8));
+        assertEquals("35.43", new String(p1.getSecond().getValue(), StandardCharsets.UTF_8));
 
         assertEquals(key, p2.getFirst());
-        assertEquals("cf1", new String(p2.getSecond().getFamily()));
-        assertEquals("item_count", new String(p2.getSecond().getQualifier()));
-        assertEquals("2", new String(p2.getSecond().getValue()));
+        assertEquals("cf1", new String(p2.getSecond().getFamily(), StandardCharsets.UTF_8));
+        assertEquals("item_count", new String(p2.getSecond().getQualifier(), StandardCharsets.UTF_8));
+        assertEquals("2", new String(p2.getSecond().getValue(), StandardCharsets.UTF_8));
     }
 }
diff --git a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
index 94df3bc1e0..eb59d2024c 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
@@ -24,6 +24,7 @@
 import java.nio.charset.Charset;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
@@ -84,11 +85,9 @@ protected Options getOptions() {
     protected void execute(OptionsHelper optionsHelper) throws Exception {
         String exportDest = optionsHelper.getOptionValue(options.getOption("destDir"));
         boolean shouldCompress = optionsHelper.hasOption(OPTION_COMPRESS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS))
-                : true;
+                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
         boolean isSubmodule = optionsHelper.hasOption(OPTION_SUBMODULE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_SUBMODULE))
-                : false;
+                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_SUBMODULE)) : false;
         packageType = optionsHelper.getOptionValue(OPTION_PACKAGETYPE);
 
         if (packageType == null)
@@ -102,8 +101,8 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         }
 
         // create new folder to contain the output
-        String packageName = packageType.toLowerCase() + "_"
-                + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
+        String packageName = packageType.toLowerCase(Locale.ROOT) + "_"
+                + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss", Locale.ROOT).format(new Date());
         if (!isSubmodule && new File(exportDest).exists()) {
             exportDest = exportDest + packageName + "/";
         }
@@ -156,8 +155,8 @@ private void dumpBasicDiagInfo() throws IOException {
 
         StringBuilder basicSb = new StringBuilder();
         basicSb.append("MetaStoreID: ").append(ToolUtil.getMetaStoreId()).append("\n");
-        basicSb.append("PackageType: ").append(packageType.toUpperCase()).append("\n");
-        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z");
+        basicSb.append("PackageType: ").append(packageType.toUpperCase(Locale.ROOT)).append("\n");
+        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z", Locale.ROOT);
         basicSb.append("PackageTimestamp: ").append(format.format(new Date())).append("\n");
         basicSb.append("Host: ").append(ToolUtil.getHostName()).append("\n");
         FileUtils.writeStringToFile(new File(exportDir, "info"), basicSb.toString(), Charset.defaultCharset());
diff --git a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
index c42254b904..e15007ee80 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.tool;
 
+import java.util.Locale;
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclTableMigrationTool;
 
@@ -31,7 +33,7 @@ public static void main(String[] args) throws Exception {
         if (args == null || args.length != 1) {
             throw new IllegalArgumentException("Args num error");
         }
-        String cmd = args[0].toUpperCase();
+        String cmd = args[0].toUpperCase(Locale.ROOT);
         AclTableMigrationTool tool = new AclTableMigrationTool();
         switch (cmd) {
         case MIGRATE:
@@ -40,7 +42,8 @@ public static void main(String[] args) throws Exception {
         case CHECK:
             boolean needMigrate = tool.checkIfNeedMigrate(KylinConfig.getInstanceFromEnv());
             if (needMigrate) {
-                System.out.println("Found ACL metadata in legacy format. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
+                System.out.println(
+                        "Found ACL metadata in legacy format. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
                 System.exit(2);
             }
             break;
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index 5426b627ea..6909b741d2 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -23,6 +23,7 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -440,7 +441,7 @@ private void doOpt(Opt opt) throws IOException, InterruptedException {
         case COPY_DICT_OR_SNAPSHOT: {
             String item = (String) opt.params[0];
 
-            if (item.toLowerCase().endsWith(".dict")) {
+            if (item.toLowerCase(Locale.ROOT).endsWith(".dict")) {
                 DictionaryManager dstDictMgr = DictionaryManager.getInstance(dstConfig);
                 DictionaryManager srcDicMgr = DictionaryManager.getInstance(srcConfig);
                 DictionaryInfo dictSrc = srcDicMgr.getDictionaryInfo(item);
@@ -472,7 +473,7 @@ private void doOpt(Opt opt) throws IOException, InterruptedException {
                     logger.info("Item " + item + " is dup, instead " + dictSaved.getResourcePath() + " is reused");
                 }
 
-            } else if (item.toLowerCase().endsWith(".snapshot")) {
+            } else if (item.toLowerCase(Locale.ROOT).endsWith(".snapshot")) {
                 SnapshotManager dstSnapMgr = SnapshotManager.getInstance(dstConfig);
                 SnapshotManager srcSnapMgr = SnapshotManager.getInstance(srcConfig);
                 SnapshotTable snapSrc = srcSnapMgr.getSnapshotTable(item);
diff --git a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
index cda3fec0a0..80af352d95 100644
--- a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
@@ -23,6 +23,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.kylin.common.KylinConfig;
@@ -107,7 +108,8 @@ protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throw
         for (File logFile : requiredLogFiles) {
             logger.info("Log file:" + logFile.getAbsolutePath());
             if (logFile.exists()) {
-                String cmd = String.format("cp %s %s", logFile.getAbsolutePath(), exportDir.getAbsolutePath());
+                String cmd = String.format(Locale.ROOT, "cp %s %s", logFile.getAbsolutePath(), exportDir
+                    .getAbsolutePath());
                 config.getCliCommandExecutor().execute(cmd);
             }
         }
diff --git a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeInstanceCreator.java b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeInstanceCreator.java
index c1672c0b3b..0fcec3b607 100644
--- a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeInstanceCreator.java
+++ b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeInstanceCreator.java
@@ -42,7 +42,7 @@ public static void main(String[] args) throws Exception {
         CubeManager.CUBE_SERIALIZER.serialize(cubeInstance, dout);
         dout.close();
         buf.close();
-        System.out.println(buf.toString());
+        System.out.println(buf.toString("UTF-8"));
     }
 
     public static CubeInstance generateKylinCubeInstanceForMetricsQuery(String owner, KylinConfig config,
diff --git a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/HiveTableCreator.java b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/HiveTableCreator.java
index 35b296a682..d07cd080ab 100644
--- a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/HiveTableCreator.java
+++ b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/HiveTableCreator.java
@@ -20,6 +20,7 @@
 
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metrics.lib.ActiveReservoirReporter;
@@ -264,7 +265,7 @@ public static HiveTypeEnum getByTypeName(String typeName) {
                 return null;
             }
             for (HiveTypeEnum hiveType : HiveTypeEnum.values()) {
-                if (hiveType.typeName.equals(typeName.toLowerCase())) {
+                if (hiveType.typeName.equals(typeName.toLowerCase(Locale.ROOT))) {
                     return hiveType;
                 }
             }
diff --git a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/KylinTableCreator.java b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/KylinTableCreator.java
index 108c2105dc..c84df4a8e5 100644
--- a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/KylinTableCreator.java
+++ b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/KylinTableCreator.java
@@ -47,7 +47,7 @@ public static void main(String[] args) throws Exception {
         TableMetadataManager.TABLE_SERIALIZER.serialize(kylinTable, dout);
         dout.close();
         buf.close();
-        System.out.println(buf.toString());
+        System.out.println(buf.toString("UTF-8"));
     }
 
     public static TableDesc generateKylinTableForMetricsQuery(KylinConfig kylinConfig, SinkTool sinkTool) {
diff --git a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/ModelCreator.java b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/ModelCreator.java
index 0679f0af15..cc94411f53 100644
--- a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/ModelCreator.java
+++ b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/ModelCreator.java
@@ -54,7 +54,7 @@ public static void main(String[] args) throws Exception {
         MODELDESC_SERIALIZER.serialize(kylinModel, dout);
         dout.close();
         buf.close();
-        System.out.println(buf.toString());
+        System.out.println(buf.toString("UTF-8"));
     }
 
     public static PartitionDesc getPartitionDesc(String tableName) {
diff --git a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/SCCreator.java b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/SCCreator.java
index 38edb1b2f5..8a6c98cd6f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/SCCreator.java
+++ b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/SCCreator.java
@@ -24,8 +24,10 @@
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
@@ -246,7 +248,7 @@ private void generateKylinProjectFileForSystemCube(String output, ProjectInstanc
         dout.close();
         buf.close();
 
-        saveToFile(fileName, buf.toString());
+        saveToFile(fileName, buf.toString("UTF-8"));
     }
 
     private void saveToFile(String fileName, String contents) throws IOException {
@@ -255,7 +257,8 @@ private void saveToFile(String fileName, String contents) throws IOException {
             parentDir.mkdirs();
         }
 
-        try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(fileName))) {
+        try (BufferedWriter bufferedWriter = new BufferedWriter(
+                new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8))) {
             bufferedWriter.append(contents);
         }
     }
diff --git a/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java b/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
index feb404a6a6..e28e567cd1 100644
--- a/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
+++ b/tool/src/test/java/org/apache/kylin/tool/KylinConfigCLITest.java
@@ -37,7 +37,7 @@
     public void testGetProperty() throws IOException {
         PrintStream o = System.out;
         File f = File.createTempFile("cfg", ".tmp");
-        PrintStream tmpOut = new PrintStream(new FileOutputStream(f));
+        PrintStream tmpOut = new PrintStream(new FileOutputStream(f), false, "UTF-8");
         System.setOut(tmpOut);
         KylinConfigCLI.main(new String[] { "kylin.storage.url" });
 
@@ -52,13 +52,13 @@ public void testGetProperty() throws IOException {
     public void testGetPrefix() throws IOException {
         PrintStream o = System.out;
         File f = File.createTempFile("cfg", ".tmp");
-        PrintStream tmpOut = new PrintStream(new FileOutputStream(f));
+        PrintStream tmpOut = new PrintStream(new FileOutputStream(f), false, "UTF-8");
         System.setOut(tmpOut);
         KylinConfigCLI.main(new String[] { "kylin.cube.engine." });
 
         String val = FileUtils.readFileToString(f, Charset.defaultCharset()).trim();
-        assertEquals("2=org.apache.kylin.engine.mr.MRBatchCubingEngine2"
-                + System.lineSeparator() + "0=org.apache.kylin.engine.mr.MRBatchCubingEngine", val);
+        assertEquals("2=org.apache.kylin.engine.mr.MRBatchCubingEngine2" + System.lineSeparator()
+                + "0=org.apache.kylin.engine.mr.MRBatchCubingEngine", val);
 
         tmpOut.close();
         FileUtils.forceDelete(f);


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services