You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by sh...@apache.org on 2018/11/30 11:48:51 UTC

[kylin] branch 2.5.x updated: KYLIN-3187 Forbid calls to JDK APIs that use the default locale

This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch 2.5.x
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/2.5.x by this push:
     new 09851df  KYLIN-3187 Forbid calls to JDK APIs that use the default locale
09851df is described below

commit 09851df665c38d4c30d3611808382baf1648d239
Author: etherge <et...@163.com>
AuthorDate: Thu Aug 30 21:20:09 2018 +0800

    KYLIN-3187 Forbid calls to JDK APIs that use the default locale
---
 .../test/java/org/apache/kylin/job/DeployUtil.java |  38 ++--
 .../job/streaming/StreamingTableDataGenerator.java |   5 +-
 .../kylin/common/BackwardCompatibilityConfig.java  |  20 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |   6 +-
 .../kylin/common/lock/DistributedLockFactory.java  |   7 +-
 .../common/metrics/common/MetricsNameBuilder.java  |   8 +-
 .../common/metrics/metrics2/CodahaleMetrics.java   |   3 +-
 .../metrics/metrics2/JsonFileMetricsReporter.java  |   4 +-
 .../kylin/common/persistence/ResourceTool.java     |   7 +-
 .../common/persistence/RootPersistentEntity.java   |   7 +-
 .../apache/kylin/common/restclient/RestClient.java |  15 +-
 .../java/org/apache/kylin/common/util/Bytes.java   |  96 ++++++---
 .../org/apache/kylin/common/util/BytesUtil.java    |   3 +-
 .../kylin/common/util/CaseInsensitiveString.java   |   4 +-
 .../kylin/common/util/CliCommandExecutor.java      |   4 +-
 .../org/apache/kylin/common/util/EncryptUtil.java  |   6 +-
 .../apache/kylin/common/util/HiveCmdBuilder.java   |   5 +-
 .../org/apache/kylin/common/util/SSHClient.java    |  11 +-
 .../apache/kylin/common/util/SetThreadName.java    |   3 +-
 .../org/apache/kylin/common/util/StringUtil.java   |   3 +-
 .../org/apache/kylin/common/util/TimeUtil.java     |   9 +-
 .../org/apache/kylin/common/util/ZipFileUtils.java |   3 +-
 .../org/apache/kylin/common/util/BasicTest.java    |  25 +--
 .../kylin/common/util/InstallJarIntoMavenTest.java |  12 +-
 .../org/apache/kylin/common/util/TimeUtilTest.java |   7 +-
 .../java/org/apache/kylin/cube/CubeSegment.java    |   3 +-
 .../apache/kylin/cube/cuboid/CuboidModeEnum.java   |   4 +-
 .../cube/cuboid/algorithm/BPUSCalculator.java      |  16 +-
 .../cuboid/algorithm/generic/GeneticAlgorithm.java |  12 +-
 .../cuboid/algorithm/greedy/GreedyAlgorithm.java   |  26 +--
 .../java/org/apache/kylin/cube/model/CubeDesc.java |  98 +++++-----
 .../kylin/cube/model/CubeJoinedFlatTableDesc.java  |   5 +-
 .../apache/kylin/cube/model/DictionaryDesc.java    |   6 +-
 .../org/apache/kylin/cube/model/DimensionDesc.java |   3 +-
 .../apache/kylin/cube/model/HBaseMappingDesc.java  |  12 +-
 .../org/apache/kylin/cube/util/CubingUtils.java    |  18 +-
 .../java/org/apache/kylin/cube/CubeDescTest.java   |  12 +-
 .../kylin/cube/common/BytesSplitterTest.java       |  25 ++-
 .../kylin/cube/cuboid/TreeCuboidSchedulerTest.java |   8 +-
 .../gridtable/AggregationCacheMemSizeTest.java     |  51 ++---
 .../apache/kylin/dict/AppendTrieDictionary.java    |  46 +++--
 .../apache/kylin/dict/GlobalDictionaryBuilder.java |  11 +-
 .../org/apache/kylin/dict/ShrunkenDictionary.java  |   3 +-
 .../apache/kylin/dict/global/AppendDictNode.java   |  26 ++-
 .../apache/kylin/dict/global/AppendDictSlice.java  |  22 ++-
 .../kylin/dict/global/GlobalDictHDFSStore.java     |  18 +-
 .../dict/global/SegmentAppendTrieDictBuilder.java  |   7 +-
 .../kylin/dict/AppendTrieDictionaryTest.java       |  55 ++++--
 .../apache/kylin/dict/NumberDictionaryTest.java    |  20 +-
 .../kylin/dict/TrieDictionaryForestTest.java       |   5 +-
 .../java/org/apache/kylin/job/JoinedFlatTable.java |  10 +-
 .../apache/kylin/job/engine/JobEngineConfig.java   |   3 +-
 .../kylin/job/execution/ExecutableManager.java     |   3 +-
 .../job/impl/threadpool/DefaultScheduler.java      |   7 +-
 .../job/impl/threadpool/DistributedScheduler.java  |   7 +-
 .../apache/kylin/job/metrics/JobMetricsFacade.java |   4 +-
 .../apache/kylin/dimension/DictionaryDimEnc.java   |   4 +-
 .../apache/kylin/measure/MeasureTypeFactory.java   |   9 +-
 .../extendedcolumn/ExtendedColumnMeasureType.java  |  16 +-
 .../kylin/metadata/TableMetadataManager.java       |   5 +-
 .../kylin/metadata/datatype/BooleanSerializer.java |   6 +-
 .../apache/kylin/metadata/datatype/DataType.java   |   3 +-
 .../filter/BuiltInFunctionTupleFilter.java         |   3 +-
 .../metadata/filter/function/BuiltInMethod.java    |   5 +-
 .../kylin/metadata/filter/function/Functions.java  |   3 +-
 .../metadata/filter/function/LikeMatchers.java     |   3 +-
 .../apache/kylin/metadata/model/ColumnDesc.java    |   3 +-
 .../apache/kylin/metadata/model/DataModelDesc.java |  13 +-
 .../apache/kylin/metadata/model/DatabaseDesc.java  |   3 +-
 .../kylin/metadata/model/ExternalFilterDesc.java   |   3 +-
 .../apache/kylin/metadata/model/FunctionDesc.java  |   5 +-
 .../kylin/metadata/model/ModelDimensionDesc.java   |   3 +-
 .../apache/kylin/metadata/model/PartitionDesc.java |  12 +-
 .../org/apache/kylin/metadata/model/TableDesc.java |  18 +-
 .../apache/kylin/metadata/model/TableExtDesc.java  |   3 +-
 .../org/apache/kylin/metadata/model/TblColRef.java |   5 +-
 .../kylin/metadata/model/tool/CalciteParser.java   |   3 +-
 .../kylin/metadata/project/ProjectInstance.java    |   7 +-
 .../kylin/metadata/project/ProjectManager.java     |  38 ++--
 .../source/adhocquery/HivePushDownConverter.java   |   5 +-
 .../kylin/source/datagen/ColumnGenerator.java      |   4 +-
 .../kylin/source/datagen/ModelDataGenerator.java   |  38 ++--
 .../kylin/measure/AggregatorMemEstimateTest.java   |  16 +-
 .../ExtendedColumnSerializerTest.java              |   5 +-
 .../apache/kylin/measure/topn/TopNCounterTest.java |  40 ++--
 .../org/apache/kylin/metrics/MetricsManager.java   |   1 +
 .../apache/kylin/metrics/lib/impl/RecordEvent.java |   3 +-
 .../metrics/lib/impl/RecordEventTimeDetail.java    |  11 +-
 .../kylin/metrics/lib/impl/TimePropertyEnum.java   |   4 +-
 .../kylin/metrics/property/JobPropertyEnum.java    |   4 +-
 .../metrics/property/QueryCubePropertyEnum.java    |   4 +-
 .../kylin/metrics/property/QueryPropertyEnum.java  |   4 +-
 .../metrics/property/QueryRPCPropertyEnum.java     |   4 +-
 dev-support/signatures.txt                         |  25 +++
 .../mr/BatchOptimizeJobCheckpointBuilder.java      |   3 +-
 .../java/org/apache/kylin/engine/mr/CubingJob.java |   8 +-
 .../kylin/engine/mr/LookupSnapshotBuildJob.java    |   3 +-
 .../kylin/engine/mr/common/CubeStatsReader.java    |  10 +-
 .../apache/kylin/engine/mr/steps/CuboidJob.java    |  10 +-
 .../kylin/engine/mr/steps/CuboidReducer.java       |   3 +-
 .../mr/steps/FactDistinctColumnsReducer.java       |  36 ++--
 .../mr/steps/FilterRecommendCuboidDataJob.java     |   3 +-
 .../mr/steps/InMemCuboidFromBaseCuboidJob.java     |   4 +-
 .../kylin/engine/mr/steps/InMemCuboidJob.java      |   3 +-
 .../kylin/engine/mr/steps/InMemCuboidReducer.java  |   3 +-
 .../kylin/engine/mr/steps/MergeCuboidJob.java      |   3 +-
 .../engine/mr/steps/SelfDefineSortableKey.java     |  11 +-
 .../mr/steps/UpdateCubeInfoAfterBuildStep.java     |  21 +-
 .../engine/mr/steps/UpdateOldCuboidShardJob.java   |   3 +-
 .../kylin/engine/mr/SortedColumnReaderTest.java    |  16 +-
 .../kylin/engine/spark/SparkCubingByLayer.java     |  36 ++--
 .../apache/kylin/engine/spark/SparkExecutable.java |  20 +-
 .../kylin/engine/spark/SparkFactDistinct.java      | 105 +++++-----
 .../java/org/apache/kylin/jdbc/KylinClient.java    |   6 +-
 .../apache/kylin/cube/ITDictionaryManagerTest.java |  10 +-
 .../cube/cuboid/algorithm/ITAlgorithmTestBase.java |   7 +-
 .../kylin/provision/BuildCubeWithEngine.java       |  33 ++--
 .../kylin/provision/BuildCubeWithStream.java       |   3 +-
 .../java/org/apache/kylin/provision/MockKafka.java |  13 +-
 .../java/org/apache/kylin/query/H2Database.java    |   7 +-
 .../java/org/apache/kylin/query/KylinTestBase.java |   7 +-
 .../kylin/metrics/lib/impl/hive/HiveProducer.java  |   7 +-
 .../kylin/query/enumerator/HiveEnumerator.java     |   3 +-
 .../kylin/query/schema/OLAPSchemaFactory.java      |   3 +-
 .../kylin/query/security/QueryInterceptor.java     |   3 +-
 .../apache/kylin/query/security/TableLevelACL.java |   9 +-
 .../org/apache/kylin/query/util/QueryUtil.java     |   7 +-
 .../apache/kylin/query/util/TempStatementUtil.java |   8 +-
 .../kylin/rest/controller/AdminController.java     |   3 +-
 .../kylin/rest/controller/BasicController.java     |   7 +-
 .../kylin/rest/controller/CubeController.java      |  13 +-
 .../kylin/rest/controller/ModelController.java     |   8 +-
 .../kylin/rest/controller/ProjectController.java   |   4 +-
 .../kylin/rest/controller/QueryController.java     |   6 +-
 .../kylin/rest/controller/TableController.java     |  35 ++--
 .../kylin/rest/metrics/QueryMetricsFacade.java     |  16 +-
 .../kylin/rest/security/AuthoritiesPopulator.java  |   7 +-
 .../rest/security/KylinAclPermissionEvaluator.java |   5 +-
 .../org/apache/kylin/rest/security/MockHTable.java |  79 +++++---
 .../security/PasswordPlaceholderConfigurer.java    |   3 +-
 .../org/apache/kylin/rest/service/AclService.java  |  13 +-
 .../kylin/rest/service/AclTableMigrationTool.java  |   7 +-
 .../apache/kylin/rest/service/AdminService.java    |   8 +-
 .../org/apache/kylin/rest/service/CubeService.java |  86 +++++----
 .../kylin/rest/service/DashboardService.java       |  87 +++++----
 .../kylin/rest/service/DiagnosisService.java       |   9 +-
 .../apache/kylin/rest/service/EncodingService.java |  12 +-
 .../kylin/rest/service/ExtFilterService.java       |   5 +-
 .../org/apache/kylin/rest/service/JobService.java  |  48 +++--
 .../kylin/rest/service/KafkaConfigService.java     |   7 +-
 .../kylin/rest/service/KylinUserService.java       |   7 +-
 .../apache/kylin/rest/service/ModelService.java    |  16 +-
 .../apache/kylin/rest/service/ProjectService.java  |   5 +-
 .../apache/kylin/rest/service/QueryService.java    |  68 ++++---
 .../kylin/rest/service/StreamingService.java       |   7 +-
 .../rest/service/TableSchemaUpdateChecker.java     |  45 +++--
 .../apache/kylin/rest/service/TableService.java    |  31 +--
 .../org/apache/kylin/rest/bean/BeanValidator.java  |  56 +++---
 .../rest/controller/StreamingControllerTest.java   |  33 ++--
 .../org/apache/kylin/source/hive/DBConnConf.java   |   4 +-
 .../apache/kylin/source/hive/HiveInputBase.java    |   3 +-
 .../kylin/source/hive/HiveMetadataExplorer.java    |  27 +--
 .../HiveColumnCardinalityUpdateJob.java            |  13 +-
 .../java/org/apache/kylin/source/jdbc/CmdStep.java |   3 +-
 .../org/apache/kylin/source/jdbc/JdbcExplorer.java |  42 ++--
 .../apache/kylin/source/jdbc/JdbcHiveMRInput.java  |  19 +-
 .../org/apache/kylin/source/jdbc/JdbcTable.java    |   9 +-
 .../apache/kylin/source/jdbc/JdbcTableReader.java  |   5 +-
 .../source/jdbc/metadata/DefaultJdbcMetadata.java  |   3 +-
 .../source/jdbc/metadata/JdbcMetadataFactory.java  |   3 +-
 .../apache/kylin/source/jdbc/JdbcExplorerTest.java |   5 +-
 .../apache/kylin/source/kafka/KafkaMRInput.java    |   8 +-
 .../apache/kylin/source/kafka/KafkaSparkInput.java |   5 +-
 .../kylin/source/kafka/TimedJsonStreamParser.java  |  11 +-
 .../kafka/hadoop/KafkaInputRecordReader.java       |  17 +-
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java        |  43 +++--
 .../hbase/lookup/LookupTableToHFileJob.java        |   3 +-
 .../kylin/storage/hbase/steps/CreateHTableJob.java |  22 ++-
 .../kylin/storage/hbase/steps/CubeHTableUtil.java  |   3 +-
 .../storage/hbase/steps/HFileOutputFormat3.java    | 215 ++++++++-------------
 .../kylin/storage/hbase/util/CleanHtableCLI.java   |   3 +-
 .../kylin/storage/hbase/util/CubeMigrationCLI.java |   5 +-
 .../storage/hbase/util/DeployCoprocessorCLI.java   |   3 +-
 .../hbase/util/GridTableHBaseBenchmark.java        |  11 +-
 .../storage/hbase/util/HbaseStreamingInput.java    |  14 +-
 .../storage/hbase/util/UpdateHTableHostCLI.java    |   7 +-
 .../storage/hbase/common/HiveJDBCClientTest.java   |  12 +-
 .../hbase/common/coprocessor/FilterBaseTest.java   |   8 +-
 .../storage/hbase/steps/CubeHFileMapperTest.java   |  13 +-
 .../apache/kylin/tool/AbstractInfoExtractor.java   |  15 +-
 .../apache/kylin/tool/AclTableMigrationCLI.java    |   7 +-
 .../org/apache/kylin/tool/CubeMigrationCLI.java    |   5 +-
 .../org/apache/kylin/tool/KylinLogExtractor.java   |   4 +-
 .../metrics/systemcube/CubeInstanceCreator.java    |   2 +-
 .../tool/metrics/systemcube/HiveTableCreator.java  |   3 +-
 .../tool/metrics/systemcube/KylinTableCreator.java |   2 +-
 .../tool/metrics/systemcube/ModelCreator.java      |   2 +-
 .../kylin/tool/metrics/systemcube/SCCreator.java   |   9 +-
 .../org/apache/kylin/tool/KylinConfigCLITest.java  |   8 +-
 199 files changed, 1748 insertions(+), 1209 deletions(-)

diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
index 524c2e4..cda850f 100644
--- a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
@@ -22,12 +22,14 @@ import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
-import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
@@ -76,7 +78,7 @@ public class DeployUtil {
             CubeDescManager.getInstance(config()).updateCubeDesc(cube.getDescriptor());//enforce signature updating
         }
     }
-    
+
     public static void deployMetadata() throws IOException {
         deployMetadata(LocalFileMetadataTestCase.LOCALMETA_TEST_DATA);
     }
@@ -92,7 +94,8 @@ public class DeployUtil {
     private static String getPomVersion() {
         try {
             MavenXpp3Reader pomReader = new MavenXpp3Reader();
-            Model model = pomReader.read(new FileReader("../pom.xml"));
+            Model model = pomReader
+                    .read(new InputStreamReader(new FileInputStream("../pom.xml"), StandardCharsets.UTF_8));
             return model.getVersion();
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
@@ -138,9 +141,11 @@ public class DeployUtil {
         deployTables(modelName);
     }
 
-    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords, String cubeName, StreamDataLoader streamDataLoader) throws IOException {
+    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords,
+            String cubeName, StreamDataLoader streamDataLoader) throws IOException {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
-        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime, cubeInstance.getRootFactTable(), cubeInstance.getProject());
+        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime,
+                cubeInstance.getRootFactTable(), cubeInstance.getProject());
         //load into kafka
         streamDataLoader.loadIntoKafka(data);
         logger.info("Write {} messages into {}", data.size(), streamDataLoader.toString());
@@ -151,7 +156,8 @@ public class DeployUtil {
         TimedJsonStreamParser timedJsonStreamParser = new TimedJsonStreamParser(tableColumns, null);
         StringBuilder sb = new StringBuilder();
         for (String json : data) {
-            List<String> rowColumns = timedJsonStreamParser.parse(ByteBuffer.wrap(json.getBytes())).get(0).getData();
+            List<String> rowColumns = timedJsonStreamParser
+                    .parse(ByteBuffer.wrap(json.getBytes(StandardCharsets.UTF_8))).get(0).getData();
             sb.append(StringUtils.join(rowColumns, ","));
             sb.append(System.getProperty("line.separator"));
         }
@@ -200,26 +206,26 @@ public class DeployUtil {
 
         Set<TableRef> tables = model.getAllTables();
         Set<String> TABLE_NAMES = new HashSet<String>();
-        for (TableRef tr:tables){
-            if (!tr.getTableDesc().isView()){
+        for (TableRef tr : tables) {
+            if (!tr.getTableDesc().isView()) {
                 String tableName = tr.getTableName();
                 String schema = tr.getTableDesc().getDatabase();
-                String identity = String.format("%s.%s", schema, tableName);
+                String identity = String.format(Locale.ROOT, "%s.%s", schema, tableName);
                 TABLE_NAMES.add(identity);
             }
         }
         TABLE_NAMES.add(TABLE_SELLER_TYPE_DIM_TABLE); // the wrapper view VIEW_SELLER_TYPE_DIM need this table
-        
+
         // scp data files, use the data from hbase, instead of local files
         File tempDir = Files.createTempDir();
         String tempDirAbsPath = tempDir.getAbsolutePath();
         for (String tablename : TABLE_NAMES) {
-            tablename = tablename.toUpperCase();
+            tablename = tablename.toUpperCase(Locale.ROOT);
 
             File localBufferFile = new File(tempDirAbsPath + "/" + tablename + ".csv");
             localBufferFile.createNewFile();
 
-            logger.info(String.format("get resource from hbase:/data/%s.csv", tablename));
+            logger.info(String.format(Locale.ROOT, "get resource from hbase:/data/%s.csv", tablename));
             InputStream hbaseDataStream = metaMgr.getStore().getResource("/data/" + tablename + ".csv").inputStream;
             FileOutputStream localFileStream = new FileOutputStream(localBufferFile);
             IOUtils.copy(hbaseDataStream, localFileStream);
@@ -233,21 +239,21 @@ public class DeployUtil {
 
         ISampleDataDeployer sampleDataDeployer = SourceManager.getSource(model.getRootFactTable().getTableDesc())
                 .getSampleDataDeployer();
-        
+
         // create hive tables
         sampleDataDeployer.createSampleDatabase("EDW");
         for (String tablename : TABLE_NAMES) {
-            logger.info(String.format("get table desc %s", tablename));
+            logger.info(String.format(Locale.ROOT, "get table desc %s", tablename));
             sampleDataDeployer.createSampleTable(metaMgr.getTableDesc(tablename, model.getProject()));
         }
 
         // load data to hive tables
         // LOAD DATA LOCAL INPATH 'filepath' [OVERWRITE] INTO TABLE tablename
         for (String tablename : TABLE_NAMES) {
-            logger.info(String.format("load data into %s", tablename));
+            logger.info(String.format(Locale.ROOT, "load data into %s", tablename));
             sampleDataDeployer.loadSampleData(tablename, tempDirAbsPath);
         }
-        
+
         // create the view automatically here
         sampleDataDeployer.createWrapperView(TABLE_SELLER_TYPE_DIM_TABLE, VIEW_SELLER_TYPE_DIM);
     }
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
index 1fef07e..59b6ee6 100644
--- a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamingTableDataGenerator.java
@@ -20,6 +20,7 @@ package org.apache.kylin.job.streaming;
 
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 
 import org.apache.kylin.common.KylinConfig;
@@ -66,7 +67,7 @@ public class StreamingTableDataGenerator {
             kvs.clear();
             kvs.put("timestamp", String.valueOf(time));
             for (ColumnDesc columnDesc : tableDesc.getColumns()) {
-                String lowerCaseColumnName = columnDesc.getName().toLowerCase();
+                String lowerCaseColumnName = columnDesc.getName().toLowerCase(Locale.ROOT);
                 DataType dataType = columnDesc.getType();
                 if (dataType.isDateTimeFamily()) {
                     //TimedJsonStreamParser will derived minute_start,hour_start,day_start from timestamp
@@ -78,7 +79,7 @@ public class StreamingTableDataGenerator {
                     int v = r.nextInt(10000);
                     kvs.put(lowerCaseColumnName, String.valueOf(v));
                 } else if (dataType.isNumberFamily()) {
-                    String v = String.format("%.4f", r.nextDouble() * 100);
+                    String v = String.format(Locale.ROOT, "%.4f", r.nextDouble() * 100);
                     kvs.put(lowerCaseColumnName, v);
                 }
             }
diff --git a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
index 426ebb9..cbe2e4c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/BackwardCompatibilityConfig.java
@@ -73,13 +73,14 @@ public class BackwardCompatibilityConfig {
         for (Entry<Object, Object> kv : props.entrySet()) {
             String key = (String) kv.getKey();
             String value = (String) kv.getValue();
-            
+
             if (key.equals(value))
                 continue; // no change
-            
+
             if (value.contains(key))
-                throw new IllegalStateException("New key '" + value + "' contains old key '" + key + "' causes trouble to repeated find & replace");
-            
+                throw new IllegalStateException("New key '" + value + "' contains old key '" + key
+                        + "' causes trouble to repeated find & replace");
+
             if (value.endsWith("."))
                 old2newPrefix.put(key, value);
             else
@@ -122,7 +123,7 @@ public class BackwardCompatibilityConfig {
         return result;
     }
 
-    public OrderedProperties check(OrderedProperties props){
+    public OrderedProperties check(OrderedProperties props) {
         OrderedProperties result = new OrderedProperties();
         for (Entry<String, String> kv : props.entrySet()) {
             result.setProperty(check(kv.getKey()), kv.getValue());
@@ -147,7 +148,7 @@ public class BackwardCompatibilityConfig {
         // generate sed file
         File sedFile = new File(outputDir, "upgrade-old-config.sed");
         try {
-            out = new PrintWriter(sedFile);
+            out = new PrintWriter(sedFile, "UTF-8");
             for (Entry<String, String> e : bcc.old2new.entrySet()) {
                 out.println("s/" + quote(e.getKey()) + "/" + e.getValue() + "/g");
             }
@@ -161,7 +162,7 @@ public class BackwardCompatibilityConfig {
         // generate sh file
         File shFile = new File(outputDir, "upgrade-old-config.sh");
         try {
-            out = new PrintWriter(shFile);
+            out = new PrintWriter(shFile, "UTF-8");
             out.println("#!/bin/bash");
             Stack<File> stack = new Stack<>();
             stack.push(repoDir);
@@ -180,7 +181,7 @@ public class BackwardCompatibilityConfig {
         } finally {
             IOUtils.closeQuietly(out);
         }
-        
+
         System.out.println("Files generated:");
         System.out.println(shFile);
         System.out.println(sedFile);
@@ -211,6 +212,7 @@ public class BackwardCompatibilityConfig {
         else if (name.endsWith("-site.xml"))
             return false;
         else
-            return name.endsWith(".java") || name.endsWith(".js") || name.endsWith(".sh") || name.endsWith(".properties") || name.endsWith(".xml");
+            return name.endsWith(".java") || name.endsWith(".js") || name.endsWith(".sh")
+                    || name.endsWith(".properties") || name.endsWith(".xml");
     }
 }
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 0041402..a43377d 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.Collection;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -773,7 +774,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public String getOverrideHiveTableLocation(String table) {
-        return getOptional("kylin.source.hive.table-location." + table.toUpperCase());
+        return getOptional("kylin.source.hive.table-location." + table.toUpperCase(Locale.ROOT));
     }
 
     public boolean isHiveKeepFlatTable() {
@@ -1173,7 +1174,6 @@ abstract public class KylinConfigBase implements Serializable {
         return getPropertiesByPrefix("kylin.engine.spark-conf-" + configName + ".");
     }
 
-
     public double getDefaultHadoopJobReducerInputMB() {
         return Double.parseDouble(getOptional("kylin.engine.mr.reduce-input-mb", "500"));
     }
@@ -1708,7 +1708,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public String getKylinMetricsPrefix() {
-        return getOptional("kylin.metrics.prefix", "KYLIN").toUpperCase();
+        return getOptional("kylin.metrics.prefix", "KYLIN").toUpperCase(Locale.ROOT);
     }
 
     public String getKylinMetricsActiveReservoirDefaultClass() {
diff --git a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
index cd1c2b1..e03d0e8 100644
--- a/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
+++ b/core-common/src/main/java/org/apache/kylin/common/lock/DistributedLockFactory.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.lock;
 
 import java.lang.management.ManagementFactory;
+import java.nio.charset.StandardCharsets;
 
 public abstract class DistributedLockFactory {
 
@@ -35,9 +36,9 @@ public abstract class DistributedLockFactory {
     private static String threadProcessAndHost() {
         return Thread.currentThread().getId() + "-" + processAndHost();
     }
-    
+
     private static String processAndHost() {
-        byte[] bytes = ManagementFactory.getRuntimeMXBean().getName().getBytes();
-        return new String(bytes);
+        byte[] bytes = ManagementFactory.getRuntimeMXBean().getName().getBytes(StandardCharsets.UTF_8);
+        return new String(bytes, StandardCharsets.UTF_8);
     }
 }
diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java b/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java
index 656a478..8e33152 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/common/MetricsNameBuilder.java
@@ -18,21 +18,23 @@
 
 package org.apache.kylin.common.metrics.common;
 
+import java.util.Locale;
+
 public final class MetricsNameBuilder {
     public final static String METRICS = "metrics:";
     public final static String PROJECT_TEMPLATE = METRICS + "project=%s";
     public final static String CUBE_TEMPLATE = METRICS + "project=%s,cube=%s";
 
     public static String buildMetricName(String prefix, String name) {
-        return String.format(prefix + ",name=%s", name);
+        return String.format(Locale.ROOT, prefix + ",name=%s", name);
     }
 
     public static String buildCubeMetricPrefix(String project) {
-        return String.format(PROJECT_TEMPLATE, project);
+        return String.format(Locale.ROOT, PROJECT_TEMPLATE, project);
     }
 
     public static String buildCubeMetricPrefix(String project, String cube) {
-        return String.format(CUBE_TEMPLATE, project, cube);
+        return String.format(Locale.ROOT, CUBE_TEMPLATE, project, cube);
     }
 
 }
diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
index e728759..7487930 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/CodahaleMetrics.java
@@ -24,6 +24,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -450,7 +451,7 @@ public class CodahaleMetrics implements Metrics {
         MetricsReporting reporter = null;
         for (String metricsReportingName : metricsReporterNames) {
             try {
-                reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase());
+                reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase(Locale.ROOT));
             } catch (IllegalArgumentException e) {
                 LOGGER.error("Invalid reporter name " + metricsReportingName, e);
                 throw e;
diff --git a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java
index 7bacaf1..95c5116 100644
--- a/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java
+++ b/core-common/src/main/java/org/apache/kylin/common/metrics/metrics2/JsonFileMetricsReporter.java
@@ -22,6 +22,7 @@ import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -100,7 +101,8 @@ public class JsonFileMetricsReporter implements CodahaleReporter {
                     BufferedWriter bw = null;
                     try {
                         fs.delete(tmpPath, true);
-                        bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, true)));
+                        bw = new BufferedWriter(
+                                new OutputStreamWriter(fs.create(tmpPath, true), StandardCharsets.UTF_8));
                         bw.write(json);
                         fs.setPermission(tmpPath, FsPermission.createImmutable((short) 0644));
                     } catch (IOException e) {
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
index 450eb57..ca38091 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceTool.java
@@ -22,6 +22,7 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.NavigableSet;
 import java.util.Set;
@@ -141,7 +142,7 @@ public class ResourceTool {
         StringBuffer sb = new StringBuffer();
         String line;
         try {
-            br = new BufferedReader(new InputStreamReader(is));
+            br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
             while ((line = br.readLine()) != null) {
                 System.out.println(line);
                 sb.append(line).append('\n');
@@ -202,8 +203,8 @@ public class ResourceTool {
         copy(srcConfig, dstConfig, "/", copyImmutableResource);
     }
 
-    public static void copyR(ResourceStore src, ResourceStore dst, String path, TreeSet<String> pathsSkipChildrenCheck, boolean copyImmutableResource)
-            throws IOException {
+    public static void copyR(ResourceStore src, ResourceStore dst, String path, TreeSet<String> pathsSkipChildrenCheck,
+            boolean copyImmutableResource) throws IOException {
 
         if (!copyImmutableResource && IMMUTABLE_PREFIX.contains(path)) {
             return;
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java b/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
index a4877f2..a8c6142 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/RootPersistentEntity.java
@@ -21,6 +21,7 @@ package org.apache.kylin.common.persistence;
 import java.io.Serializable;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Locale;
 
 import org.apache.commons.lang.time.FastDateFormat;
 import org.apache.kylin.common.KylinVersion;
@@ -45,7 +46,7 @@ abstract public class RootPersistentEntity implements AclEntity, Serializable {
 
     static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss z";
     static FastDateFormat format = FastDateFormat.getInstance(DATE_PATTERN);
-    static DateFormat df = new SimpleDateFormat(DATE_PATTERN);
+    static DateFormat df = new SimpleDateFormat(DATE_PATTERN, Locale.ROOT);
 
     public static String formatTime(long millis) {
         return format.format(millis);
@@ -58,7 +59,7 @@ abstract public class RootPersistentEntity implements AclEntity, Serializable {
 
     @JsonProperty("last_modified")
     protected long lastModified;
-    
+
     // if cached and shared, the object MUST NOT be modified (call setXXX() for example)
     protected boolean isCachedAndShared = false;
 
@@ -101,7 +102,7 @@ abstract public class RootPersistentEntity implements AclEntity, Serializable {
     public void updateRandomUuid() {
         setUuid(RandomUtil.randomUUID().toString());
     }
-    
+
     public boolean isCachedAndShared() {
         return isCachedAndShared;
     }
diff --git a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
index 11284f6..9f95a7c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/restclient/RestClient.java
@@ -22,6 +22,7 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -283,7 +284,8 @@ public class RestClient {
         return response;
     }
 
-    public void clearCacheForCubeMigration(String cube, String project, String model, Map<String, String> tableToProjects) throws IOException{
+    public void clearCacheForCubeMigration(String cube, String project, String model,
+            Map<String, String> tableToProjects) throws IOException {
         String url = baseUrl + "/cache/migration";
         HttpPost post = new HttpPost(url);
 
@@ -309,7 +311,8 @@ public class RestClient {
         HttpResponse response = client.execute(put);
         getContent(response);
         if (response.getStatusLine().getStatusCode() != 200) {
-            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
+            throw new IOException(
+                    "Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
         }
     }
 
@@ -319,7 +322,8 @@ public class RestClient {
         HttpResponse response = client.execute(get);
         String content = getContent(response);
         if (response.getStatusLine().getStatusCode() != 200) {
-            throw new IOException("Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
+            throw new IOException(
+                    "Invalid response " + response.getStatusLine().getStatusCode() + " with url " + url + "\n");
         }
         return content;
     }
@@ -336,7 +340,8 @@ public class RestClient {
     private void addHttpHeaders(HttpRequestBase method) {
         method.addHeader("Accept", "application/json, text/plain, */*");
         method.addHeader("Content-Type", "application/json");
-        String basicAuth = DatatypeConverter.printBase64Binary((this.userName + ":" + this.password).getBytes());
+        String basicAuth = DatatypeConverter
+                .printBase64Binary((this.userName + ":" + this.password).getBytes(StandardCharsets.UTF_8));
         method.addHeader("Authorization", "Basic " + basicAuth);
     }
 
@@ -384,7 +389,7 @@ public class RestClient {
         BufferedReader rd = null;
         StringBuffer result = new StringBuffer();
         try {
-            reader = new InputStreamReader(response.getEntity().getContent());
+            reader = new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8);
             rd = new BufferedReader(reader);
             String line = null;
             while ((line = rd.readLine()) != null) {
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java b/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
index fc501ea..33fc31a 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/Bytes.java
@@ -37,6 +37,7 @@ import java.security.SecureRandom;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -284,10 +285,11 @@ public class Bytes {
             len = b.length - off;
         for (int i = off; i < off + len; ++i) {
             int ch = b[i] & 0xFF;
-            if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z') || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
+            if ((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z') || (ch >= 'a' && ch <= 'z')
+                    || " `~!@#$%^&*()-_=+[]{}|;:'\",.<>/?".indexOf(ch) >= 0) {
                 result.append((char) ch);
             } else {
-                result.append(String.format("\\x%02X", ch));
+                result.append(String.format(Locale.ROOT, "\\x%02X", ch));
             }
         }
         return result.toString();
@@ -441,12 +443,14 @@ public class Bytes {
         }
     }
 
-    private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes, final int offset, final int length, final int expectedLength) {
+    private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes, final int offset,
+            final int length, final int expectedLength) {
         String reason;
         if (length != expectedLength) {
             reason = "Wrong length: " + length + ", expected " + expectedLength;
         } else {
-            reason = "offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: " + bytes.length;
+            reason = "offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: "
+                    + bytes.length;
         }
         return new IllegalArgumentException(reason);
     }
@@ -463,7 +467,8 @@ public class Bytes {
      */
     public static int putLong(byte[] bytes, int offset, long val) {
         if (bytes.length - offset < SIZEOF_LONG) {
-            throw new IllegalArgumentException("Not enough room to put a long at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException(
+                    "Not enough room to put a long at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putLongUnsafe(bytes, offset, val);
@@ -489,7 +494,10 @@ public class Bytes {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Long.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putLong(bytes,
+                (long) offset
+                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
+                val);
         return offset + SIZEOF_LONG;
     }
 
@@ -645,9 +653,14 @@ public class Bytes {
      */
     public static int toIntUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Integer.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Integer.reverseBytes(
+                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                            .getInt(bytes, (long) offset
+                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                    .getInt(bytes, (long) offset
+                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -660,9 +673,14 @@ public class Bytes {
      */
     public static short toShortUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Short.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Short.reverseBytes(
+                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                            .getShort(bytes, (long) offset
+                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                    .getShort(bytes, (long) offset
+                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -675,9 +693,14 @@ public class Bytes {
      */
     public static long toLongUnsafe(byte[] bytes, int offset) {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
-            return Long.reverseBytes(org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
+            return Long.reverseBytes(
+                    org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                            .getLong(bytes, (long) offset
+                                    + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET));
         } else {
-            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.getLong(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
+            return org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe
+                    .getLong(bytes, (long) offset
+                            + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET);
         }
     }
 
@@ -693,7 +716,8 @@ public class Bytes {
      */
     public static int readAsInt(byte[] bytes, int offset, final int length) {
         if (offset + length > bytes.length) {
-            throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + ") exceed the" + " capacity of the array: " + bytes.length);
+            throw new IllegalArgumentException("offset (" + offset + ") + length (" + length + ") exceed the"
+                    + " capacity of the array: " + bytes.length);
         }
         int n = 0;
         for (int i = offset; i < (offset + length); i++) {
@@ -715,7 +739,8 @@ public class Bytes {
      */
     public static int putInt(byte[] bytes, int offset, int val) {
         if (bytes.length - offset < SIZEOF_INT) {
-            throw new IllegalArgumentException("Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException(
+                    "Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putIntUnsafe(bytes, offset, val);
@@ -741,7 +766,10 @@ public class Bytes {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Integer.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putInt(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putInt(bytes,
+                (long) offset
+                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
+                val);
         return offset + SIZEOF_INT;
     }
 
@@ -830,7 +858,8 @@ public class Bytes {
      */
     public static int putShort(byte[] bytes, int offset, short val) {
         if (bytes.length - offset < SIZEOF_SHORT) {
-            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a "
+                    + bytes.length + " byte array");
         }
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.isAvailable()) {
             return putShortUnsafe(bytes, offset, val);
@@ -854,7 +883,10 @@ public class Bytes {
         if (org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.littleEndian) {
             val = Short.reverseBytes(val);
         }
-        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putShort(bytes, (long) offset + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET, val);
+        org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.theUnsafe.putShort(bytes,
+                (long) offset
+                        + org.apache.kylin.common.util.Bytes.LexicographicalComparerHolder.UnsafeComparer.BYTE_ARRAY_BASE_OFFSET,
+                val);
         return offset + SIZEOF_SHORT;
     }
 
@@ -873,7 +905,8 @@ public class Bytes {
      */
     public static int putAsShort(byte[] bytes, int offset, int val) {
         if (bytes.length - offset < SIZEOF_SHORT) {
-            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a " + bytes.length + " byte array");
+            throw new IllegalArgumentException("Not enough room to put a short at" + " offset " + offset + " in a "
+                    + bytes.length + " byte array");
         }
         bytes[offset + 1] = (byte) val;
         val >>= 8;
@@ -964,7 +997,8 @@ public class Bytes {
      * @return 0 if equal, < 0 if left is less than right, etc.
      */
     public static int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2, int length2) {
-        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(buffer1, offset1, length1, buffer2, offset2, length2);
+        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(buffer1, offset1, length1, buffer2, offset2,
+                length2);
     }
 
     interface Comparer<T> {
@@ -1118,8 +1152,8 @@ public class Bytes {
                 }
                 final int minLength = Math.min(length1, length2);
                 final int minWords = minLength / SIZEOF_LONG;
-                final long offset1Adj = offset1 + (long)BYTE_ARRAY_BASE_OFFSET;
-                final long offset2Adj = offset2 + (long)BYTE_ARRAY_BASE_OFFSET;
+                final long offset1Adj = offset1 + (long) BYTE_ARRAY_BASE_OFFSET;
+                final long offset2Adj = offset2 + (long) BYTE_ARRAY_BASE_OFFSET;
 
                 /*
                  * Compare 8 bytes at a time. Benchmarking shows comparing 8 bytes at a
@@ -1202,7 +1236,8 @@ public class Bytes {
         return compareTo(left, right) == 0;
     }
 
-    public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset, int rightLen) {
+    public static boolean equals(final byte[] left, int leftOffset, int leftLen, final byte[] right, int rightOffset,
+            int rightLen) {
         // short circuit case
         if (left == right && leftOffset == rightOffset && leftLen == rightLen) {
             return true;
@@ -1221,7 +1256,8 @@ public class Bytes {
         if (left[leftOffset + leftLen - 1] != right[rightOffset + rightLen - 1])
             return false;
 
-        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(left, leftOffset, leftLen, right, rightOffset, rightLen) == 0;
+        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(left, leftOffset, leftLen, right, rightOffset,
+                rightLen) == 0;
     }
 
     /**
@@ -1252,7 +1288,9 @@ public class Bytes {
      * array on the left.
      */
     public static boolean startsWith(byte[] bytes, byte[] prefix) {
-        return bytes != null && prefix != null && bytes.length >= prefix.length && LexicographicalComparerHolder.BEST_COMPARER.compareTo(bytes, 0, prefix.length, prefix, 0, prefix.length) == 0;
+        return bytes != null && prefix != null && bytes.length >= prefix.length
+                && LexicographicalComparerHolder.BEST_COMPARER.compareTo(bytes, 0, prefix.length, prefix, 0,
+                        prefix.length) == 0;
     }
 
     /**
@@ -1640,7 +1678,8 @@ public class Bytes {
     public static void writeStringFixedSize(final DataOutput out, String s, int size) throws IOException {
         byte[] b = toBytes(s);
         if (b.length > size) {
-            throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b) + ") into a field of length " + size);
+            throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b)
+                    + ") into a field of length " + size);
         }
 
         out.writeBytes(s);
@@ -1926,7 +1965,7 @@ public class Bytes {
      */
     public static String toHex(byte[] b) {
         checkArgument(b.length > 0, "length must be greater than 0");
-        return String.format("%x", new BigInteger(1, b));
+        return String.format(Locale.ROOT, "%x", new BigInteger(1, b));
     }
 
     /**
@@ -1939,10 +1978,11 @@ public class Bytes {
         checkArgument(hex.length() > 0, "length must be greater than 0");
         checkArgument(hex.length() % 2 == 0, "length must be a multiple of 2");
         // Make sure letters are upper case
-        hex = hex.toUpperCase();
+        hex = hex.toUpperCase(Locale.ROOT);
         byte[] b = new byte[hex.length() / 2];
         for (int i = 0; i < b.length; i++) {
-            b[i] = (byte) ((toBinaryFromHex((byte) hex.charAt(2 * i)) << 4) + (toBinaryFromHex((byte) hex.charAt((2 * i + 1))) & 0xff));
+            b[i] = (byte) ((toBinaryFromHex((byte) hex.charAt(2 * i)) << 4)
+                    + (toBinaryFromHex((byte) hex.charAt((2 * i + 1))) & 0xff));
         }
         return b;
     }
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
index 4a54c48..faabbd1 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/BytesUtil.java
@@ -20,6 +20,7 @@ package org.apache.kylin.common.util;
 
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
+import java.util.Locale;
 
 import com.google.common.primitives.Shorts;
 
@@ -452,7 +453,7 @@ public class BytesUtil {
         StringBuilder sb = new StringBuilder(length * 4);
         for (int i = 0; i < length; i++) {
             int b = array[offset + i];
-            sb.append(String.format("\\x%02X", b & 0xFF));
+            sb.append(String.format(Locale.ROOT, "\\x%02X", b & 0xFF));
         }
         return sb.toString();
     }
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java b/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java
index 4389aab..3e87226 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/CaseInsensitiveString.java
@@ -17,6 +17,8 @@
  */
 package org.apache.kylin.common.util;
 
+import java.util.Locale;
+
 /**
  * A string wrapper that makes .equals a caseInsensitive match
  * <p>
@@ -53,7 +55,7 @@ public class CaseInsensitiveString {
 
     @Override
     public int hashCode() {
-        return (str != null) ? str.toUpperCase().hashCode() : 0;
+        return (str != null) ? str.toUpperCase(Locale.ROOT).hashCode() : 0;
     }
 
     @Override
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java b/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java
index 38b32d5..5fef77b 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/CliCommandExecutor.java
@@ -22,6 +22,7 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.FileUtils;
 import org.slf4j.LoggerFactory;
@@ -131,7 +132,8 @@ public class CliCommandExecutor {
         builder.redirectErrorStream(true);
         Process proc = builder.start();
 
-        BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()));
+        BufferedReader reader = new BufferedReader(
+                new InputStreamReader(proc.getInputStream(), StandardCharsets.UTF_8));
         String line;
         StringBuilder result = new StringBuilder();
         while ((line = reader.readLine()) != null && !Thread.currentThread().isInterrupted()) {
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java
index 482b949..deb54d4 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/EncryptUtil.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.common.util;
 
+import java.nio.charset.StandardCharsets;
 import org.apache.commons.codec.binary.Base64;
 
 import javax.crypto.Cipher;
@@ -35,7 +36,8 @@ public class EncryptUtil {
             Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
             final SecretKeySpec secretKey = new SecretKeySpec(key, "AES");
             cipher.init(Cipher.ENCRYPT_MODE, secretKey);
-            final String encryptedString = Base64.encodeBase64String(cipher.doFinal(strToEncrypt.getBytes()));
+            final String encryptedString = Base64.encodeBase64String(cipher.doFinal(strToEncrypt.getBytes(
+                StandardCharsets.UTF_8)));
             return encryptedString;
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
@@ -47,7 +49,7 @@ public class EncryptUtil {
             Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5PADDING");
             final SecretKeySpec secretKey = new SecretKeySpec(key, "AES");
             cipher.init(Cipher.DECRYPT_MODE, secretKey);
-            final String decryptedString = new String(cipher.doFinal(Base64.decodeBase64(strToDecrypt)));
+            final String decryptedString = new String(cipher.doFinal(Base64.decodeBase64(strToDecrypt)), StandardCharsets.UTF_8);
             return decryptedString;
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java b/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java
index 1c023aa..c04952d 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/HiveCmdBuilder.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.util;
 
 import java.util.ArrayList;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -47,7 +48,7 @@ public class HiveCmdBuilder {
     }
 
     public String build() {
-        HiveClientMode clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+        HiveClientMode clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase(Locale.ROOT));
         String beelineShell = kylinConfig.getHiveBeelineShell();
         String beelineParams = kylinConfig.getHiveBeelineParams();
         if (kylinConfig.getEnableSparkSqlForTableOps()) {
@@ -80,7 +81,7 @@ public class HiveCmdBuilder {
                     hql.append(statement);
                     hql.append("\n");
                 }
-                String createFileCmd = String.format(CREATE_HQL_TMP_FILE_TEMPLATE, tmpHqlPath, hql);
+                String createFileCmd = String.format(Locale.ROOT, CREATE_HQL_TMP_FILE_TEMPLATE, tmpHqlPath, hql);
                 buf.append(createFileCmd);
                 buf.append("\n");
                 buf.append(beelineShell);
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java b/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java
index 5e27d9d..2672920 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/SSHClient.java
@@ -29,6 +29,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.IOUtils;
 import org.slf4j.LoggerFactory;
@@ -93,7 +94,7 @@ public class SSHClient {
                 // The access time should be sent here,
                 // but it is not accessible with JavaAPI ;-<
                 command += (" " + (_lfile.lastModified() / 1000) + " 0\n");
-                out.write(command.getBytes());
+                out.write(command.getBytes(StandardCharsets.UTF_8));
                 out.flush();
                 if (checkAck(in) != 0) {
                     throw new Exception("Error in checkAck()");
@@ -111,7 +112,7 @@ public class SSHClient {
                 command += localFile;
             }
             command += "\n";
-            out.write(command.getBytes());
+            out.write(command.getBytes(StandardCharsets.UTF_8));
             out.flush();
             if (checkAck(in) != 0) {
                 throw new Exception("Error in checkAck()");
@@ -200,7 +201,7 @@ public class SSHClient {
                 for (int i = 0;; i++) {
                     in.read(buf, i, 1);
                     if (buf[i] == (byte) 0x0a) {
-                        file = new String(buf, 0, i);
+                        file = new String(buf, 0, i, StandardCharsets.UTF_8);
                         break;
                     }
                 }
@@ -288,7 +289,7 @@ public class SSHClient {
                     if (i < 0)
                         break;
 
-                    String line = new String(tmp, 0, i);
+                    String line = new String(tmp, 0, i, StandardCharsets.UTF_8);
                     text.append(line);
                     if (logAppender != null) {
                         logAppender.log(line);
@@ -299,7 +300,7 @@ public class SSHClient {
                     if (i < 0)
                         break;
 
-                    String line = new String(tmp, 0, i);
+                    String line = new String(tmp, 0, i, StandardCharsets.UTF_8);
                     text.append(line);
                     if (logAppender != null) {
                         logAppender.log(line);
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java b/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java
index 9e5197e..699445a 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/SetThreadName.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.util;
 
 import java.io.Closeable;
+import java.util.Locale;
 
 /**
  *
@@ -39,7 +40,7 @@ public class SetThreadName implements Closeable {
 
     public SetThreadName(String format, Object... args) {
         originThreadName = Thread.currentThread().getName();
-        Thread.currentThread().setName(String.format(format, args) + "-" + Thread.currentThread().getId());
+        Thread.currentThread().setName(String.format(Locale.ROOT, format, args) + "-" + Thread.currentThread().getId());
     }
 
     @Override
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
index e67d756..0b94d9c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
@@ -21,6 +21,7 @@ package org.apache.kylin.common.util;
 import java.util.ArrayList;
 import java.util.Collection;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 
 /**
@@ -79,7 +80,7 @@ public class StringUtil {
         if (source != null) {
             for (int i = 0; i < source.length; i++) {
                 if (source[i] != null) {
-                    target[i] = source[i].toUpperCase();
+                    target[i] = source[i].toUpperCase(Locale.ROOT);
                 }
             }
         }
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java
index 7a24c10..73209f5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/TimeUtil.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.common.util;
 
 import java.util.Calendar;
+import java.util.Locale;
 import java.util.TimeZone;
 
 /**
@@ -43,14 +44,14 @@ public class TimeUtil {
     }
 
     public static long getWeekStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(getDayStart(ts));
         calendar.add(Calendar.DAY_OF_WEEK, calendar.getFirstDayOfWeek() - calendar.get(Calendar.DAY_OF_WEEK));
         return calendar.getTimeInMillis();
     }
 
     public static long getMonthStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(ts);
         int year = calendar.get(Calendar.YEAR);
         int month = calendar.get(Calendar.MONTH);
@@ -60,7 +61,7 @@ public class TimeUtil {
     }
 
     public static long getQuarterStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(ts);
         int year = calendar.get(Calendar.YEAR);
         int month = calendar.get(Calendar.MONTH);
@@ -70,7 +71,7 @@ public class TimeUtil {
     }
 
     public static long getYearStart(long ts) {
-        Calendar calendar = Calendar.getInstance(gmt);
+        Calendar calendar = Calendar.getInstance(gmt, Locale.ROOT);
         calendar.setTimeInMillis(ts);
         int year = calendar.get(Calendar.YEAR);
         calendar.clear();
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
index e53ad16..f67b2a2 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ZipFileUtils.java
@@ -20,6 +20,7 @@ package org.apache.kylin.common.util;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.commons.compress.archivers.ArchiveException;
 import org.apache.commons.compress.archivers.ArchiveStreamFactory;
@@ -53,6 +54,6 @@ public class ZipFileUtils {
     }
 
     private static boolean validateZipFilename(String filename) {
-        return !StringUtils.isEmpty(filename) && filename.trim().toLowerCase().endsWith(".zip");
+        return !StringUtils.isEmpty(filename) && filename.trim().toLowerCase(Locale.ROOT).endsWith(".zip");
     }
 }
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java b/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
index 4b81daf..e4c65fb 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/BasicTest.java
@@ -28,6 +28,8 @@ import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -55,19 +57,6 @@ import com.google.common.collect.Maps;
 public class BasicTest {
     protected static final org.slf4j.Logger logger = LoggerFactory.getLogger(BasicTest.class);
 
-    private void log(ByteBuffer a) {
-        Integer x = 4;
-        foo(x);
-    }
-
-    private void foo(Long a) {
-        System.out.printf("a");
-    }
-
-    private void foo(Integer b) {
-        System.out.printf("b");
-    }
-
     private enum MetricType {
         Count, DimensionAsMetric, DistinctCount, Normal
     }
@@ -185,9 +174,9 @@ public class BasicTest {
         long current = System.currentTimeMillis();
         System.out.println(time(current));
 
-        Calendar a = Calendar.getInstance();
-        Calendar b = Calendar.getInstance();
-        Calendar c = Calendar.getInstance();
+        Calendar a = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
+        Calendar b = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
+        Calendar c = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         b.clear();
         c.clear();
 
@@ -244,8 +233,8 @@ public class BasicTest {
     }
 
     private static String time(long t) {
-        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
-        Calendar cal = Calendar.getInstance();
+        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.ROOT);
+        Calendar cal = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         cal.setTimeInMillis(t);
         return dateFormat.format(cal.getTime());
     }
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java b/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
index 6dcdaf5..f0326b5 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/InstallJarIntoMavenTest.java
@@ -19,8 +19,12 @@
 package org.apache.kylin.common.util;
 
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -35,7 +39,8 @@ public class InstallJarIntoMavenTest {
         File folder = new File("/export/home/b_kylin/tmp");
         File out = new File("/export/home/b_kylin/tmp/out.sh");
         out.createNewFile();
-        FileWriter fw = new FileWriter(out);
+
+        Writer fw = new OutputStreamWriter(new FileOutputStream(out), StandardCharsets.UTF_8);
 
         for (File file : folder.listFiles()) {
             String name = file.getName();
@@ -53,7 +58,8 @@ public class InstallJarIntoMavenTest {
             String artifactId = name.substring(0, match.start());
             String version = name.substring(match.start() + 1, lastDot);
 
-            fw.write(String.format("mvn install:install-file -Dfile=%s -DgroupId=%s -DartifactId=%s -Dversion=%s -Dpackaging=jar", name, "org.apache." + groupId, artifactId, version));
+            fw.write(String.format(Locale.ROOT, "mvn install:install-file -Dfile=%s -DgroupId=%s -DartifactId=%s "
+                    + "-Dversion=%s " + "-Dpackaging=jar", name, "org.apache." + groupId, artifactId, version));
             fw.write("\n");
         }
         fw.close();
diff --git a/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java b/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
index 15f54f9..ced7125 100644
--- a/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/util/TimeUtilTest.java
@@ -21,6 +21,7 @@ package org.apache.kylin.common.util;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
+import java.util.Locale;
 import java.util.TimeZone;
 
 import org.junit.Assert;
@@ -35,8 +36,8 @@ public class TimeUtilTest {
     }
 
     public static long normalizeTime(long timeMillis, NormalizedTimeUnit unit) {
-        Calendar a = Calendar.getInstance();
-        Calendar b = Calendar.getInstance();
+        Calendar a = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
+        Calendar b = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         b.clear();
 
         a.setTimeInMillis(timeMillis);
@@ -50,7 +51,7 @@ public class TimeUtilTest {
 
     @Test
     public void basicTest() throws ParseException {
-        java.text.DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+        java.text.DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.ROOT);
         dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
 
         long t1 = dateFormat.parse("2012/01/01 00:00:01").getTime();
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
index a2127a8..90cce14 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
@@ -23,6 +23,7 @@ import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.TimeZone;
@@ -150,7 +151,7 @@ public class CubeSegment implements IBuildable, ISegment, Serializable {
         }
 
         // using time
-        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
+        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT);
         dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
         return dateFormat.format(tsRange.start.v) + "_" + dateFormat.format(tsRange.end.v);
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java
index f55c9db..bcd4cf0 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/CuboidModeEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.cube.cuboid;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum CuboidModeEnum {
@@ -39,7 +41,7 @@ public enum CuboidModeEnum {
             return null;
         }
         for (CuboidModeEnum mode : CuboidModeEnum.values()) {
-            if (mode.modeName.equals(modeName.toUpperCase())) {
+            if (mode.modeName.equals(modeName.toUpperCase(Locale.ROOT))) {
                 return mode;
             }
         }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java
index e293325..ea91c6c 100755
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/BPUSCalculator.java
@@ -18,14 +18,16 @@
 
 package org.apache.kylin.cube.cuboid.algorithm;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Map;
-import java.util.Set;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 /**
  * Calculate the benefit based on Benefit Per Unit Space.
@@ -132,8 +134,8 @@ public class BPUSCalculator implements BenefitPolicy {
     @Override
     public boolean ifEfficient(CuboidBenefitModel best) {
         if (best.getBenefit() < getMinBenefitRatio()) {
-            logger.info(String.format("The recommended cuboid %s doesn't meet minimum benifit ratio %f", best,
-                    getMinBenefitRatio()));
+            logger.info(String.format(Locale.ROOT, "The recommended cuboid %s doesn't meet minimum benefit ratio %f",
+                    best, getMinBenefitRatio()));
             return false;
         }
         return true;
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java
index 27d59fa..a512a5c 100755
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/generic/GeneticAlgorithm.java
@@ -18,7 +18,10 @@
 
 package org.apache.kylin.cube.cuboid.algorithm.generic;
 
-import com.google.common.collect.Lists;
+import java.util.BitSet;
+import java.util.List;
+import java.util.Locale;
+
 import org.apache.commons.math3.genetics.Chromosome;
 import org.apache.commons.math3.genetics.ElitisticListPopulation;
 import org.apache.commons.math3.genetics.FixedGenerationCount;
@@ -30,8 +33,7 @@ import org.apache.kylin.cube.cuboid.algorithm.CuboidStats;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.BitSet;
-import java.util.List;
+import com.google.common.collect.Lists;
 
 /**
  * Implementation of a genetic algorithm to recommend a list of cuboids.
@@ -101,10 +103,10 @@ public class GeneticAlgorithm extends AbstractRecommendAlgorithm {
             for (Long cuboid : finalList) {
                 Double unitSpace = cuboidStats.getCuboidSize(cuboid);
                 if (unitSpace != null) {
-                    logger.trace(String.format("cuboidId %d and Space: %f", cuboid, unitSpace));
+                    logger.trace(String.format(Locale.ROOT, "cuboidId %d and Space: %f", cuboid, unitSpace));
                     totalSpace += unitSpace;
                 } else {
-                    logger.trace(String.format("mandatory cuboidId %d", cuboid));
+                    logger.trace(String.format(Locale.ROOT, "mandatory cuboidId %d", cuboid));
                 }
             }
             logger.trace("Total Space:" + totalSpace);
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java
index 0f2dcc3..7f415de 100755
--- a/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/cuboid/algorithm/greedy/GreedyAlgorithm.java
@@ -18,10 +18,14 @@
 
 package org.apache.kylin.cube.cuboid.algorithm.greedy;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.kylin.cube.cuboid.algorithm.AbstractRecommendAlgorithm;
 import org.apache.kylin.cube.cuboid.algorithm.BenefitPolicy;
 import org.apache.kylin.cube.cuboid.algorithm.CuboidBenefitModel;
@@ -29,12 +33,10 @@ import org.apache.kylin.cube.cuboid.algorithm.CuboidStats;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicReference;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
  * A simple implementation of the Greedy Algorithm , it chooses the cuboids which give
@@ -100,7 +102,7 @@ public class GreedyAlgorithm extends AbstractRecommendAlgorithm {
             benefitPolicy.propagateAggregationCost(best.getCuboidId(), selected);
             round++;
             if (logger.isTraceEnabled()) {
-                logger.trace(String.format("Recommend in round %d : %s", round, best.toString()));
+                logger.trace(String.format(Locale.ROOT, "Recommend in round %d : %s", round, best.toString()));
             }
         }
 
@@ -116,7 +118,7 @@ public class GreedyAlgorithm extends AbstractRecommendAlgorithm {
             logger.trace("Excluded cuboidId size:" + excluded.size());
             logger.trace("Excluded cuboidId detail:");
             for (Long cuboid : excluded) {
-                logger.trace(String.format("cuboidId %d and Cost: %d and Space: %f", cuboid,
+                logger.trace(String.format(Locale.ROOT, "cuboidId %d and Cost: %d and Space: %f", cuboid,
                         cuboidStats.getCuboidQueryCost(cuboid), cuboidStats.getCuboidSize(cuboid)));
             }
             logger.trace("Total Space:" + (spaceLimit - remainingSpace));
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index f8889fa..45756fd 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -18,16 +18,30 @@
 
 package org.apache.kylin.cube.model;
 
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeSet;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.ArrayUtils;
@@ -65,27 +79,16 @@ import org.apache.kylin.metadata.realization.RealizationType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.reflect.Method;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeSet;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 /**
  */
@@ -131,7 +134,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         }
 
     }
-    
+
     // ============================================================================
 
     private KylinConfigExt config;
@@ -222,7 +225,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     public String resourceName() {
         return name;
     }
-    
+
     public boolean isEnableSharding() {
         //in the future may extend to other storage that is shard-able
         return storageType != IStorageAware.ID_HBASE && storageType != IStorageAware.ID_HYBRID;
@@ -285,7 +288,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     }
 
     public DimensionDesc findDimensionByTable(String lookupTableName) {
-        lookupTableName = lookupTableName.toUpperCase();
+        lookupTableName = lookupTableName.toUpperCase(Locale.ROOT);
         for (DimensionDesc dim : dimensions)
             if (dim.getTableRef() != null && dim.getTableRef().getTableIdentity().equals(lookupTableName))
                 return dim;
@@ -577,10 +580,10 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 }
             }
 
-            String signatureInput = sigString.toString().replaceAll("\\s+", "").toLowerCase();
+            String signatureInput = sigString.toString().replaceAll("\\s+", "").toLowerCase(Locale.ROOT);
 
-            byte[] signature = md.digest(signatureInput.getBytes());
-            String ret = new String(Base64.encodeBase64(signature));
+            byte[] signature = md.digest(signatureInput.getBytes(StandardCharsets.UTF_8));
+            String ret = new String(Base64.encodeBase64(signature), StandardCharsets.UTF_8);
             return ret;
         } catch (NoSuchAlgorithmException | JsonProcessingException e) {
             throw new RuntimeException("Failed to calculate signature");
@@ -652,7 +655,8 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 Class<?> hbaseMappingAdapterClass = Class.forName(hbaseMappingAdapterName);
                 Method initMethod = hbaseMappingAdapterClass.getMethod("initHBaseMapping", CubeDesc.class);
                 initMethod.invoke(null, this);
-                Method initMeasureReferenceToColumnFamilyMethod = hbaseMappingAdapterClass.getMethod("initMeasureReferenceToColumnFamilyWithChecking", CubeDesc.class);
+                Method initMeasureReferenceToColumnFamilyMethod = hbaseMappingAdapterClass
+                        .getMethod("initMeasureReferenceToColumnFamilyWithChecking", CubeDesc.class);
                 initMeasureReferenceToColumnFamilyMethod.invoke(null, this);
             } catch (Exception e) {
                 throw new RuntimeException("Error during adapting hbase mapping", e);
@@ -844,7 +848,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         List<String> notifyList = getNotifyList();
         if (notifyList != null && !notifyList.isEmpty()) {
             EmailValidator emailValidator = EmailValidator.getInstance();
-            for (String email: notifyList) {
+            for (String email : notifyList) {
                 if (!emailValidator.isValid(email)) {
                     throw new IllegalArgumentException("Email [" + email + "] is not validation.");
                 }
@@ -1064,10 +1068,10 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         }
 
         for (MeasureDesc m : measures) {
-            m.setName(m.getName().toUpperCase());
+            m.setName(m.getName().toUpperCase(Locale.ROOT));
 
             if (m.getDependentMeasureRef() != null) {
-                m.setDependentMeasureRef(m.getDependentMeasureRef().toUpperCase());
+                m.setDependentMeasureRef(m.getDependentMeasureRef().toUpperCase(Locale.ROOT));
             }
 
             FunctionDesc func = m.getFunction();
@@ -1215,7 +1219,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     public void setAutoMergeTimeRanges(long[] autoMergeTimeRanges) {
         this.autoMergeTimeRanges = autoMergeTimeRanges;
     }
-    
+
     public boolean isBroken() {
         return !errors.isEmpty();
     }
@@ -1416,7 +1420,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         }
         return desc.isExtSnapshotTable();
     }
-    
+
     public List<String> getAllExtLookupSnapshotTypes() {
         List<String> result = Lists.newArrayList();
         for (SnapshotTableDesc snapshotTableDesc : snapshotTableDescList) {
@@ -1463,7 +1467,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         }
         return null;
     }
-    
+
     public List<TblColRef> getAllGlobalDictColumns() {
         List<TblColRef> globalDictCols = new ArrayList<TblColRef>();
         List<DictionaryDesc> dictionaryDescList = getDictionaries();
@@ -1474,7 +1478,8 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
 
         for (DictionaryDesc dictionaryDesc : dictionaryDescList) {
             String cls = dictionaryDesc.getBuilderClass();
-            if (GlobalDictionaryBuilder.class.getName().equals(cls) || SegmentAppendTrieDictBuilder.class.getName().equals(cls))
+            if (GlobalDictionaryBuilder.class.getName().equals(cls)
+                    || SegmentAppendTrieDictBuilder.class.getName().equals(cls))
                 globalDictCols.add(dictionaryDesc.getColumnRef());
         }
         return globalDictCols;
@@ -1483,7 +1488,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     public boolean isShrunkenDictFromGlobalEnabled() {
         return config.isShrunkenDictFromGlobalEnabled() && !getAllGlobalDictColumns().isEmpty();
     }
-    
+
     // UHC (ultra high cardinality column): contain the ShardByColumns and the GlobalDictionaryColumns
     public List<TblColRef> getAllUHCColumns() {
         List<TblColRef> uhcColumns = new ArrayList<TblColRef>();
@@ -1492,7 +1497,6 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         return uhcColumns;
     }
 
-
     public String getProject() {
         return getModel().getProject();
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
index 467a294..93def8b 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
@@ -20,6 +20,7 @@ package org.apache.kylin.cube.model;
 
 import java.io.Serializable;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.cube.CubeSegment;
@@ -78,9 +79,9 @@ public class CubeJoinedFlatTableDesc implements IJoinedFlatTableDesc, Serializab
 
     protected String makeTableName(CubeDesc cubeDesc, CubeSegment cubeSegment) {
         if (cubeSegment == null) {
-            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase();
+            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase(Locale.ROOT);
         } else {
-            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase() + "_"
+            return MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeDesc.getName().toLowerCase(Locale.ROOT) + "_"
                     + cubeSegment.getUuid().replaceAll("-", "_");
         }
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java
index ca2183a..a700e10 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/DictionaryDesc.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.cube.model;
 
+import java.util.Locale;
+
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
@@ -45,11 +47,11 @@ public class DictionaryDesc implements java.io.Serializable {
     void init(CubeDesc cubeDesc) {
         DataModelDesc model = cubeDesc.getModel();
 
-        column = column.toUpperCase();
+        column = column.toUpperCase(Locale.ROOT);
         colRef = model.findColumn(column);
 
         if (reuseColumn != null) {
-            reuseColumn = reuseColumn.toUpperCase();
+            reuseColumn = reuseColumn.toUpperCase(Locale.ROOT);
             reuseColRef = model.findColumn(reuseColumn);
         }
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
index c8fff26..dbbd4e8 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/DimensionDesc.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.cube.model;
 
 import java.util.Arrays;
+import java.util.Locale;
 
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.JoinDesc;
@@ -56,7 +57,7 @@ public class DimensionDesc implements java.io.Serializable {
         DataModelDesc model = cubeDesc.getModel();
 
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
 
         tableRef = model.findTable(table);
         table = tableRef.getAlias();
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
index 77b4b14..be8ed6a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/HBaseMappingDesc.java
@@ -21,14 +21,16 @@ package org.apache.kylin.cube.model;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedList;
+import java.util.Locale;
 
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
 
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 /**
  */
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
@@ -78,10 +80,10 @@ public class HBaseMappingDesc implements java.io.Serializable {
         cubeRef = cubeDesc;
 
         for (HBaseColumnFamilyDesc cf : columnFamily) {
-            cf.setName(cf.getName().toUpperCase());
+            cf.setName(cf.getName().toUpperCase(Locale.ROOT));
 
             for (HBaseColumnDesc c : cf.getColumns()) {
-                c.setQualifier(c.getQualifier().toUpperCase());
+                c.setQualifier(c.getQualifier().toUpperCase(Locale.ROOT));
                 StringUtil.toUpperCaseArray(c.getMeasureRefs(), c.getMeasureRefs());
             }
         }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java b/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
index 65719a5..f78e92b 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/util/CubingUtils.java
@@ -21,6 +21,7 @@ package org.apache.kylin.cube.util;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -53,7 +54,8 @@ public class CubingUtils {
 
     private static Logger logger = LoggerFactory.getLogger(CubingUtils.class);
 
-    public static Map<Long, HLLCounter> sampling(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDescIn, Iterable<List<String>> streams) {
+    public static Map<Long, HLLCounter> sampling(CubeDesc cubeDesc, IJoinedFlatTableDesc flatDescIn,
+            Iterable<List<String>> streams) {
         final CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(flatDescIn, cubeDesc);
         final int rowkeyLength = cubeDesc.getRowkey().getRowKeyColumns().length;
         final Set<Long> allCuboidIds = cubeDesc.getInitialCuboidScheduler().getAllCuboidIds();
@@ -105,8 +107,10 @@ public class CubingUtils {
         return result;
     }
 
-    public static Map<TblColRef, Dictionary<String>> buildDictionary(final CubeInstance cubeInstance, Iterable<List<String>> recordList) throws IOException {
-        final List<TblColRef> columnsNeedToBuildDictionary = cubeInstance.getDescriptor().listDimensionColumnsExcludingDerived(true);
+    public static Map<TblColRef, Dictionary<String>> buildDictionary(final CubeInstance cubeInstance,
+            Iterable<List<String>> recordList) throws IOException {
+        final List<TblColRef> columnsNeedToBuildDictionary = cubeInstance.getDescriptor()
+                .listDimensionColumnsExcludingDerived(true);
         final HashMap<Integer, TblColRef> tblColRefMap = Maps.newHashMap();
         int index = 0;
         for (TblColRef column : columnsNeedToBuildDictionary) {
@@ -126,14 +130,16 @@ public class CubingUtils {
         }
         for (TblColRef tblColRef : valueMap.keySet()) {
             Set<String> values = valueMap.get(tblColRef);
-            Dictionary<String> dict = DictionaryGenerator.buildDictionary(tblColRef.getType(), new IterableDictionaryValueEnumerator(values));
+            Dictionary<String> dict = DictionaryGenerator.buildDictionary(tblColRef.getType(),
+                    new IterableDictionaryValueEnumerator(values));
             result.put(tblColRef, dict);
         }
         return result;
     }
 
     @SuppressWarnings("unchecked")
-    public static Map<TblColRef, Dictionary<String>> writeDictionary(CubeSegment cubeSegment, Map<TblColRef, Dictionary<String>> dictionaryMap, long startOffset, long endOffset) {
+    public static Map<TblColRef, Dictionary<String>> writeDictionary(CubeSegment cubeSegment,
+            Map<TblColRef, Dictionary<String>> dictionaryMap, long startOffset, long endOffset) {
         Map<TblColRef, Dictionary<String>> realDictMap = Maps.newHashMap();
 
         for (Map.Entry<TblColRef, Dictionary<String>> entry : dictionaryMap.entrySet()) {
@@ -141,7 +147,7 @@ public class CubingUtils {
             final Dictionary<String> dictionary = entry.getValue();
             IReadableTable.TableSignature signature = new IReadableTable.TableSignature();
             signature.setLastModifiedTime(System.currentTimeMillis());
-            signature.setPath(String.format("streaming_%s_%s", startOffset, endOffset));
+            signature.setPath(String.format(Locale.ROOT, "streaming_%s_%s", startOffset, endOffset));
             signature.setSize(endOffset - startOffset);
             DictionaryInfo dictInfo = new DictionaryInfo(tblColRef.getColumnDesc(), tblColRef.getDatatype(), signature);
             logger.info("writing dictionary for TblColRef:" + tblColRef.toString());
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
index 5872dd8..eec3d70 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
@@ -29,12 +29,12 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
 
-import com.google.common.collect.Lists;
 import org.apache.kylin.common.util.Array;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -56,6 +56,7 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 /**
@@ -95,7 +96,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
 
     private String getColInAggrGroup(AggregationGroup g, String name) {
         for (String c : g.getIncludes()) {
-            if (c.toLowerCase().contains(name.toLowerCase()))
+            if (c.toLowerCase(Locale.ROOT).contains(name.toLowerCase(Locale.ROOT)))
                 return c;
         }
         throw new IllegalStateException();
@@ -158,7 +159,7 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
     private List<MeasureDesc> dropPercentile(List<MeasureDesc> measures) {
         ArrayList<MeasureDesc> result = new ArrayList<>();
         for (MeasureDesc m : measures) {
-            if (!m.getFunction().getExpression().toUpperCase().contains("PERCENTILE"))
+            if (!m.getFunction().getExpression().toUpperCase(Locale.ROOT).contains("PERCENTILE"))
                 result.add(m);
         }
         return result;
@@ -377,14 +378,15 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         metaFile.renameTo(new File(path.substring(0, path.length() - 4)));
 
         thrown.expect(IllegalArgumentException.class);
-        thrown.expectMessage("Too many rowkeys (78) in CubeDesc, please try to reduce dimension number or adopt derived dimensions");
+        thrown.expectMessage(
+                "Too many rowkeys (78) in CubeDesc, please try to reduce dimension number or adopt derived dimensions");
         getTestConfig().clearManagers();
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ut_78_rowkeys");
         cubeDesc.init(getTestConfig());
     }
 
     @Test
-    public void testValidateNotifyList() throws Exception{
+    public void testValidateNotifyList() throws Exception {
         thrown.expect(IllegalArgumentException.class);
         thrown.expectMessage("Email [test] is not validation.");
 
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java b/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
index 074d973..540933f 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/common/BytesSplitterTest.java
@@ -20,6 +20,8 @@ package org.apache.kylin.cube.common;
 
 import static org.junit.Assert.assertEquals;
 
+import java.nio.charset.StandardCharsets;
+
 import org.apache.kylin.common.util.BytesSplitter;
 import org.junit.Test;
 
@@ -32,23 +34,32 @@ public class BytesSplitterTest {
     @Test
     public void test() {
         BytesSplitter bytesSplitter = new BytesSplitter(10, 15);
-        byte[] input = "2013-02-17Collectibles".getBytes();
+        byte[] input = "2013-02-17Collectibles".getBytes(StandardCharsets.UTF_8);
         bytesSplitter.split(input, input.length, (byte) 127);
 
         assertEquals(2, bytesSplitter.getBufferSize());
-        assertEquals("2013-02-17", new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
-        assertEquals("Collectibles", new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
+        assertEquals("2013-02-17",
+                new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length, StandardCharsets.UTF_8));
+        assertEquals("Collectibles",
+                new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length, StandardCharsets.UTF_8)
+                );
     }
 
     @Test
     public void testNullValue() {
         BytesSplitter bytesSplitter = new BytesSplitter(10, 15);
-        byte[] input = "2013-02-17Collectibles".getBytes();
+        byte[] input = "2013-02-17Collectibles".getBytes(StandardCharsets.UTF_8);
         bytesSplitter.split(input, input.length, (byte) 127);
 
         assertEquals(3, bytesSplitter.getBufferSize());
-        assertEquals("2013-02-17", new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length));
-        assertEquals("", new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length));
-        assertEquals("Collectibles", new String(bytesSplitter.getSplitBuffers()[2].value, 0, bytesSplitter.getSplitBuffers()[2].length));
+        assertEquals("2013-02-17",
+                new String(bytesSplitter.getSplitBuffers()[0].value, 0, bytesSplitter.getSplitBuffers()[0].length, StandardCharsets.UTF_8)
+                );
+        assertEquals("",
+                new String(bytesSplitter.getSplitBuffers()[1].value, 0, bytesSplitter.getSplitBuffers()[1].length,
+                    StandardCharsets.UTF_8));
+        assertEquals("Collectibles",
+                new String(bytesSplitter.getSplitBuffers()[2].value, 0, bytesSplitter.getSplitBuffers()[2].length,
+                    StandardCharsets.UTF_8));
     }
 }
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java
index 41fa807..79c9426 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/TreeCuboidSchedulerTest.java
@@ -20,7 +20,9 @@ package org.apache.kylin.cube.cuboid;
 
 import static org.junit.Assert.assertEquals;
 
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -39,7 +41,7 @@ public class TreeCuboidSchedulerTest {
         long basicCuboid = getBaseCuboid(10);
         List<Long> cuboids = genRandomCuboids(basicCuboid, 200);
         CuboidTree cuboidTree = CuboidTree.createFromCuboids(cuboids);
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
         cuboidTree.print(out);
         out.flush();
     }
@@ -51,7 +53,7 @@ public class TreeCuboidSchedulerTest {
         long testCuboid = cuboids.get(10);
         System.out.println(cuboids);
         CuboidTree cuboidTree = CuboidTree.createFromCuboids(cuboids);
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
         cuboidTree.print(out);
         out.flush();
 
@@ -62,7 +64,7 @@ public class TreeCuboidSchedulerTest {
     @Test
     public void testFindBestMatchCuboid() {
         CuboidTree cuboidTree = createCuboidTree1();
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
         cuboidTree.print(out);
         out.flush();
 
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
index f749fb4..57ea26c 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheMemSizeTest.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.SortedMap;
 import java.util.TreeMap;
@@ -93,10 +94,10 @@ public class AggregationCacheMemSizeTest {
     }
 
     enum Settings {
-        WITHOUT_MEM_HUNGRY,     // only test basic aggrs
-        WITH_HLLC,              // basic aggrs + hllc
-        WITH_LOW_CARD_BITMAP,   // basic aggrs + bitmap
-        WITH_HIGH_CARD_BITMAP   // basic aggrs + bitmap
+        WITHOUT_MEM_HUNGRY, // only test basic aggrs
+        WITH_HLLC, // basic aggrs + hllc
+        WITH_LOW_CARD_BITMAP, // basic aggrs + bitmap
+        WITH_HIGH_CARD_BITMAP // basic aggrs + bitmap
     }
 
     private MeasureAggregator<?>[] createNoMemHungryAggrs() {
@@ -132,19 +133,19 @@ public class AggregationCacheMemSizeTest {
         aggregators.addAll(Arrays.asList(createNoMemHungryAggrs()));
 
         switch (settings) {
-            case WITHOUT_MEM_HUNGRY:
-                break;
-            case WITH_HLLC:
-                aggregators.add(createHLLCAggr());
-                break;
-            case WITH_LOW_CARD_BITMAP:
-                aggregators.add(createBitmapAggr(true));
-                break;
-            case WITH_HIGH_CARD_BITMAP:
-                aggregators.add(createBitmapAggr(false));
-                break;
-            default:
-                break;
+        case WITHOUT_MEM_HUNGRY:
+            break;
+        case WITH_HLLC:
+            aggregators.add(createHLLCAggr());
+            break;
+        case WITH_LOW_CARD_BITMAP:
+            aggregators.add(createBitmapAggr(true));
+            break;
+        case WITH_HIGH_CARD_BITMAP:
+            aggregators.add(createBitmapAggr(false));
+            break;
+        default:
+            break;
         }
 
         return aggregators.toArray(new MeasureAggregator[aggregators.size()]);
@@ -158,10 +159,10 @@ public class AggregationCacheMemSizeTest {
             bitmapAggrs[i].aggregate(bitmaps[i]);
         }
 
-        System.out.printf("%-15s %-10s %-10s\n", "cardinality", "estimate", "actual");
+        System.out.printf(Locale.ROOT, "%-15s %-10s %-10s\n", "cardinality", "estimate", "actual");
         for (BitmapAggregator aggr : bitmapAggrs) {
-            System.out.printf("%-15d %-10d %-10d\n",
-                    aggr.getState().getCount(), aggr.getMemBytesEstimate(), meter.measureDeep(aggr));
+            System.out.printf(Locale.ROOT, "%-15d %-10d %-10d\n", aggr.getState().getCount(),
+                    aggr.getMemBytesEstimate(), meter.measureDeep(aggr));
         }
     }
 
@@ -190,8 +191,8 @@ public class AggregationCacheMemSizeTest {
         long actualMillis = 0;
 
         System.out.println("Settings: " + settings);
-        System.out.printf("%15s %15s %15s %15s %15s\n",
-                "Size", "Estimate(bytes)", "Actual(bytes)", "Estimate(ms)", "Actual(ms)");
+        System.out.printf(Locale.ROOT, "%15s %15s %15s %15s %15s\n", "Size", "Estimate(bytes)", "Actual(bytes)",
+                "Estimate(ms)", "Actual(ms)");
 
         for (int i = 0; i < inputCount; i++) {
             byte[] key = new byte[10];
@@ -199,7 +200,7 @@ public class AggregationCacheMemSizeTest {
             MeasureAggregator[] values = createAggrs(settings);
             map.put(key, values);
 
-            if ((i+1) % reportInterval == 0) {
+            if ((i + 1) % reportInterval == 0) {
                 stopwatch.start();
                 long estimateBytes = GTAggregateScanner.estimateSizeOfAggrCache(key, values, map.size());
                 estimateMillis += stopwatch.elapsedMillis();
@@ -210,8 +211,8 @@ public class AggregationCacheMemSizeTest {
                 actualMillis += stopwatch.elapsedMillis();
                 stopwatch.reset();
 
-                System.out.printf("%,15d %,15d %,15d %,15d %,15d\n",
-                        map.size(), estimateBytes, actualBytes, estimateMillis, actualMillis);
+                System.out.printf(Locale.ROOT, "%,15d %,15d %,15d %,15d %,15d\n", map.size(), estimateBytes,
+                        actualBytes, estimateMillis, actualMillis);
             }
         }
         System.out.println("---------------------------------------\n");
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
index 9e68eb4..bcead85 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/AppendTrieDictionary.java
@@ -22,6 +22,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.util.Arrays;
+import java.util.Locale;
 import java.util.Objects;
 import java.util.TreeMap;
 import java.util.concurrent.ExecutionException;
@@ -60,9 +61,10 @@ import com.google.common.cache.RemovalNotification;
  *
  * @author sunyerui
  */
-@SuppressWarnings({"rawtypes", "unchecked", "serial"})
+@SuppressWarnings({ "rawtypes", "unchecked", "serial" })
 public class AppendTrieDictionary<T> extends CacheDictionary<T> {
-    public static final byte[] HEAD_MAGIC = new byte[]{0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74}; // "AppendTrieDict"
+    public static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65,
+            0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
     public static final int HEAD_SIZE_I = HEAD_MAGIC.length;
     private static final Logger logger = LoggerFactory.getLogger(AppendTrieDictionary.class);
 
@@ -85,19 +87,23 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
         final Path latestVersionPath = globalDictStore.getVersionDir(latestVersion);
         this.metadata = globalDictStore.getMetadata(latestVersion);
         this.bytesConvert = metadata.bytesConverter;
-        this.dictCache = CacheBuilder.newBuilder().softValues().removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
-            @Override
-            public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
-                logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}", notification.getKey(), notification.getValue(), notification.getCause(), dictCache.size(), metadata.sliceFileMap.size());
-            }
-        }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
-            @Override
-            public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
-                AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(), metadata.sliceFileMap.get(key));
-                logger.trace("Load slice with key {} and value {}", key, slice);
-                return slice;
-            }
-        });
+        this.dictCache = CacheBuilder.newBuilder().softValues()
+                .removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
+                    @Override
+                    public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
+                        logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}",
+                                notification.getKey(), notification.getValue(), notification.getCause(),
+                                dictCache.size(), metadata.sliceFileMap.size());
+                    }
+                }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
+                    @Override
+                    public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
+                        AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(),
+                                metadata.sliceFileMap.get(key));
+                        logger.trace("Load slice with key {} and value {}", key, slice);
+                        return slice;
+                    }
+                });
     }
 
     @Override
@@ -162,7 +168,8 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
 
     @Override
     public void dump(PrintStream out) {
-        out.println(String.format("Total %d values and %d slices", metadata.nValues, metadata.sliceFileMap.size()));
+        out.println(String.format(Locale.ROOT, "Total %d values and %d slices", metadata.nValues,
+                metadata.sliceFileMap.size()));
     }
 
     @Override
@@ -184,7 +191,7 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
 
     @Override
     public String toString() {
-        return String.format("AppendTrieDictionary(%s)", baseDir);
+        return String.format(Locale.ROOT, "AppendTrieDictionary(%s)", baseDir);
     }
 
     @Override
@@ -221,7 +228,8 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
         if (paths.length == 2) {
             return kylinConfig.getHdfsWorkingDirectory() + "/resources/SegmentDict/" + paths[1];
         } else {
-            throw new RuntimeException("the basic directory of global dictionary only support the format which contains '/resources/GlobalDict/' or '/resources/SegmentDict/'");
+            throw new RuntimeException(
+                    "the basic directory of global dictionary only support the format which contains '/resources/GlobalDict/' or '/resources/SegmentDict/'");
         }
     }
 
@@ -230,7 +238,7 @@ public class AppendTrieDictionary<T> extends CacheDictionary<T> {
      *
      * @param flag
      */
-   void setSaveAbsolutePath(Boolean flag) {
+    void setSaveAbsolutePath(Boolean flag) {
         this.isSaveAbsolutePath = flag;
     }
 }
\ No newline at end of file
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
index 00410e7..c86bcb8 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/GlobalDictionaryBuilder.java
@@ -19,6 +19,8 @@
 package org.apache.kylin.dict;
 
 import java.io.IOException;
+
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.lock.DistributedLock;
 import org.apache.kylin.common.util.Dictionary;
@@ -58,7 +60,8 @@ public class GlobalDictionaryBuilder implements IDictionaryBuilder {
             this.builder = new AppendTrieDictionaryBuilder(baseDir, maxEntriesPerSlice, true);
         } catch (Throwable e) {
             lock.unlock(getLockPath(sourceColumn));
-            throw new RuntimeException(String.format("Failed to create global dictionary on %s ", sourceColumn), e);
+            throw new RuntimeException(
+                    String.format(Locale.ROOT, "Failed to create global dictionary on %s ", sourceColumn), e);
         }
         this.baseId = baseId;
     }
@@ -69,7 +72,8 @@ public class GlobalDictionaryBuilder implements IDictionaryBuilder {
             if (lock.lock(getLockPath(sourceColumn))) {
                 logger.info("processed {} values for {}", counter, sourceColumn);
             } else {
-                throw new RuntimeException("Failed to create global dictionary on " + sourceColumn + " This client doesn't keep the lock");
+                throw new RuntimeException(
+                        "Failed to create global dictionary on " + sourceColumn + " This client doesn't keep the lock");
             }
         }
 
@@ -81,7 +85,8 @@ public class GlobalDictionaryBuilder implements IDictionaryBuilder {
             builder.addValue(value);
         } catch (Throwable e) {
             lock.unlock(getLockPath(sourceColumn));
-            throw new RuntimeException(String.format("Failed to create global dictionary on %s ", sourceColumn), e);
+            throw new RuntimeException(
+                    String.format(Locale.ROOT, "Failed to create global dictionary on %s ", sourceColumn), e);
         }
 
         return true;
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java
index 35c995e..584d58e 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/ShrunkenDictionary.java
@@ -22,6 +22,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.util.Dictionary;
@@ -107,7 +108,7 @@ public class ShrunkenDictionary<T> extends Dictionary<T> {
     }
 
     public void dump(PrintStream out) {
-        out.println(String.format("Total %d values for ShrunkenDictionary", valueToIdMap.size()));
+        out.println(String.format(Locale.ROOT, "Total %d values for ShrunkenDictionary", valueToIdMap.size()));
     }
 
     public void write(DataOutput out) throws IOException {
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
index ee3a2c2..c25e9b0 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictNode.java
@@ -18,11 +18,6 @@
 
 package org.apache.kylin.dict.global;
 
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.BytesUtil;
-import org.apache.kylin.dict.AppendTrieDictionary;
-import org.apache.kylin.dict.TrieDictionary;
-
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -32,6 +27,12 @@ import java.util.ArrayList;
 import java.util.IdentityHashMap;
 import java.util.LinkedList;
 
+import java.util.Locale;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.BytesUtil;
+import org.apache.kylin.dict.AppendTrieDictionary;
+import org.apache.kylin.dict.TrieDictionary;
+
 public class AppendDictNode {
     public byte[] part;
     public int id = -1;
@@ -195,12 +196,14 @@ public class AppendDictNode {
     }
 
     private void build_overwriteChildOffset(int parentOffset, int childOffset, int sizeChildOffset, byte[] trieBytes) {
-        int flags = (int) trieBytes[parentOffset] & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
+        int flags = (int) trieBytes[parentOffset]
+                & (TrieDictionary.BIT_IS_LAST_CHILD | TrieDictionary.BIT_IS_END_OF_VALUE);
         BytesUtil.writeUnsigned(childOffset, trieBytes, parentOffset, sizeChildOffset);
         trieBytes[parentOffset] |= flags;
     }
 
-    private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, int sizeChildOffset, int sizeId, byte[] trieBytes) {
+    private int build_writeNode(AppendDictNode n, int offset, boolean isLastChild, int sizeChildOffset, int sizeId,
+            byte[] trieBytes) {
         int o = offset;
 
         // childOffset
@@ -212,7 +215,8 @@ public class AppendDictNode {
 
         // nValueBytes
         if (n.part.length > 255)
-            throw new RuntimeException("Value length is " + n.part.length + " and larger than 255: " + Bytes.toStringBinary(n.part));
+            throw new RuntimeException(
+                    "Value length is " + n.part.length + " and larger than 255: " + Bytes.toStringBinary(n.part));
         BytesUtil.writeUnsigned(n.part.length, trieBytes, o, 1);
         o++;
 
@@ -238,7 +242,8 @@ public class AppendDictNode {
 
     @Override
     public String toString() {
-        return String.format("DictNode[root=%s, nodes=%d, firstValue=%s]", Bytes.toStringBinary(part), childrenCount, Bytes.toStringBinary(firstValue()));
+        return String.format(Locale.ROOT, "DictNode[root=%s, nodes=%d, firstValue=%s]", Bytes.toStringBinary(part),
+                childrenCount, Bytes.toStringBinary(firstValue()));
     }
 
     static class Stats {
@@ -332,7 +337,8 @@ public class AppendDictNode {
             s.mbpn_sizeValueTotal = s.nValueBytesCompressed + s.nValues * s.mbpn_sizeId;
             s.mbpn_sizeNoValueBytes = 1;
             s.mbpn_sizeChildOffset = 5;
-            s.mbpn_footprint = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset);
+            s.mbpn_footprint = s.mbpn_sizeValueTotal
+                    + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset);
             while (true) { // minimize the offset size to match the footprint
                 int t = s.mbpn_sizeValueTotal + s.mbpn_nNodes * (s.mbpn_sizeNoValueBytes + s.mbpn_sizeChildOffset - 1);
                 // *4 because 2 MSB of offset is used for isEndOfValue & isEndChild flag
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
index 4e820e0..7972bd3 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/AppendDictSlice.java
@@ -18,9 +18,6 @@
 
 package org.apache.kylin.dict.global;
 
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.BytesUtil;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -29,8 +26,13 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
 
+import java.util.Locale;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.BytesUtil;
+
 public class AppendDictSlice {
-    static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
+    static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44,
+            0x69, 0x63, 0x74 }; // "AppendTrieDict"
     static final int HEAD_SIZE_I = HEAD_MAGIC.length;
     static final int BIT_IS_LAST_CHILD = 0x80;
     static final int BIT_IS_END_OF_VALUE = 0x40;
@@ -58,7 +60,8 @@ public class AppendDictSlice {
             throw new IllegalArgumentException("Wrong file type (magic does not match)");
 
         try {
-            DataInputStream headIn = new DataInputStream(new ByteArrayInputStream(trieBytes, HEAD_SIZE_I, trieBytes.length - HEAD_SIZE_I));
+            DataInputStream headIn = new DataInputStream(
+                    new ByteArrayInputStream(trieBytes, HEAD_SIZE_I, trieBytes.length - HEAD_SIZE_I));
             this.headSize = headIn.readShort();
             this.bodyLen = headIn.readInt();
             this.nValues = headIn.readInt();
@@ -104,7 +107,8 @@ public class AppendDictSlice {
             if (checkFlag(nodeOffset, BIT_IS_END_OF_VALUE)) {
                 break;
             }
-            nodeOffset = headSize + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
+            nodeOffset = headSize
+                    + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
             if (nodeOffset == headSize) {
                 break;
             }
@@ -155,7 +159,8 @@ public class AppendDictSlice {
                 } else if (comp < 0) { // try next child
                     if (checkFlag(c, BIT_IS_LAST_CHILD))
                         return -1;
-                    c = p + BytesUtil.readUnsigned(trieBytes, p - 1, 1) + (checkFlag(c, BIT_IS_END_OF_VALUE) ? sizeOfId : 0);
+                    c = p + BytesUtil.readUnsigned(trieBytes, p - 1, 1)
+                            + (checkFlag(c, BIT_IS_END_OF_VALUE) ? sizeOfId : 0);
                 } else { // children are ordered by their first value byte
                     return -1;
                 }
@@ -261,7 +266,8 @@ public class AppendDictSlice {
 
     @Override
     public String toString() {
-        return String.format("DictSlice[firstValue=%s, values=%d, bytes=%d]", Bytes.toStringBinary(getFirstValue()), nValues, bodyLen);
+        return String.format(Locale.ROOT, "DictSlice[firstValue=%s, values=%d, bytes=%d]",
+                Bytes.toStringBinary(getFirstValue()), nValues, bodyLen);
     }
 
     @Override
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
index ec79f2c..f286085 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictHDFSStore.java
@@ -23,6 +23,7 @@ import static com.google.common.base.Preconditions.checkState;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.TreeSet;
@@ -133,7 +134,7 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
     @Override
     public Long[] listAllVersions() throws IOException {
         if (!fileSystem.exists(basePath)) {
-            return new Long[0];  // for the removed SegmentAppendTrieDictBuilder
+            return new Long[0]; // for the removed SegmentAppendTrieDictBuilder
         }
 
         FileStatus[] versionDirs = fileSystem.listStatus(basePath, new PathFilter() {
@@ -261,9 +262,12 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
             return baseDir;
         }
 
-        checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()), "Please check why current directory {} doesn't belong to source working directory {}", baseDir, srcConfig.getHdfsWorkingDirectory());
+        checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()),
+                "Please check why current directory {} doesn't belong to source working directory {}", baseDir,
+                srcConfig.getHdfsWorkingDirectory());
 
-        final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(), dstConfig.getHdfsWorkingDirectory());
+        final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(),
+                dstConfig.getHdfsWorkingDirectory());
 
         Long[] versions = listAllVersions();
         if (versions.length == 0) { // empty dict, nothing to copy
@@ -271,7 +275,8 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
         }
 
         Path srcVersionDir = getVersionDir(versions[versions.length - 1]);
-        Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(), dstConfig.getHdfsWorkingDirectory()));
+        Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(),
+                dstConfig.getHdfsWorkingDirectory()));
         FileSystem dstFS = dstVersionDir.getFileSystem(conf);
         if (dstFS.exists(dstVersionDir)) {
             dstFS.delete(dstVersionDir, true);
@@ -421,13 +426,14 @@ public class GlobalDictHDFSStore extends GlobalDictStore {
         public void sanityCheck(Path dir, GlobalDictMetadata metadata) throws IOException {
             for (Map.Entry<AppendDictSliceKey, String> entry : metadata.sliceFileMap.entrySet()) {
                 if (!fs.exists(new Path(dir, entry.getValue()))) {
-                    throw new RuntimeException("The slice file " + entry.getValue() + " for the key: " + entry.getKey() + " must be existed!");
+                    throw new RuntimeException("The slice file " + entry.getValue() + " for the key: " + entry.getKey()
+                            + " must be existed!");
                 }
             }
         }
 
         public static String sliceFileName(AppendDictSliceKey key) {
-            return String.format("%s%d_%d", SLICE_PREFIX, System.currentTimeMillis(), key.hashCode());
+            return String.format(Locale.ROOT, "%s%d_%d", SLICE_PREFIX, System.currentTimeMillis(), key.hashCode());
         }
     }
 }
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java
index c5b61b5..770b0bc 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/global/SegmentAppendTrieDictBuilder.java
@@ -20,6 +20,7 @@ package org.apache.kylin.dict.global;
 
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Dictionary;
 import org.apache.kylin.common.util.RandomUtil;
@@ -50,7 +51,8 @@ public class SegmentAppendTrieDictBuilder implements IDictionaryBuilder {
 
         //use UUID to make each segment dict in different HDFS dir and support concurrent build
         //use timestamp to make the segment dict easily to delete
-        String baseDir = hdfsDir + "resources/SegmentDict" + dictInfo.getResourceDir() + "/" + RandomUtil.randomUUID().toString() + "_" + System.currentTimeMillis()+ "/";
+        String baseDir = hdfsDir + "resources/SegmentDict" + dictInfo.getResourceDir() + "/"
+                + RandomUtil.randomUUID().toString() + "_" + System.currentTimeMillis() + "/";
 
         this.builder = new AppendTrieDictionaryBuilder(baseDir, maxEntriesPerSlice, false);
         this.baseId = baseId;
@@ -65,7 +67,8 @@ public class SegmentAppendTrieDictBuilder implements IDictionaryBuilder {
         try {
             builder.addValue(value);
         } catch (Throwable e) {
-            throw new RuntimeException(String.format("Failed to create global dictionary on %s ", sourceColumn), e);
+            throw new RuntimeException(
+                    String.format(Locale.ROOT, "Failed to create global dictionary on %s ", sourceColumn), e);
         }
 
         return true;
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java
index ea61c61..7e5421a 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/AppendTrieDictionaryTest.java
@@ -40,6 +40,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.TreeMap;
@@ -70,7 +71,8 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
     public void beforeTest() {
         staticCreateTestMetadata();
         KylinConfig.getInstanceFromEnv().setProperty("kylin.dictionary.append-entry-size", "50000");
-        BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR + "/";
+        BASE_DIR = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR
+                + "/";
         LOCAL_BASE_DIR = getLocalWorkingDirectory() + "/resources/GlobalDict" + RESOURCE_DIR + "/";
     }
 
@@ -88,11 +90,19 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         }
     }
 
-    private static final String[] words = new String[]{"paint", "par", "part", "parts", "partition", "partitions", "party", "partie", "parties", "patient", "taste", "tar", "trie", "try", "tries", "字典", "字典树", "字母", // non-ascii characters
+    private static final String[] words = new String[] { "paint", "par", "part", "parts", "partition", "partitions",
+            "party", "partie", "parties", "patient", "taste", "tar", "trie", "try", "tries", "字典", "字典树", "字母", // non-ascii characters
             "", // empty
-            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii", "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii [...]
-            "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddd [...]
-                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk" + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk",
+            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii",
+            "paiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiipaiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii [...]
+            "paintjkjdfklajkdljfkdsajklfjklsadjkjekjrklewjrklewjklrjklewjkljkljkljkljweklrjewkljrklewjrlkjewkljrkljkljkjlkjjkljkljkljkljlkjlkjlkjljdfadfads"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk"
+                    + "dddddddddddddddddddddddddddddddddddddddddddddddddkfjadslkfjdsakljflksadjklfjklsjfkljwelkrjewkljrklewjklrjelkwjrklewjrlkjwkljerklkljlkjrlkwejrk",
             "paint", "tar", "try", // some dup
     };
 
@@ -172,7 +182,8 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         dict.dump(System.out);
     }
 
-    private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notfound, boolean shuffleList) throws IOException {
+    private void testStringDictAppend(ArrayList<String> list, ArrayList<String> notfound, boolean shuffleList)
+            throws IOException {
         Random rnd = new Random(System.currentTimeMillis());
         ArrayList<String> strList = new ArrayList<String>();
         strList.addAll(list);
@@ -198,8 +209,10 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
             String str = strList.get(checkIndex);
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
-            assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
+            assertFalse(
+                    String.format(Locale.ROOT, "Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)),
+                    checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
             checkMap.put(id, str);
         }
 
@@ -218,12 +231,13 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
             String str = strList.get(checkIndex);
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             if (checkIndex < firstAppend) {
                 assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
             } else {
                 // check second append str, should be new id
-                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+                assertFalse(String.format(Locale.ROOT, "Id %d for %s should be empty, but is %s", id, str,
+                        checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
                 checkMap.put(id, str);
             }
         }
@@ -243,12 +257,13 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
             String str = strList.get(checkIndex);
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             if (checkIndex < secondAppend) {
                 assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
             } else {
                 // check third append str, should be new id
-                assertFalse(String.format("Id %d for %s should be empty, but is %s", id, str, checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
+                assertFalse(String.format(Locale.ROOT, "Id %d for %s should be empty, but is %s", id, str,
+                        checkMap.get(id)), checkMap.containsKey(id) && !str.equals(checkMap.get(id)));
                 checkMap.put(id, str);
             }
         }
@@ -264,12 +279,13 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         for (String str : strList) {
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
         }
     }
 
-    private static AppendTrieDictionary<String> testSerialize(AppendTrieDictionary<String> dict, BytesConverter converter) {
+    private static AppendTrieDictionary<String> testSerialize(AppendTrieDictionary<String> dict,
+            BytesConverter converter) {
         try {
             ByteArrayOutputStream bout = new ByteArrayOutputStream();
             DataOutputStream dataout = new DataOutputStream(bout);
@@ -347,14 +363,14 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
     public void testSerialize() throws IOException {
         AppendTrieDictionaryBuilder builder = createBuilder();
         AppendTrieDictionary dict = builder.build(0);
-        
+
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
         DataOutputStream dataout = new DataOutputStream(bout);
         dict.write(dataout);
         dataout.close();
         ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
         DataInputStream datain = new DataInputStream(bin);
-        
+
         assertNull(new Path(datain.readUTF()).toUri().getScheme());
         datain.close();
     }
@@ -369,7 +385,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         AppendTrieDictionary dict = builder.build(0);
         TreeMap checkMap = new TreeMap();
         BytesConverter converter = new StringBytesConverter();
-        for (String str: strList) {
+        for (String str : strList) {
             byte[] bytes = converter.convertToBytes(str);
             int id = dict.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
             checkMap.put(id, str);
@@ -388,7 +404,7 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         for (String str : strList) {
             byte[] bytes = converter.convertToBytes(str);
             int id = r.getIdFromValueBytesWithoutCache(bytes, 0, bytes.length, 0);
-            assertNotEquals(String.format("Value %s not exist", str), -1, id);
+            assertNotEquals(String.format(Locale.ROOT, "Value %s not exist", str), -1, id);
             assertEquals("Except id " + id + " for " + str + " but " + checkMap.get(id), str, checkMap.get(id));
         }
     }
@@ -566,7 +582,8 @@ public class AppendTrieDictionaryTest extends LocalFileMetadataTestCase {
         Path v2IndexFile = new Path(versionPath, V2_INDEX_NAME);
 
         fs.delete(v2IndexFile, true);
-        GlobalDictHDFSStore.IndexFormat indexFormatV1 = new GlobalDictHDFSStore.IndexFormatV1(fs, HadoopUtil.getCurrentConfiguration());
+        GlobalDictHDFSStore.IndexFormat indexFormatV1 = new GlobalDictHDFSStore.IndexFormatV1(fs,
+                HadoopUtil.getCurrentConfiguration());
         indexFormatV1.writeIndexFile(versionPath, metadata);
 
         //convert v2 fileName format to v1 fileName format
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
index 6e8e655..4f5ddff 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 import java.util.Random;
@@ -45,7 +46,8 @@ import com.google.common.collect.Sets;
  */
 public class NumberDictionaryTest extends LocalFileMetadataTestCase {
 
-    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT);
+    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(
+            MAX_DIGITS_BEFORE_DECIMAL_POINT);
     Random rand = new Random();
 
     @Before
@@ -78,7 +80,8 @@ public class NumberDictionaryTest extends LocalFileMetadataTestCase {
         String[] ints = new String[] { "", "0", "5", "100", "13" };
 
         // check "" is treated as NULL, not a code of dictionary
-        Dictionary<?> dict = DictionaryGenerator.buildDictionary(DataType.getType("integer"), new IterableDictionaryValueEnumerator(ints));
+        Dictionary<?> dict = DictionaryGenerator.buildDictionary(DataType.getType("integer"),
+                new IterableDictionaryValueEnumerator(ints));
         assertEquals(4, dict.getSize());
 
         final int id = ((NumberDictionary<String>) dict).getIdFromValue("");
@@ -95,7 +98,8 @@ public class NumberDictionaryTest extends LocalFileMetadataTestCase {
         //test resolved jira-1800
         checkCodec("-0.0045454354354354359999999999877218", "-9999999999999999999.9954545645645645640000000000122781;");
         checkCodec("-0.009999999999877218", "-9999999999999999999.990000000000122781;");
-        checkCodec("12343434372493274.438403840384023840253554345345345345", "00012343434372493274.438403840384023840253554345345345345");
+        checkCodec("12343434372493274.438403840384023840253554345345345345",
+                "00012343434372493274.438403840384023840253554345345345345");
         assertEquals("00000000000000000052.57", encodeNumber("52.5700"));
         assertEquals("00000000000000000000", encodeNumber("0.00"));
         assertEquals("00000000000000000000", encodeNumber("0.0"));
@@ -141,15 +145,15 @@ public class NumberDictionaryTest extends LocalFileMetadataTestCase {
 
         // test exact match
         NumberDictionary<String> dict = builder.build(0);
-//        for (int i = 0; i < sorted.size(); i++) {
-//            String dictNum = dict.getValueFromId(i);
-//            System.out.println(sorted.get(i) + "\t" + dictNum);
-//        }
+        //        for (int i = 0; i < sorted.size(); i++) {
+        //            String dictNum = dict.getValueFromId(i);
+        //            System.out.println(sorted.get(i) + "\t" + dictNum);
+        //        }
 
         for (int i = 0; i < sorted.size(); i++) {
             String dictNum = dict.getValueFromId(i);
             assertEquals(sorted.get(i), new BigDecimal(dictNum));
-            assertEquals(sorted.get(i), new BigDecimal(new String(dict.getValueByteFromId(i))));
+            assertEquals(sorted.get(i), new BigDecimal(new String(dict.getValueByteFromId(i), StandardCharsets.UTF_8)));
         }
 
         // test rounding
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
index 1f63111..3e50224 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/TrieDictionaryForestTest.java
@@ -33,6 +33,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -754,7 +755,7 @@ public class TrieDictionaryForestTest {
     private void evaluateDataSize(ArrayList<String> list) {
         long size = 0;
         for (String str : list)
-            size += str.getBytes().length;
+            size += str.getBytes(StandardCharsets.UTF_8).length;
         System.out.println("test data size : " + size / (1024 * 1024) + " MB");
     }
 
@@ -763,7 +764,7 @@ public class TrieDictionaryForestTest {
         Iterator<String> itr = rs.iterator();
         long bytesCount = 0;
         while (itr.hasNext())
-            bytesCount += itr.next().getBytes().length;
+            bytesCount += itr.next().getBytes(StandardCharsets.UTF_8).length;
         System.out.println("test data size : " + bytesCount / (1024 * 1024) + " MB");
     }
 
diff --git a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index 86659d9..ff48244 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -23,9 +23,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeSegment;
@@ -41,6 +41,8 @@ import org.apache.kylin.metadata.model.SegmentRange;
 import org.apache.kylin.metadata.model.TableRef;
 import org.apache.kylin.metadata.model.TblColRef;
 
+import com.google.common.collect.Lists;
+
 /**
  *
  */
@@ -143,7 +145,7 @@ public class JoinedFlatTable {
             if (i > 0) {
                 sql.append(",");
             }
-            String colTotalName = String.format("%s.%s", col.getTableRef().getTableName(), col.getName());
+            String colTotalName = String.format(Locale.ROOT, "%s.%s", col.getTableRef().getTableName(), col.getName());
             if (skipAsList.contains(colTotalName)) {
                 sql.append(col.getExpressionInSourceDB() + sep);
             } else {
@@ -173,7 +175,7 @@ public class JoinedFlatTable {
                     if (pk.length != fk.length) {
                         throw new RuntimeException("Invalid join condition of lookup table:" + lookupDesc);
                     }
-                    String joinType = join.getType().toUpperCase();
+                    String joinType = join.getType().toUpperCase(Locale.ROOT);
 
                     sql.append(joinType + " JOIN " + dimTable.getTableIdentity() + " as " + dimTable.getAlias() + sep);
                     sql.append("ON ");
@@ -245,7 +247,7 @@ public class JoinedFlatTable {
     }
 
     private static String getHiveDataType(String javaDataType) {
-        String originDataType = javaDataType.toLowerCase();
+        String originDataType = javaDataType.toLowerCase(Locale.ROOT);
         String hiveDataType;
         if (originDataType.startsWith("varchar")) {
             hiveDataType = "string";
diff --git a/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java b/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
index 9ba602f..c85d11a 100644
--- a/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
+++ b/core-job/src/main/java/org/apache/kylin/job/engine/JobEngineConfig.java
@@ -21,6 +21,7 @@ package org.apache.kylin.job.engine;
 import java.io.File;
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
@@ -52,7 +53,7 @@ public class JobEngineConfig {
     private String getHadoopJobConfFilePath(String suffix, boolean appendSuffix) throws IOException {
         String hadoopJobConfFile;
         if (suffix != null && appendSuffix) {
-            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + suffix.toLowerCase() + ".xml");
+            hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + "_" + suffix.toLowerCase(Locale.ROOT) + ".xml");
         } else {
             hadoopJobConfFile = (HADOOP_JOB_CONF_FILENAME + ".xml");
         }
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
index 45c37b5..09b7b8e 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecutableManager.java
@@ -26,6 +26,7 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.IllegalFormatException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang3.StringUtils;
@@ -511,7 +512,7 @@ public class ExecutableManager {
         if (info.containsKey(YARN_APP_ID) && !StringUtils.isEmpty(config.getJobTrackingURLPattern())) {
             String pattern = config.getJobTrackingURLPattern();
             try {
-                String newTrackingURL = String.format(pattern, info.get(YARN_APP_ID));
+                String newTrackingURL = String.format(Locale.ROOT, pattern, info.get(YARN_APP_ID));
                 info.put(YARN_APP_URL, newTrackingURL);
             } catch (IllegalFormatException ife) {
                 logger.error("Illegal tracking url pattern: " + config.getJobTrackingURLPattern());
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
index 6d40be8..6bbc2ce 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.job.impl.threadpool;
 
+import java.util.Locale;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
@@ -54,7 +55,7 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
         }
         return INSTANCE;
     }
-    
+
     public synchronized static DefaultScheduler createInstance() {
         destroyInstance();
         INSTANCE = new DefaultScheduler();
@@ -75,7 +76,7 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
     }
 
     // ============================================================================
-    
+
     private JobLock jobLock;
     private ExecutableManager executableManager;
     private FetcherRunner fetcher;
@@ -144,7 +145,7 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
         jobLock = lock;
 
         String serverMode = jobEngineConfig.getConfig().getServerMode();
-        if (!("job".equals(serverMode.toLowerCase()) || "all".equals(serverMode.toLowerCase()))) {
+        if (!("job".equals(serverMode.toLowerCase(Locale.ROOT)) || "all".equals(serverMode.toLowerCase(Locale.ROOT)))) {
             logger.info("server mode: " + serverMode + ", no need to run job scheduler");
             return;
         }
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
index cb4d815..d6f9fe2 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DistributedScheduler.java
@@ -20,6 +20,7 @@ package org.apache.kylin.job.impl.threadpool;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArraySet;
 import java.util.concurrent.ExecutorService;
@@ -61,7 +62,7 @@ import com.google.common.collect.Maps;
  */
 public class DistributedScheduler implements Scheduler<AbstractExecutable>, ConnectionStateListener {
     private static final Logger logger = LoggerFactory.getLogger(DistributedScheduler.class);
-    
+
     public static final String ZOOKEEPER_LOCK_PATH = "/job_engine/lock"; // note ZookeeperDistributedLock will ensure zk path prefix: /${kylin.env.zookeeper-base-path}/metadata
 
     public static DistributedScheduler getInstance(KylinConfig config) {
@@ -74,7 +75,7 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
     }
 
     // ============================================================================
-    
+
     private ExecutableManager executableManager;
     private FetcherRunner fetcher;
     private ScheduledExecutorService fetcherPool;
@@ -189,7 +190,7 @@ public class DistributedScheduler implements Scheduler<AbstractExecutable>, Conn
     @Override
     public synchronized void init(JobEngineConfig jobEngineConfig, JobLock jobLock) throws SchedulerException {
         String serverMode = jobEngineConfig.getConfig().getServerMode();
-        if (!("job".equals(serverMode.toLowerCase()) || "all".equals(serverMode.toLowerCase()))) {
+        if (!("job".equals(serverMode.toLowerCase(Locale.ROOT)) || "all".equals(serverMode.toLowerCase(Locale.ROOT)))) {
             logger.info("server mode: " + serverMode + ", no need to run job scheduler");
             return;
         }
diff --git a/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java b/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java
index dd18c91..5a8caa8 100644
--- a/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java
+++ b/core-job/src/main/java/org/apache/kylin/job/metrics/JobMetricsFacade.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.job.metrics;
 
+import java.util.Locale;
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metrics.MetricsManager;
 import org.apache.kylin.metrics.lib.impl.RecordEvent;
@@ -112,7 +114,7 @@ public class JobMetricsFacade {
         public void setWrapper(String user, String projectName, String cubeName, String jobId, String jobType,
                 String cubingType) {
             this.user = user;
-            this.projectName = projectName == null ? null : projectName.toUpperCase();
+            this.projectName = projectName == null ? null : projectName.toUpperCase(Locale.ROOT);
             this.cubeName = cubeName;
             this.jobId = jobId;
             this.jobType = jobType;
diff --git a/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java b/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
index dcc8d47..39c42d7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/dimension/DictionaryDimEnc.java
@@ -23,6 +23,7 @@ import java.io.ObjectInput;
 import java.io.ObjectOutput;
 import java.io.Serializable;
 import java.nio.ByteBuffer;
+import java.util.Locale;
 
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.Dictionary;
@@ -100,7 +101,8 @@ public class DictionaryDimEnc extends DimensionEncoding implements Serializable
             for (int i = outputOffset; i < outputOffset + fixedLen; i++) {
                 output[i] = defaultByte;
             }
-            logger.error("Can't translate value " + valueStr + " to dictionary ID, roundingFlag " + roundingFlag + ". Using default value " + String.format("\\x%02X", defaultByte));
+            logger.error("Can't translate value " + valueStr + " to dictionary ID, roundingFlag " + roundingFlag + "."
+                    + " Using default value " + String.format(Locale.ROOT, "\\x%02X", defaultByte));
         }
     }
 
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
index 9699d2e..d16a705 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/MeasureTypeFactory.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.measure;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.KylinConfig;
@@ -133,11 +134,11 @@ abstract public class MeasureTypeFactory<T> {
         // register factories & data type serializers
         for (MeasureTypeFactory<?> factory : factoryInsts) {
             String funcName = factory.getAggrFunctionName();
-            if (!funcName.equals(funcName.toUpperCase()))
+            if (!funcName.equals(funcName.toUpperCase(Locale.ROOT)))
                 throw new IllegalArgumentException(
                         "Aggregation function name '" + funcName + "' must be in upper case");
             String dataTypeName = factory.getAggrDataTypeName();
-            if (!dataTypeName.equals(dataTypeName.toLowerCase()))
+            if (!dataTypeName.equals(dataTypeName.toLowerCase(Locale.ROOT)))
                 throw new IllegalArgumentException(
                         "Aggregation data type name '" + dataTypeName + "' must be in lower case");
             Class<? extends DataTypeSerializer<?>> serializer = factory.getAggrDataTypeSerializer();
@@ -164,7 +165,7 @@ abstract public class MeasureTypeFactory<T> {
             return;
 
         for (String udaf : udafs.keySet()) {
-            udaf = udaf.toUpperCase();
+            udaf = udaf.toUpperCase(Locale.ROOT);
             if (udaf.equals(FunctionDesc.FUNC_COUNT_DISTINCT))
                 continue; // skip built-in function
 
@@ -200,7 +201,7 @@ abstract public class MeasureTypeFactory<T> {
     }
 
     public static MeasureType<?> create(String funcName, DataType dataType) {
-        funcName = funcName.toUpperCase();
+        funcName = funcName.toUpperCase(Locale.ROOT);
 
         List<MeasureTypeFactory<?>> factory = factories.get(funcName);
         if (factory == null)
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
index c95cd1c..b38299a 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnMeasureType.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.measure.extendedcolumn;
 
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -114,7 +115,8 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
     }
 
     @Override
-    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, final MeasureDesc measureDesc) {
+    public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions,
+            Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, final MeasureDesc measureDesc) {
         TblColRef extendedCol = getExtendedColumn(measureDesc.getFunction());
 
         if (!unmatchedDimensions.contains(extendedCol)) {
@@ -144,9 +146,11 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
         return true;
     }
 
-    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo, Map<TblColRef, Dictionary<String>> dictionaryMap) {
+    public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo returnTupleInfo,
+            Map<TblColRef, Dictionary<String>> dictionaryMap) {
         final TblColRef extended = getExtendedColumn(function);
-        final int extendedColumnInTupleIdx = returnTupleInfo.hasColumn(extended) ? returnTupleInfo.getColumnIndex(extended) : -1;
+        final int extendedColumnInTupleIdx = returnTupleInfo.hasColumn(extended)
+                ? returnTupleInfo.getColumnIndex(extended) : -1;
 
         if (extendedColumnInTupleIdx == -1) {
             throw new RuntimeException("Extended column is not required in returnTupleInfo");
@@ -216,7 +220,8 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
             }
 
             @Override
-            public ByteArray valueOf(String[] values, MeasureDesc measureDesc, Map<TblColRef, Dictionary<String>> dictionaryMap) {
+            public ByteArray valueOf(String[] values, MeasureDesc measureDesc,
+                    Map<TblColRef, Dictionary<String>> dictionaryMap) {
                 if (values.length <= 1)
                     throw new IllegalArgumentException();
 
@@ -229,7 +234,8 @@ public class ExtendedColumnMeasureType extends MeasureType<ByteArray> {
                 if (bytes.length <= dataType.getPrecision()) {
                     return new ByteArray(bytes);
                 } else {
-                    return new ByteArray(truncateWhenUTF8(literal, dataType.getPrecision()).getBytes());
+                    return new ByteArray(
+                            truncateWhenUTF8(literal, dataType.getPrecision()).getBytes(StandardCharsets.UTF_8));
                 }
             }
         };
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
index f3959fa..28d0ec7 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/TableMetadataManager.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -230,9 +231,9 @@ public class TableMetadataManager {
      */
     private String getTableIdentity(String tableName) {
         if (!tableName.contains("."))
-            return "DEFAULT." + tableName.toUpperCase();
+            return "DEFAULT." + tableName.toUpperCase(Locale.ROOT);
         else
-            return tableName.toUpperCase();
+            return tableName.toUpperCase(Locale.ROOT);
     }
 
     public void saveSourceTable(TableDesc srcTable, String prj) throws IOException {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
index bcbbe49..4d4b87f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/BooleanSerializer.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.metadata.datatype;
 
 import java.nio.ByteBuffer;
+import java.util.Locale;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.BooleanUtils;
@@ -59,8 +60,9 @@ public class BooleanSerializer extends DataTypeSerializer<Long> {
     @Override
     public Long valueOf(String str) {
         if (str == null)
-           return Long.valueOf(0L);
+            return Long.valueOf(0L);
         else
-            return Long.valueOf(BooleanUtils.toInteger(ArrayUtils.contains(TRUE_VALUE_SET, str.toLowerCase())));
+            return Long
+                    .valueOf(BooleanUtils.toInteger(ArrayUtils.contains(TRUE_VALUE_SET, str.toLowerCase(Locale.ROOT))));
     }
 }
\ No newline at end of file
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
index efc4784..7580ad0 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/datatype/DataType.java
@@ -23,6 +23,7 @@ import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -169,7 +170,7 @@ public class DataType implements Serializable {
     }
 
     private DataType(String datatype) {
-        datatype = datatype.trim().toLowerCase();
+        datatype = datatype.trim().toLowerCase(Locale.ROOT);
         datatype = replaceLegacy(datatype);
 
         Pattern pattern = TYPE_PATTERN;
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
index aa9cd3d..9082c1f 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/BuiltInFunctionTupleFilter.java
@@ -24,6 +24,7 @@ import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.util.BytesUtil;
@@ -66,7 +67,7 @@ public class BuiltInFunctionTupleFilter extends FunctionTupleFilter {
         this.methodParams = Lists.newArrayList();
 
         if (name != null) {
-            this.name = name.toUpperCase();
+            this.name = name.toUpperCase(Locale.ROOT);
             initMethod();
         }
     }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
index e156174..95f7ea6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/BuiltInMethod.java
@@ -22,6 +22,7 @@ import static org.apache.kylin.metadata.filter.function.LikeMatchers.LikeMatcher
 
 import java.lang.reflect.Method;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang3.reflect.MethodUtils;
@@ -130,14 +131,14 @@ public enum BuiltInMethod {
     public static String upper(String s) {
         if (s == null)
             return null;
-        return s.toUpperCase();
+        return s.toUpperCase(Locale.ROOT);
     }
 
     /** SQL LOWER(string) function. */
     public static String lower(String s) {
         if (s == null)
             return null;
-        return s.toLowerCase();
+        return s.toLowerCase(Locale.ROOT);
     }
 
     /** SQL left || right */
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
index 861e530..8114201 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/Functions.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.metadata.filter.function;
 
 import java.lang.reflect.InvocationTargetException;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.metadata.filter.BuiltInFunctionTupleFilter;
@@ -44,7 +45,7 @@ public class Functions {
             throw new IllegalStateException("Function name cannot be null");
         }
 
-        name = name.toUpperCase();
+        name = name.toUpperCase(Locale.ROOT);
 
         if (SUPPORTED_UDF.containsKey(name)) {
             try {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
index ef1e412..0d681c0 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/filter/function/LikeMatchers.java
@@ -17,6 +17,7 @@
  */
 package org.apache.kylin.metadata.filter.function;
 
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang.StringUtils;
@@ -36,7 +37,7 @@ public class LikeMatchers {
         private Pattern p;
 
         private DefaultLikeMatcher(String patternStr) {
-            patternStr = patternStr.toLowerCase();
+            patternStr = patternStr.toLowerCase(Locale.ROOT);
             final String regex = Like.sqlToRegexLike(patternStr, null);
             p = Pattern.compile(regex);
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
index b8de534..2b3f066 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ColumnDesc.java
@@ -20,6 +20,7 @@ package org.apache.kylin.metadata.model;
 
 import java.io.Serializable;
 
+import java.util.Locale;
 import org.apache.kylin.metadata.datatype.DataType;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
@@ -202,7 +203,7 @@ public class ColumnDesc implements Serializable {
         this.table = table;
 
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
 
         if (id != null)
             zeroBasedIndex = Integer.parseInt(id) - 1;
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index 9d655a4..f46bff4 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -25,6 +25,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
@@ -282,7 +283,7 @@ public class DataModelDesc extends RootPersistentEntity {
 
     public TblColRef findColumn(String table, String column) throws IllegalArgumentException {
         TableRef tableRef = findTable(table);
-        TblColRef result = tableRef.getColumn(column.toUpperCase());
+        TblColRef result = tableRef.getColumn(column.toUpperCase(Locale.ROOT));
         if (result == null)
             throw new IllegalArgumentException("Column not found by " + table + "." + column);
         return result;
@@ -292,7 +293,7 @@ public class DataModelDesc extends RootPersistentEntity {
         TblColRef result = null;
         String input = column;
 
-        column = column.toUpperCase();
+        column = column.toUpperCase(Locale.ROOT);
         int cut = column.lastIndexOf('.');
         if (cut > 0) {
             // table specified
@@ -314,7 +315,7 @@ public class DataModelDesc extends RootPersistentEntity {
 
     // find by unique name, that must uniquely identifies a table in the model
     public TableRef findTable(String table) throws IllegalArgumentException {
-        TableRef result = tableNameMap.get(table.toUpperCase());
+        TableRef result = tableNameMap.get(table.toUpperCase(Locale.ROOT));
         if (result == null) {
             throw new IllegalArgumentException("Table not found by " + table);
         }
@@ -388,7 +389,7 @@ public class DataModelDesc extends RootPersistentEntity {
             throw new IllegalStateException("root fact table should not be empty");
         }
 
-        rootFactTable = rootFactTable.toUpperCase();
+        rootFactTable = rootFactTable.toUpperCase(Locale.ROOT);
         if (tables.containsKey(rootFactTable) == false)
             throw new IllegalStateException("Root fact table does not exist:" + rootFactTable);
 
@@ -399,7 +400,7 @@ public class DataModelDesc extends RootPersistentEntity {
         factTableRefs.add(rootFactTableRef);
 
         for (JoinTableDesc join : joinTables) {
-            join.setTable(join.getTable().toUpperCase());
+            join.setTable(join.getTable().toUpperCase(Locale.ROOT));
 
             if (tables.containsKey(join.getTable()) == false)
                 throw new IllegalStateException("Join table does not exist:" + join.getTable());
@@ -409,7 +410,7 @@ public class DataModelDesc extends RootPersistentEntity {
             if (alias == null) {
                 alias = tableDesc.getName();
             }
-            alias = alias.toUpperCase();
+            alias = alias.toUpperCase(Locale.ROOT);
             join.setAlias(alias);
 
             boolean isLookup = join.getKind() == TableKind.LOOKUP;
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java
index 1b33f35..4f014da 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DatabaseDesc.java
@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 
 /**
  * @author xjiang
@@ -35,7 +36,7 @@ public class DatabaseDesc implements Serializable {
      * @return the name
      */
     public String getName() {
-        return name == null ? "null" : name.toUpperCase();
+        return name == null ? "null" : name.toUpperCase(Locale.ROOT);
     }
 
     /**
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
index 7ef84aa..2c8f6fe 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ExternalFilterDesc.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.metadata.model;
 
+import java.util.Locale;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.RootPersistentEntity;
@@ -90,7 +91,7 @@ public class ExternalFilterDesc extends RootPersistentEntity implements ISourceA
 
     public void init() {
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
     }
 
     @Override
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
index a3cdd7f..72d956a 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/FunctionDesc.java
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -47,7 +48,7 @@ public class FunctionDesc implements Serializable {
 
     public static FunctionDesc newInstance(String expression, ParameterDesc param, String returnType) {
         FunctionDesc r = new FunctionDesc();
-        r.expression = (expression == null) ? null : expression.toUpperCase();
+        r.expression = (expression == null) ? null : expression.toUpperCase(Locale.ROOT);
         r.parameter = param;
         r.returnType = returnType;
         r.returnDataType = DataType.getType(returnType);
@@ -91,7 +92,7 @@ public class FunctionDesc implements Serializable {
     private boolean isDimensionAsMetric = false;
 
     public void init(DataModelDesc model) {
-        expression = expression.toUpperCase();
+        expression = expression.toUpperCase(Locale.ROOT);
         if (expression.equals(PercentileMeasureType.FUNC_PERCENTILE)) {
             expression = PercentileMeasureType.FUNC_PERCENTILE_APPROX; // for backward compatibility
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
index 209da7d..caadbd6 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ModelDimensionDesc.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.model;
 import java.io.Serializable;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.common.util.StringUtil;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
@@ -54,7 +55,7 @@ public class ModelDimensionDesc implements Serializable {
     }
 
     void init(DataModelDesc model) {
-        table = table.toUpperCase();
+        table = table.toUpperCase(Locale.ROOT);
         if (columns != null) {
             StringUtil.toUpperCaseArray(columns, columns);
         }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
index 2cfa681..dcb37ec 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.metadata.model;
 
 import java.io.Serializable;
+import java.util.Locale;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.util.ClassUtil;
@@ -199,7 +200,8 @@ public class PartitionDesc implements Serializable {
             StringBuilder builder = new StringBuilder();
 
             if (partDesc.partitionColumnIsYmdInt()) {
-                buildSingleColumnRangeCondAsYmdInt(builder, partitionDateColumn, startInclusive, endExclusive, partDesc.getPartitionDateFormat());
+                buildSingleColumnRangeCondAsYmdInt(builder, partitionDateColumn, startInclusive, endExclusive,
+                        partDesc.getPartitionDateFormat());
             } else if (partDesc.partitionColumnIsTimeMillis()) {
                 buildSingleColumnRangeCondAsTimeMillis(builder, partitionDateColumn, startInclusive, endExclusive);
             } else if (partitionDateColumn != null && partitionTimeColumn == null) {
@@ -230,8 +232,8 @@ public class PartitionDesc implements Serializable {
             builder.append(partitionColumnName + " >= "
                     + DateFormat.formatToDateStr(startInclusive, partitionColumnDateFormat));
             builder.append(" AND ");
-            builder.append(partitionColumnName + " < "
-                    + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat));
+            builder.append(
+                    partitionColumnName + " < " + DateFormat.formatToDateStr(endExclusive, partitionColumnDateFormat));
         }
 
         private static void buildSingleColumnRangeCondition(StringBuilder builder, TblColRef partitionColumn,
@@ -308,8 +310,8 @@ public class PartitionDesc implements Serializable {
             TblColRef partitionColumn = partDesc.getPartitionDateColumnRef();
             String tableAlias = partitionColumn.getTableAlias();
 
-            String concatField = String.format("CONCAT(%s.YEAR,'-',%s.MONTH,'-',%s.DAY)", tableAlias, tableAlias,
-                    tableAlias);
+            String concatField = String.format(Locale.ROOT, "CONCAT(%s.YEAR,'-',%s.MONTH,'-',%s.DAY)", tableAlias,
+                    tableAlias, tableAlias);
             StringBuilder builder = new StringBuilder();
 
             if (startInclusive > 0) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
index 3f9a774..d8e3b02 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.model;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -53,7 +54,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
     public static String concatRawResourcePath(String nameOnPath) {
         return ResourceStore.TABLE_RESOURCE_ROOT + "/" + nameOnPath + ".json";
     }
-    
+
     public static String makeResourceName(String tableIdentity, String prj) {
         return prj == null ? tableIdentity : tableIdentity + "--" + prj;
     }
@@ -134,7 +135,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
     public String resourceName() {
         return makeResourceName(getIdentity(), getProject());
     }
-    
+
     public TableDesc appendColumns(ColumnDesc[] computedColumns, boolean makeCopy) {
         if (computedColumns == null || computedColumns.length == 0) {
             return this;
@@ -152,7 +153,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
                 if (existingColumns[i].getName().equalsIgnoreCase(computedColumns[j].getName())) {
                     // if we're adding a computed column twice, it should be allowed without producing duplicates
                     if (!existingColumns[i].isComputedColumn()) {
-                        throw new IllegalArgumentException(String.format(
+                        throw new IllegalArgumentException(String.format(Locale.ROOT,
                                 "There is already a column named %s on table %s, please change your computed column name",
                                 new Object[] { computedColumns[j].getName(), this.getIdentity() }));
                     } else {
@@ -195,7 +196,7 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
         if (isBorrowedFromGlobal()) {
             return concatResourcePath(getIdentity(), null);
         }
-        
+
         return concatResourcePath(getIdentity(), project);
     }
 
@@ -209,7 +210,8 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
 
     public String getIdentity() {
         if (identity == null) {
-            identity = String.format("%s.%s", this.getDatabase().toUpperCase(), this.getName()).toUpperCase();
+            identity = String.format(Locale.ROOT, "%s.%s", this.getDatabase().toUpperCase(Locale.ROOT), this.getName())
+                    .toUpperCase(Locale.ROOT);
         }
         return identity;
     }
@@ -295,12 +297,12 @@ public class TableDesc extends RootPersistentEntity implements ISourceAware {
     public void init(KylinConfig config, String project) {
         this.project = project;
         this.config = config;
-        
+
         if (name != null)
-            name = name.toUpperCase();
+            name = name.toUpperCase(Locale.ROOT);
 
         if (getDatabase() != null)
-            setDatabase(getDatabase().toUpperCase());
+            setDatabase(getDatabase().toUpperCase(Locale.ROOT));
 
         if (columns != null) {
             Arrays.sort(columns, new Comparator<ColumnDesc>() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
index 28ea238..74cc2db 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableExtDesc.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -202,7 +203,7 @@ public class TableExtDesc extends RootPersistentEntity {
         this.project = project;
 
         if (this.tableIdentity != null)
-            this.tableIdentity = this.tableIdentity.toUpperCase();
+            this.tableIdentity = this.tableIdentity.toUpperCase(Locale.ROOT);
     }
 
     public void setLastModifiedTime(long lastModifiedTime) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java
index ee33e8a..918eedf 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/TblColRef.java
@@ -22,6 +22,7 @@ import static com.google.common.base.Preconditions.checkArgument;
 
 import java.io.Serializable;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.metadata.datatype.DataType;
 
@@ -259,12 +260,12 @@ public class TblColRef implements Serializable {
         if (column.getTable() == null) {
             return "NULL";
         } else {
-            return column.getTable().getIdentity().toUpperCase();
+            return column.getTable().getIdentity().toUpperCase(Locale.ROOT);
         }
     }
 
     // return DB.TABLE.COLUMN
     public String getColumWithTableAndSchema() {
-        return (getTableWithSchema() + "." + column.getName()).toUpperCase();
+        return (getTableWithSchema() + "." + column.getName()).toUpperCase(Locale.ROOT);
     }
 }
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java
index d48100c..759e573 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/tool/CalciteParser.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.model.tool;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -69,7 +70,7 @@ public class CalciteParser {
         //n = 1 is getting column
         //n = 2 is getting table's alias, if has.
         //n = 3 is getting database name, if has.
-        return id.names.get(id.names.size() - n).replace("\"", "").toUpperCase();
+        return id.names.get(id.names.size() - n).replace("\"", "").toUpperCase(Locale.ROOT);
     }
 
     public static void ensureNoAliasInExpr(String expr) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
index 45622f3..d12e927 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectInstance.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.project;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 import java.util.TreeSet;
 
@@ -242,11 +243,11 @@ public class ProjectInstance extends RootPersistentEntity implements ISourceAwar
     }
 
     public boolean containsTable(String tableName) {
-        return tables.contains(tableName.toUpperCase());
+        return tables.contains(tableName.toUpperCase(Locale.ROOT));
     }
 
     public void removeTable(String tableName) {
-        tables.remove(tableName.toUpperCase());
+        tables.remove(tableName.toUpperCase(Locale.ROOT));
     }
 
     public void addExtFilter(String extFilterName) {
@@ -258,7 +259,7 @@ public class ProjectInstance extends RootPersistentEntity implements ISourceAwar
     }
 
     public void addTable(String tableName) {
-        tables.add(tableName.toUpperCase());
+        tables.add(tableName.toUpperCase(Locale.ROOT));
     }
 
     public Set<String> getTables() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index 13a61cf..bad9773 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -18,9 +18,16 @@
 
 package org.apache.kylin.metadata.project;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -41,14 +48,9 @@ import org.apache.kylin.metadata.realization.RealizationType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 public class ProjectManager {
     private static final Logger logger = LoggerFactory.getLogger(ProjectManager.class);
@@ -66,11 +68,11 @@ public class ProjectManager {
 
     private KylinConfig config;
     private ProjectL2Cache l2Cache;
-    
+
     // project name => ProjectInstance
     private CaseInsensitiveStringCache<ProjectInstance> projectMap;
     private CachedCrudAssist<ProjectInstance> crud;
-    
+
     // protects concurrent operations around the cached map, to avoid for example
     // writing an entity in the middle of reloading it (dirty read)
     private AutoReadWriteLock prjMapLock = new AutoReadWriteLock();
@@ -245,7 +247,7 @@ public class ProjectManager {
                 throw new IllegalArgumentException("Project " + newProjectName + " does not exist.");
             }
             prj.addModel(modelName);
-            
+
             return save(prj);
         }
     }
@@ -366,7 +368,7 @@ public class ProjectManager {
             save(projectInstance);
         }
     }
-    
+
     private ProjectInstance save(ProjectInstance prj) throws IOException {
         crud.save(prj);
         clearL2Cache(prj.getName());
@@ -459,15 +461,15 @@ public class ProjectManager {
     }
 
     public Set<IRealization> getRealizationsByTable(String project, String tableName) {
-        return l2Cache.getRealizationsByTable(project, tableName.toUpperCase());
+        return l2Cache.getRealizationsByTable(project, tableName.toUpperCase(Locale.ROOT));
     }
 
     public List<MeasureDesc> listEffectiveRewriteMeasures(String project, String factTable) {
-        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(), true);
+        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(Locale.ROOT), true);
     }
 
     public List<MeasureDesc> listEffectiveMeasures(String project, String factTable) {
-        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(), false);
+        return l2Cache.listEffectiveRewriteMeasures(project, factTable.toUpperCase(Locale.ROOT), false);
     }
 
     KylinConfig getConfig() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
index 5fbb86f..d9c4577 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
@@ -21,6 +21,7 @@ import static com.google.common.base.Predicates.equalTo;
 import static com.google.common.base.Predicates.not;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
@@ -92,7 +93,7 @@ public class HivePushDownConverter implements IPushDownConverter {
             String castStr = castMatcher.group();
             String type = castMatcher.group(2);
             String supportedType = "";
-            switch (type.toUpperCase()) {
+            switch (type.toUpperCase(Locale.ROOT)) {
             case "INTEGER":
                 supportedType = "int";
                 break;
@@ -132,7 +133,7 @@ public class HivePushDownConverter implements IPushDownConverter {
             if (aliasMatcher.find()) {
                 String aliasCandidate = aliasMatcher.group(1);
 
-                if (aliasCandidate != null && !sqlKeyWordsExceptAS.contains(aliasCandidate.toUpperCase())) {
+                if (aliasCandidate != null && !sqlKeyWordsExceptAS.contains(aliasCandidate.toUpperCase(Locale.ROOT))) {
                     continue;
                 }
 
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
index f086ae9..b6c48fd 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ColumnGenerator.java
@@ -20,12 +20,14 @@ package org.apache.kylin.source.datagen;
 
 import java.io.IOException;
 import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.TreeSet;
 
@@ -182,7 +184,7 @@ public class ColumnGenerator {
         }
 
         private String formatNumber(double i) {
-            return new DecimalFormat(format).format(i);
+            return new DecimalFormat(format, DecimalFormatSymbols.getInstance(Locale.ROOT)).format(i);
         }
 
         private int randomInt() {
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
index b7e8d3a..ca90206 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/datagen/ModelDataGenerator.java
@@ -26,11 +26,13 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
@@ -41,9 +43,9 @@ import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.DataModelDesc;
+import org.apache.kylin.metadata.model.DataModelManager;
 import org.apache.kylin.metadata.model.JoinDesc;
 import org.apache.kylin.metadata.model.JoinTableDesc;
-import org.apache.kylin.metadata.model.DataModelManager;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
@@ -67,7 +69,7 @@ public class ModelDataGenerator {
     private ModelDataGenerator(DataModelDesc model, int nRows, ResourceStore outputStore) {
         this(model, nRows, outputStore, "/data");
     }
-    
+
     private ModelDataGenerator(DataModelDesc model, int nRows, ResourceStore outputStore, String outputPath) {
         this.model = model;
         this.targetRows = nRows;
@@ -81,13 +83,14 @@ public class ModelDataGenerator {
 
         JoinTableDesc[] allTables = model.getJoinTables();
         for (int i = allTables.length - 1; i >= -1; i--) { // reverse order needed for FK generation
-            TableDesc table = (i == -1) ? model.getRootFactTable().getTableDesc() : allTables[i].getTableRef().getTableDesc();
+            TableDesc table = (i == -1) ? model.getRootFactTable().getTableDesc()
+                    : allTables[i].getTableRef().getTableDesc();
             allTableDesc.add(table);
-            
+
             if (generated.contains(table))
                 continue;
 
-            logger.info(String.format("generating data for %s", table));
+            logger.info(String.format(Locale.ROOT, "generating data for %s", table));
             boolean gen = generateTable(table);
 
             if (gen)
@@ -103,7 +106,7 @@ public class ModelDataGenerator {
             return false;
 
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
-        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, "UTF-8"));
+        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, StandardCharsets.UTF_8));
 
         generateTableInternal(table, config, pout);
 
@@ -148,7 +151,7 @@ public class ModelDataGenerator {
     private void generateDDL(Set<TableDesc> tables) throws IOException {
 
         ByteArrayOutputStream bout = new ByteArrayOutputStream();
-        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, "UTF-8"));
+        PrintWriter pout = new PrintWriter(new OutputStreamWriter(bout, StandardCharsets.UTF_8));
 
         generateDatabaseDDL(tables, pout);
         generateCreateTableDDL(tables, pout);
@@ -178,7 +181,7 @@ public class ModelDataGenerator {
         for (TableDesc t : tables) {
             if (t.isView())
                 continue;
-            
+
             out.print("DROP TABLE IF EXISTS " + normHiveIdentifier(t.getIdentity()) + ";\n");
 
             out.print("CREATE TABLE " + normHiveIdentifier(t.getIdentity()) + "(" + "\n");
@@ -219,15 +222,16 @@ public class ModelDataGenerator {
                 out.print("-- " + t.getIdentity() + " is view \n");
                 continue;
             }
-            
-            out.print("LOAD DATA LOCAL INPATH '" + t.getIdentity() + ".csv' OVERWRITE INTO TABLE " + normHiveIdentifier(t.getIdentity()) + ";\n");
+
+            out.print("LOAD DATA LOCAL INPATH '" + t.getIdentity() + ".csv' OVERWRITE INTO TABLE "
+                    + normHiveIdentifier(t.getIdentity()) + ";\n");
         }
     }
 
     public boolean existsInStore(TableDesc table) throws IOException {
         return outputStore.exists(path(table));
     }
-    
+
     public boolean isPK(ColumnDesc col) {
         for (JoinTableDesc joinTable : model.getJoinTables()) {
             JoinDesc join = joinTable.getJoin();
@@ -238,7 +242,7 @@ public class ModelDataGenerator {
         }
         return false;
     }
-    
+
     public List<String> getPkValuesIfIsFk(ColumnDesc fk) throws IOException {
         JoinTableDesc[] joinTables = model.getJoinTables();
         for (int i = 0; i < joinTables.length; i++) {
@@ -269,7 +273,8 @@ public class ModelDataGenerator {
 
         List<String> r = new ArrayList<>();
 
-        BufferedReader in = new BufferedReader(new InputStreamReader(outputStore.getResource(path(pk.getTable())).inputStream, "UTF-8"));
+        BufferedReader in = new BufferedReader(
+                new InputStreamReader(outputStore.getResource(path(pk.getTable())).inputStream, "UTF-8"));
         try {
             String line;
             while ((line = in.readLine()) != null) {
@@ -305,11 +310,12 @@ public class ModelDataGenerator {
         String modelName = args[0];
         int nRows = Integer.parseInt(args[1]);
         String outputDir = args.length > 2 ? args[2] : null;
-        
+
         KylinConfig conf = KylinConfig.getInstanceFromEnv();
         DataModelDesc model = DataModelManager.getInstance(conf).getDataModelDesc(modelName);
-        ResourceStore store = outputDir == null ? ResourceStore.getStore(conf) : ResourceStore.getStore(mockup(outputDir));
-        
+        ResourceStore store = outputDir == null ? ResourceStore.getStore(conf)
+                : ResourceStore.getStore(mockup(outputDir));
+
         ModelDataGenerator gen = new ModelDataGenerator(model, nRows, store);
         gen.generate();
     }
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
index 4e67d22..7ccec63 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
@@ -20,6 +20,7 @@ package org.apache.kylin.measure;
 
 import java.math.BigDecimal;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -84,11 +85,8 @@ public class AggregatorMemEstimateTest extends LocalFileMetadataTestCase {
         decimalMax.aggregate(decimal);
         decimalSum.aggregate(decimal);
 
-        return Lists.newArrayList(
-                longMin, longMax, longSum,
-                doubleMin, doubleMax, doubleSum,
-                decimalMin, decimalMax, decimalSum
-        );
+        return Lists.newArrayList(longMin, longMax, longSum, doubleMin, doubleMax, doubleSum, decimalMin, decimalMax,
+                decimalSum);
     }
 
     private String getAggregatorName(Class<? extends MeasureAggregator> clazz) {
@@ -111,7 +109,8 @@ public class AggregatorMemEstimateTest extends LocalFileMetadataTestCase {
         }
         bitmapAggregator.aggregate(bitmapCounter);
 
-        ExtendedColumnMeasureType extendedColumnType = new ExtendedColumnMeasureType("EXTENDED_COLUMN", DataType.getType("extendedcolumn(100)"));
+        ExtendedColumnMeasureType extendedColumnType = new ExtendedColumnMeasureType("EXTENDED_COLUMN",
+                DataType.getType("extendedcolumn(100)"));
         MeasureAggregator<ByteArray> extendedColumnAggregator = extendedColumnType.newAggregator();
         extendedColumnAggregator.aggregate(new ByteArray(100));
 
@@ -120,10 +119,11 @@ public class AggregatorMemEstimateTest extends LocalFileMetadataTestCase {
         aggregators.add(bitmapAggregator);
         aggregators.add(extendedColumnAggregator);
 
-        System.out.printf("%40s %10s %10s\n", "Class", "Estimate", "Actual");
+        System.out.printf(Locale.ROOT, "%40s %10s %10s\n", "Class", "Estimate", "Actual");
         for (MeasureAggregator aggregator : aggregators) {
             String clzName = getAggregatorName(aggregator.getClass());
-            System.out.printf("%40s %10d %10d\n", clzName, aggregator.getMemBytesEstimate(), meter.measureDeep(aggregator));
+            System.out.printf(Locale.ROOT, "%40s %10d %10d\n", clzName, aggregator.getMemBytesEstimate(),
+                    meter.measureDeep(aggregator));
         }
     }
 
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java
index 835f7f2..c5b4cd4 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/extendedcolumn/ExtendedColumnSerializerTest.java
@@ -20,6 +20,7 @@ package org.apache.kylin.measure.extendedcolumn;
 
 import java.nio.ByteBuffer;
 
+import java.nio.charset.StandardCharsets;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -75,7 +76,7 @@ public class ExtendedColumnSerializerTest extends LocalFileMetadataTestCase {
         serializer.serialize(array, buffer);
         buffer.flip();
         ByteArray des = serializer.deserialize(buffer);
-        Assert.assertTrue(new ByteArray(text.getBytes()).equals(des));
+        Assert.assertTrue(new ByteArray(text.getBytes(StandardCharsets.UTF_8)).equals(des));
     }
 
     @Test
@@ -89,6 +90,6 @@ public class ExtendedColumnSerializerTest extends LocalFileMetadataTestCase {
         serializer.serialize(array, buffer);
         buffer.flip();
         ByteArray des = serializer.deserialize(buffer);
-        Assert.assertTrue(new ByteArray(StringUtils.repeat("h", 20).getBytes()).equals(des));
+        Assert.assertTrue(new ByteArray(StringUtils.repeat("h", 20).getBytes(StandardCharsets.UTF_8)).equals(des));
     }
 }
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
index 3eba78b..2105d37 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/topn/TopNCounterTest.java
@@ -20,9 +20,13 @@ package org.apache.kylin.measure.topn;
 
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
@@ -76,7 +80,7 @@ public class TopNCounterTest {
 
         if (tempFile.exists())
             FileUtils.forceDelete(tempFile);
-        FileWriter fw = new FileWriter(tempFile);
+        Writer fw = new OutputStreamWriter(new FileOutputStream(tempFile), StandardCharsets.UTF_8);
         try {
             for (int i = 0; i < TOTAL_RECORDS; i++) {
                 keyIndex = zipf.sample() - 1;
@@ -97,10 +101,12 @@ public class TopNCounterTest {
     //@Test
     public void testSingleSpaceSaving() throws IOException {
         String dataFile = prepareTestDate();
-        TopNCounterTest.SpaceSavingConsumer spaceSavingCounter = new TopNCounterTest.SpaceSavingConsumer(TOP_K * SPACE_SAVING_ROOM);
+        TopNCounterTest.SpaceSavingConsumer spaceSavingCounter = new TopNCounterTest.SpaceSavingConsumer(
+                TOP_K * SPACE_SAVING_ROOM);
         TopNCounterTest.HashMapConsumer accurateCounter = new TopNCounterTest.HashMapConsumer();
 
-        for (TopNCounterTest.TestDataConsumer consumer : new TopNCounterTest.TestDataConsumer[] { spaceSavingCounter, accurateCounter }) {
+        for (TopNCounterTest.TestDataConsumer consumer : new TopNCounterTest.TestDataConsumer[] { spaceSavingCounter,
+                accurateCounter }) {
             feedDataToConsumer(dataFile, consumer, 0, TOTAL_RECORDS);
         }
 
@@ -109,7 +115,8 @@ public class TopNCounterTest {
         compareResult(spaceSavingCounter, accurateCounter);
     }
 
-    private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer, TopNCounterTest.TestDataConsumer secondConsumer) {
+    private void compareResult(TopNCounterTest.TestDataConsumer firstConsumer,
+            TopNCounterTest.TestDataConsumer secondConsumer) {
         List<Pair<String, Double>> topResult1 = firstConsumer.getTopN(TOP_K);
         outputMsg("Get topN, Space saving takes " + firstConsumer.getSpentTime() / 1000 + " seconds");
         List<Pair<String, Double>> realSequence = secondConsumer.getTopN(TOP_K);
@@ -123,8 +130,10 @@ public class TopNCounterTest {
                 //            if (topResult1.get(i).getFirst().equals(realSequence.get(i).getFirst()) && topResult1.get(i).getSecond().doubleValue() == realSequence.get(i).getSecond().doubleValue()) {
                 outputMsg("Passed; key:" + topResult1.get(i).getFirst() + ", value:" + topResult1.get(i).getSecond());
             } else {
-                outputMsg("Failed; space saving key:" + topResult1.get(i).getFirst() + ", value:" + topResult1.get(i).getSecond());
-                outputMsg("Failed; correct key:" + realSequence.get(i).getFirst() + ", value:" + realSequence.get(i).getSecond());
+                outputMsg("Failed; space saving key:" + topResult1.get(i).getFirst() + ", value:"
+                        + topResult1.get(i).getSecond());
+                outputMsg("Failed; correct key:" + realSequence.get(i).getFirst() + ", value:"
+                        + realSequence.get(i).getSecond());
                 error++;
             }
         }
@@ -167,7 +176,8 @@ public class TopNCounterTest {
 
     }
 
-    private TopNCounterTest.SpaceSavingConsumer[] singleMerge(TopNCounterTest.SpaceSavingConsumer[] consumers) throws IOException, ClassNotFoundException {
+    private TopNCounterTest.SpaceSavingConsumer[] singleMerge(TopNCounterTest.SpaceSavingConsumer[] consumers)
+            throws IOException, ClassNotFoundException {
         List<TopNCounterTest.SpaceSavingConsumer> list = Lists.newArrayList();
         if (consumers.length == 1)
             return consumers;
@@ -183,7 +193,8 @@ public class TopNCounterTest {
 
     }
 
-    private TopNCounterTest.SpaceSavingConsumer[] binaryMerge(TopNCounterTest.SpaceSavingConsumer[] consumers) throws IOException, ClassNotFoundException {
+    private TopNCounterTest.SpaceSavingConsumer[] binaryMerge(TopNCounterTest.SpaceSavingConsumer[] consumers)
+            throws IOException, ClassNotFoundException {
         List<TopNCounterTest.SpaceSavingConsumer> list = Lists.newArrayList();
         if (consumers.length == 1)
             return consumers;
@@ -199,9 +210,11 @@ public class TopNCounterTest {
         return binaryMerge(list.toArray(new TopNCounterTest.SpaceSavingConsumer[list.size()]));
     }
 
-    private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsumer consumer, int startLine, int endLine) throws IOException {
+    private void feedDataToConsumer(String dataFile, TopNCounterTest.TestDataConsumer consumer, int startLine,
+            int endLine) throws IOException {
         long startTime = System.currentTimeMillis();
-        BufferedReader bufferedReader = new BufferedReader(new FileReader(dataFile));
+        BufferedReader bufferedReader = new BufferedReader(
+                new InputStreamReader(new FileInputStream(dataFile), StandardCharsets.UTF_8));
 
         int lineNum = 0;
         String line = bufferedReader.readLine();
@@ -214,7 +227,8 @@ public class TopNCounterTest {
         }
 
         bufferedReader.close();
-        outputMsg("feed data to " + consumer.getClass().getCanonicalName() + " take time (seconds): " + (System.currentTimeMillis() - startTime) / 1000);
+        outputMsg("feed data to " + consumer.getClass().getCanonicalName() + " take time (seconds): "
+                + (System.currentTimeMillis() - startTime) / 1000);
     }
 
     private void outputMsg(String msg) {
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java
index 93da2f4..6452d78 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEvent.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
@@ -206,7 +207,7 @@ public class RecordEvent implements Record, Map<String, Object>, Serializable {
 
     @Override
     public byte[] getKey() {
-        return (getHost() + "-" + getTime() + "-" + getID()).getBytes();
+        return (getHost() + "-" + getTime() + "-" + getID()).getBytes(StandardCharsets.UTF_8);
     }
 
     @Override
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java
index ff97b9b..c611d0f 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/RecordEventTimeDetail.java
@@ -20,6 +20,7 @@ package org.apache.kylin.metrics.lib.impl;
 
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
+import java.util.Locale;
 import java.util.TimeZone;
 
 import org.apache.kylin.common.KylinConfig;
@@ -43,24 +44,24 @@ public class RecordEventTimeDetail {
     public final String week_begin_date;
 
     public RecordEventTimeDetail(long timeStamp) {
-        Calendar calendar = Calendar.getInstance(timeZone);
+        Calendar calendar = Calendar.getInstance(timeZone, Locale.ROOT);
         calendar.setTimeInMillis(timeStamp);
 
         SimpleDateFormat dateFormat = dateFormatThreadLocal.get();
         if (dateFormat == null) {
-            dateFormat = new SimpleDateFormat("yyyy-MM-dd");
+            dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
             dateFormat.setTimeZone(timeZone);
             dateFormatThreadLocal.set(dateFormat);
         }
         SimpleDateFormat timeFormat = timeFormatThreadLocal.get();
         if (timeFormat == null) {
-            timeFormat = new SimpleDateFormat("HH:mm:ss");
+            timeFormat = new SimpleDateFormat("HH:mm:ss", Locale.ROOT);
             timeFormat.setTimeZone(timeZone);
             timeFormatThreadLocal.set(timeFormat);
         }
 
-        String yearStr = String.format("%04d", calendar.get(Calendar.YEAR));
-        String monthStr = String.format("%02d", calendar.get(Calendar.MONTH) + 1);
+        String yearStr = String.format(Locale.ROOT, "%04d", calendar.get(Calendar.YEAR));
+        String monthStr = String.format(Locale.ROOT, "%02d", calendar.get(Calendar.MONTH) + 1);
         this.year_begin_date = yearStr + "-01-01";
         this.month_begin_date = yearStr + "-" + monthStr + "-01";
         this.date = dateFormat.format(calendar.getTime());
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java
index 1336843..c013b4c 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/TimePropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.lib.impl;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum TimePropertyEnum {
@@ -36,7 +38,7 @@ public enum TimePropertyEnum {
             return null;
         }
         for (TimePropertyEnum property : TimePropertyEnum.values()) {
-            if (property.propertyName.equals(propertyName.toUpperCase())) {
+            if (property.propertyName.equals(propertyName.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java
index 64d13ac..3ca567e 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/JobPropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum JobPropertyEnum {
@@ -41,7 +43,7 @@ public enum JobPropertyEnum {
             return null;
         }
         for (JobPropertyEnum property : JobPropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java
index d2d5bb4..21477dc 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryCubePropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum QueryCubePropertyEnum {
@@ -39,7 +41,7 @@ public enum QueryCubePropertyEnum {
             return null;
         }
         for (QueryCubePropertyEnum property : QueryCubePropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java
index 3f016b0..20da4ce 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryPropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum QueryPropertyEnum {
@@ -37,7 +39,7 @@ public enum QueryPropertyEnum {
             return null;
         }
         for (QueryPropertyEnum property : QueryPropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java
index 049b9ed..4366f0d 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/property/QueryRPCPropertyEnum.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.metrics.property;
 
+import java.util.Locale;
+
 import com.google.common.base.Strings;
 
 public enum QueryRPCPropertyEnum {
@@ -36,7 +38,7 @@ public enum QueryRPCPropertyEnum {
             return null;
         }
         for (QueryRPCPropertyEnum property : QueryRPCPropertyEnum.values()) {
-            if (property.propertyName.equals(name.toUpperCase())) {
+            if (property.propertyName.equals(name.toUpperCase(Locale.ROOT))) {
                 return property;
             }
         }
diff --git a/dev-support/signatures.txt b/dev-support/signatures.txt
new file mode 100644
index 0000000..c6f4d54
--- /dev/null
+++ b/dev-support/signatures.txt
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to you under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Signatures of APIs to avoid.
+# Cribbed from Elasticsearch
+
+java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars
+java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
+
+@defaultMessage Please do not try to kill the world
+java.lang.Runtime#exit(int)
+
+# End signatures.txt
\ No newline at end of file
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java
index 1a779d2..a06fd46 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchOptimizeJobCheckpointBuilder.java
@@ -21,6 +21,7 @@ package org.apache.kylin.engine.mr;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
@@ -35,7 +36,7 @@ import com.google.common.base.Preconditions;
 
 public class BatchOptimizeJobCheckpointBuilder {
 
-    protected static SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+    protected static SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
 
     final protected CubeInstance cube;
     final protected String submitter;
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
index 7f7191d..fb1a7f4 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
@@ -21,6 +21,7 @@ package org.apache.kylin.engine.mr;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TimeZone;
 import java.util.regex.Matcher;
@@ -86,7 +87,7 @@ public class CubingJob extends DefaultChainedExecutable {
                 return null;
             }
             for (CubingJobTypeEnum jobTypeEnum : CubingJobTypeEnum.values()) {
-                if (jobTypeEnum.name.equals(name.toUpperCase())) {
+                if (jobTypeEnum.name.equals(name.toUpperCase(Locale.ROOT))) {
                     return jobTypeEnum;
                 }
             }
@@ -136,7 +137,7 @@ public class CubingJob extends DefaultChainedExecutable {
         }
 
         CubingJob result = new CubingJob();
-        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
         format.setTimeZone(TimeZone.getTimeZone(config.getTimeZone()));
         result.setDeployEnvName(kylinConfig.getDeployEnv());
         result.setProjectName(projList.get(0).getName());
@@ -279,8 +280,7 @@ public class CubingJob extends DefaultChainedExecutable {
             jobStats.setJobStats(findSourceSizeBytes(), findCubeSizeBytes(), getDuration(), getMapReduceWaitTime(),
                     getPerBytesTimeCost(findSourceSizeBytes(), getDuration()));
             if (CubingJobTypeEnum.getByName(getJobType()) == CubingJobTypeEnum.BUILD) {
-                jobStats.setJobStepStats(
-                        getTaskDurationByName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS),
+                jobStats.setJobStepStats(getTaskDurationByName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS),
                         getTaskDurationByName(ExecutableConstants.STEP_NAME_BUILD_DICTIONARY),
                         getTaskDurationByName(ExecutableConstants.STEP_NAME_BUILD_IN_MEM_CUBE),
                         getTaskDurationByName(ExecutableConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE));
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java
index 6865ce3..3ebd7d2 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/LookupSnapshotBuildJob.java
@@ -21,6 +21,7 @@ package org.apache.kylin.engine.mr;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.TimeZone;
 
 import org.apache.kylin.common.KylinConfig;
@@ -58,7 +59,7 @@ public class LookupSnapshotBuildJob extends DefaultChainedExecutable {
         }
 
         LookupSnapshotBuildJob result = new LookupSnapshotBuildJob();
-        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss");
+        SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
         format.setTimeZone(TimeZone.getTimeZone(kylinConfig.getTimeZone()));
         result.setDeployEnvName(kylinConfig.getDeployEnv());
         result.setProjectName(projList.get(0).getName());
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
index 6b8934a..102995e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/CubeStatsReader.java
@@ -18,15 +18,20 @@
 
 package org.apache.kylin.engine.mr.common;
 
+import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -361,7 +366,7 @@ public class CubeStatsReader {
     }
 
     private static String formatDouble(double input) {
-        return new DecimalFormat("#.##").format(input);
+        return new DecimalFormat("#.##", DecimalFormatSymbols.getInstance(Locale.ROOT)).format(input);
     }
 
     public static class CubeStatsResult {
@@ -423,7 +428,8 @@ public class CubeStatsReader {
         CubeInstance cube = CubeManager.getInstance(config).getCube(args[0]);
         List<CubeSegment> segments = cube.getSegments();
 
-        PrintWriter out = new PrintWriter(System.out);
+        PrintWriter out = new PrintWriter(
+                new BufferedWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));
         for (CubeSegment seg : segments) {
             try {
                 new CubeStatsReader(seg, config).print(out);
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
index d7da2c2..0e93ee7 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
+
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -97,7 +99,7 @@ public class CuboidJob extends AbstractHadoopJob {
             parseOptions(options, args);
 
             String output = getOptionValue(OPTION_OUTPUT_PATH);
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             int nCuboidLevel = Integer.parseInt(getOptionValue(OPTION_NCUBOID_LEVEL));
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             String cubingJobId = getOptionValue(OPTION_CUBING_JOB_ID);
@@ -113,7 +115,8 @@ public class CuboidJob extends AbstractHadoopJob {
             cuboidScheduler = CuboidSchedulerUtil.getCuboidSchedulerByMode(segment, cuboidModeName);
 
             if (checkSkip(cubingJobId, nCuboidLevel)) {
-                logger.info("Skip job " + getOptionValue(OPTION_JOB_NAME) + " for " + segmentID + "[" + segmentID + "]");
+                logger.info(
+                        "Skip job " + getOptionValue(OPTION_JOB_NAME) + " for " + segmentID + "[" + segmentID + "]");
                 return 0;
             }
 
@@ -166,7 +169,8 @@ public class CuboidJob extends AbstractHadoopJob {
 
         if ("FLAT_TABLE".equals(input)) {
             // base cuboid case
-            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg).getFlatTableInputFormat();
+            IMRTableInputFormat flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg)
+                    .getFlatTableInputFormat();
             flatTableInputFormat.configureJob(job);
         } else {
             // n-dimension cuboid case
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
index 2dd9f00..a7fa2cd 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidReducer.java
@@ -23,6 +23,7 @@ import java.nio.ByteBuffer;
 import java.util.List;
 
 import com.google.common.collect.Lists;
+import java.util.Locale;
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeManager;
@@ -62,7 +63,7 @@ public class CuboidReducer extends KylinReducer<Text, Text, Text, Text> {
     @Override
     protected void doSetup(Context context) throws IOException {
         super.bindCurrentConfiguration(context.getConfiguration());
-        cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
+        cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(Locale.ROOT);
 
         // only used in Build job, not in Merge job
         cuboidLevel = context.getConfiguration().getInt(BatchConstants.CFG_CUBE_CUBOID_LEVEL, 0);
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
index 61ba247..27b5208 100755
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
@@ -21,6 +21,7 @@ package org.apache.kylin.engine.mr.steps;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -98,7 +99,7 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
         taskId = context.getTaskAttemptID().getTaskID().getId();
 
         reducerMapping = new FactDistinctColumnsReducerMapping(cube);
-        
+
         logger.info("reducer no " + taskId + ", role play " + reducerMapping.getRolePlayOfReducer(taskId));
 
         if (reducerMapping.isCuboidRowCounterReducer(taskId)) {
@@ -132,7 +133,8 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
     }
 
     @Override
-    public void doReduce(SelfDefineSortableKey skey, Iterable<Text> values, Context context) throws IOException, InterruptedException {
+    public void doReduce(SelfDefineSortableKey skey, Iterable<Text> values, Context context)
+            throws IOException, InterruptedException {
         Text key = skey.getText();
         if (isStatistics) {
             // for hll
@@ -219,12 +221,12 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
             // output written to baseDir/colName/colName.dci-r-00000 (etc)
             String dimRangeFileName = col.getIdentity() + "/" + col.getName() + DIMENSION_COL_INFO_FILE_POSTFIX;
 
-            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new Text(minValue.getBytes()),
-                    dimRangeFileName);
-            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(), new Text(maxValue.getBytes()),
-                    dimRangeFileName);
-            logger.info("write dimension range info for col : " + col.getName() + "  minValue:" + minValue + " maxValue:"
-                    + maxValue);
+            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(),
+                    new Text(minValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_PARTITION, NullWritable.get(),
+                    new Text(maxValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName);
+            logger.info("write dimension range info for col : " + col.getName() + "  minValue:" + minValue
+                    + " maxValue:" + maxValue);
         }
     }
 
@@ -232,11 +234,13 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
         // output written to baseDir/colName/colName.rldict-r-00000 (etc)
         String dictFileName = col.getIdentity() + "/" + col.getName() + DICT_FILE_POSTFIX;
 
-        try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream outputStream = new DataOutputStream(baos);) {
+        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                DataOutputStream outputStream = new DataOutputStream(baos);) {
             outputStream.writeUTF(dict.getClass().getName());
             dict.write(outputStream);
 
-            mos.write(BatchConstants.CFG_OUTPUT_DICT, NullWritable.get(), new ArrayPrimitiveWritable(baos.toByteArray()), dictFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_DICT, NullWritable.get(),
+                    new ArrayPrimitiveWritable(baos.toByteArray()), dictFileName);
         }
     }
 
@@ -252,19 +256,23 @@ public class FactDistinctColumnsReducer extends KylinReducer<SelfDefineSortableK
             grandTotal += hll.getCountEstimate();
         }
         double mapperOverlapRatio = grandTotal == 0 ? 0 : (double) totalRowsBeforeMerge / grandTotal;
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1), new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-1),
+                new BytesWritable(Bytes.toBytes(mapperOverlapRatio)), statisticsFileName);
 
         // mapper number at key -2
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2), new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(-2),
+                new BytesWritable(Bytes.toBytes(baseCuboidRowCountInMappers.size())), statisticsFileName);
 
         // sampling percentage at key 0
-        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L), new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);
+        mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(0L),
+                new BytesWritable(Bytes.toBytes(samplingPercentage)), statisticsFileName);
 
         for (long i : allCuboids) {
             valueBuf.clear();
             cuboidHLLMap.get(i).writeRegisters(valueBuf);
             valueBuf.flip();
-            mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i), new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
+            mos.write(BatchConstants.CFG_OUTPUT_STATISTICS, new LongWritable(i),
+                    new BytesWritable(valueBuf.array(), valueBuf.limit()), statisticsFileName);
         }
     }
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java
index b56434b..2fbbc73 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FilterRecommendCuboidDataJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -52,7 +53,7 @@ public class FilterRecommendCuboidDataJob extends AbstractHadoopJob {
             parseOptions(options, args);
 
             job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             Path input = new Path(getOptionValue(OPTION_INPUT_PATH));
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java
index 7bfa33a..63e651c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidFromBaseCuboidJob.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
+
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -79,7 +81,7 @@ public class InMemCuboidFromBaseCuboidJob extends AbstractHadoopJob {
             options.addOption(OPTION_NEED_UPDATE_BASE_CUBOID_SHARD);
             parseOptions(options, args);
 
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             String output = getOptionValue(OPTION_OUTPUT_PATH);
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
index f8874fe..02dc71c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -73,7 +74,7 @@ public class InMemCuboidJob extends AbstractHadoopJob {
             options.addOption(OPTION_DICTIONARY_SHRUNKEN_PATH);
             parseOptions(options, args);
 
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             String output = getOptionValue(OPTION_OUTPUT_PATH);
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
index 5aa2c69..d36545d 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidReducer.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
@@ -59,7 +60,7 @@ public class InMemCuboidReducer extends KylinReducer<ByteArrayWritable, ByteArra
         super.bindCurrentConfiguration(context.getConfiguration());
         KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
 
-        String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
+        String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase(Locale.ROOT);
         CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
         CubeDesc cubeDesc = cube.getDescriptor();
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
index 63d0619..e144ec3 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -45,7 +46,7 @@ public class MergeCuboidJob extends CuboidJob {
 
             String input = getOptionValue(OPTION_INPUT_PATH);
             String output = getOptionValue(OPTION_OUTPUT_PATH);
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
 
             CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
index c75abea..756d8ee 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/SelfDefineSortableKey.java
@@ -20,6 +20,7 @@ package org.apache.kylin.engine.mr.steps;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
@@ -28,9 +29,7 @@ import org.apache.kylin.metadata.datatype.DataType;
 public class SelfDefineSortableKey implements WritableComparable<SelfDefineSortableKey> {
 
     public enum TypeFlag {
-        NONE_NUMERIC_TYPE,
-        INTEGER_FAMILY_TYPE,
-        DOUBLE_FAMILY_TYPE
+        NONE_NUMERIC_TYPE, INTEGER_FAMILY_TYPE, DOUBLE_FAMILY_TYPE
     }
 
     private byte typeId; //non-numeric(0000 0000) int(0000 0001) other numberic(0000 0010)
@@ -50,7 +49,7 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
         this.typeId = typeId;
         this.rawKey = key;
         if (isNumberFamily()) {
-            String valueStr = new String(key.getBytes(), 1, key.getLength() - 1);
+            String valueStr = new String(key.getBytes(), 1, key.getLength() - 1, StandardCharsets.UTF_8);
             if (isIntegerFamily()) {
                 this.keyInObj = Long.parseLong(valueStr);
             } else {
@@ -61,7 +60,6 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
         }
     }
 
-
     public void init(Text key, DataType type) {
         init(key, getTypeIdByDatatype(type));
     }
@@ -113,7 +111,6 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
         return (typeId == TypeFlag.INTEGER_FAMILY_TYPE.ordinal());
     }
 
-
     public byte getTypeIdByDatatype(DataType type) {
         if (!type.isNumberFamily()) {
             return (byte) TypeFlag.NONE_NUMERIC_TYPE.ordinal();
@@ -129,5 +126,3 @@ public class SelfDefineSortableKey implements WritableComparable<SelfDefineSorta
     }
 
 }
-
-
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
index fdb19db..a84a3a1 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
@@ -21,6 +21,7 @@ package org.apache.kylin.engine.mr.steps;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -66,7 +67,8 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
-        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
+        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()))
+                .latestCopyForWrite();
         final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
 
         CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
@@ -92,7 +94,8 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
         }
     }
 
-    private void saveExtSnapshotIfNeeded(CubeManager cubeManager, CubeInstance cube, CubeSegment segment) throws IOException {
+    private void saveExtSnapshotIfNeeded(CubeManager cubeManager, CubeInstance cube, CubeSegment segment)
+            throws IOException {
         String extLookupSnapshotStr = this.getParam(BatchConstants.ARG_EXT_LOOKUP_SNAPSHOTS_INFO);
         if (extLookupSnapshotStr == null || extLookupSnapshotStr.isEmpty()) {
             return;
@@ -142,7 +145,7 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
             for (Path outputFile : outputFiles) {
                 try {
                     is = fs.open(outputFile);
-                    isr = new InputStreamReader(is);
+                    isr = new InputStreamReader(is, StandardCharsets.UTF_8);
                     bufferedReader = new BufferedReader(isr);
                     minValues.add(bufferedReader.readLine());
                     maxValues.add(bufferedReader.readLine());
@@ -157,10 +160,14 @@ public class UpdateCubeInfoAfterBuildStep extends AbstractExecutable {
             String maxValue = order.max(maxValues);
             logger.info("updateSegment step. {} minValue:" + minValue + " maxValue:" + maxValue, dimColRef.getName());
 
-            if (segment.isOffsetCube() && partitionCol != null && partitionCol.getIdentity().equals(dimColRef.getIdentity())) {
-                logger.info("update partition. {} timeMinValue:" + minValue + " timeMaxValue:" + maxValue, dimColRef.getName());
-                if (DateFormat.stringToMillis(minValue) != timeMinValue && DateFormat.stringToMillis(maxValue) != timeMaxValue) {
-                    segment.setTSRange(new TSRange(DateFormat.stringToMillis(minValue), DateFormat.stringToMillis(maxValue) + 1));
+            if (segment.isOffsetCube() && partitionCol != null
+                    && partitionCol.getIdentity().equals(dimColRef.getIdentity())) {
+                logger.info("update partition. {} timeMinValue:" + minValue + " timeMaxValue:" + maxValue,
+                        dimColRef.getName());
+                if (DateFormat.stringToMillis(minValue) != timeMinValue
+                        && DateFormat.stringToMillis(maxValue) != timeMaxValue) {
+                    segment.setTSRange(
+                            new TSRange(DateFormat.stringToMillis(minValue), DateFormat.stringToMillis(maxValue) + 1));
                 }
             }
             segment.getDimensionRangeInfoMap().put(dimColRef.getIdentity(), new DimensionRangeInfo(minValue, maxValue));
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java
index b71e459..80c483e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateOldCuboidShardJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -54,7 +55,7 @@ public class UpdateOldCuboidShardJob extends AbstractHadoopJob {
             parseOptions(options, args);
 
             job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String segmentID = getOptionValue(OPTION_SEGMENT_ID);
             Path input = new Path(getOptionValue(OPTION_INPUT_PATH));
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
index 29e2f65..aea6b0b 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/SortedColumnReaderTest.java
@@ -23,8 +23,11 @@ import static org.junit.Assert.assertTrue;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -100,7 +103,7 @@ public class SortedColumnReaderTest extends LocalFileMetadataTestCase {
         }
         ArrayList<BufferedWriter> bws = new ArrayList<>();
         for (File f : allFiles) {
-            bws.add(new BufferedWriter(new FileWriter(f)));
+            bws.add(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)));
         }
         System.out.println(data.size());
         for (String str : data) {
@@ -205,7 +208,7 @@ public class SortedColumnReaderTest extends LocalFileMetadataTestCase {
         }
         ArrayList<BufferedWriter> bws = new ArrayList<>();
         for (File f : allFiles) {
-            bws.add(new BufferedWriter(new FileWriter(f)));
+            bws.add(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)));
         }
         System.out.println(data.size());
         for (String str : data) {
@@ -278,7 +281,7 @@ public class SortedColumnReaderTest extends LocalFileMetadataTestCase {
         }
         ArrayList<BufferedWriter> bws = new ArrayList<>();
         for (File f : allFiles) {
-            bws.add(new BufferedWriter(new FileWriter(f)));
+            bws.add(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)));
         }
         System.out.println(data.size());
         for (String str : data) {
@@ -302,7 +305,8 @@ public class SortedColumnReaderTest extends LocalFileMetadataTestCase {
         ArrayList<String> result = new ArrayList<>();
         File dir = new File(dirPath);
         for (File f : dir.listFiles()) {
-            BufferedReader br = new BufferedReader(new FileReader(f));
+            BufferedReader br = new BufferedReader(
+                    new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8));
             String str = br.readLine();
             while (str != null) {
                 result.add(str);
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
index ad1c235..f3b0a13 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubingByLayer.java
@@ -17,6 +17,13 @@
 */
 package org.apache.kylin.engine.spark;
 
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -62,13 +69,8 @@ import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.storage.StorageLevel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.Tuple2;
 
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
+import scala.Tuple2;
 
 /**
  * Spark application to build cube with the "by-layer" algorithm. Only support source data from Hive; Metadata in HBase.
@@ -87,8 +89,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
             .isRequired(true).withDescription("Cube output path").create(BatchConstants.ARG_OUTPUT);
     public static final Option OPTION_INPUT_TABLE = OptionBuilder.withArgName("hiveTable").hasArg().isRequired(true)
             .withDescription("Hive Intermediate Table").create("hiveTable");
-    public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg().isRequired(true)
-            .withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
+    public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
+            .isRequired(true).withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
 
     private Options options;
 
@@ -162,7 +164,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
 
         boolean isSequenceFile = JoinedFlatTable.SEQUENCEFILE.equalsIgnoreCase(envConfig.getFlatTableStorageFormat());
 
-        final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = SparkUtil.hiveRecordInputRDD(isSequenceFile, sc, inputPath, hiveTable)
+        final JavaPairRDD<ByteArray, Object[]> encodedBaseRDD = SparkUtil
+                .hiveRecordInputRDD(isSequenceFile, sc, inputPath, hiveTable)
                 .mapToPair(new EncodeBaseCuboid(cubeName, segmentId, metaUrl, sConf));
 
         Long totalCount = 0L;
@@ -170,7 +173,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
             totalCount = encodedBaseRDD.count();
         }
 
-        final BaseCuboidReducerFunction2 baseCuboidReducerFunction = new BaseCuboidReducerFunction2(cubeName, metaUrl, sConf);
+        final BaseCuboidReducerFunction2 baseCuboidReducerFunction = new BaseCuboidReducerFunction2(cubeName, metaUrl,
+                sConf);
         BaseCuboidReducerFunction2 reducerFunction2 = baseCuboidReducerFunction;
         if (allNormalMeasure == false) {
             reducerFunction2 = new CuboidReducerFunction2(cubeName, metaUrl, sConf, needAggr);
@@ -205,8 +209,6 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         //HadoopUtil.deleteHDFSMeta(metaUrl);
     }
 
-
-
     protected JavaPairRDD<ByteArray, Object[]> prepareOutput(JavaPairRDD<ByteArray, Object[]> rdd, KylinConfig config,
             CubeSegment segment, int level) {
         return rdd;
@@ -342,7 +344,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
     static public class CuboidReducerFunction2 extends BaseCuboidReducerFunction2 {
         private boolean[] needAggr;
 
-        public CuboidReducerFunction2(String cubeName, String metaUrl, SerializableConfiguration conf, boolean[] needAggr) {
+        public CuboidReducerFunction2(String cubeName, String metaUrl, SerializableConfiguration conf,
+                boolean[] needAggr) {
             super(cubeName, metaUrl, conf);
             this.needAggr = needAggr;
         }
@@ -386,7 +389,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
 
         public void init() {
             KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
-            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig.setAndUnsetThreadLocalConfig(kConfig)) {
+            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
+                    .setAndUnsetThreadLocalConfig(kConfig)) {
                 CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
                 this.cubeSegment = cubeInstance.getSegmentById(segmentId);
                 this.cubeDesc = cubeInstance.getDescriptor();
@@ -436,8 +440,8 @@ public class SparkCubingByLayer extends AbstractApplication implements Serializa
         Long count2 = getRDDCountSum(rdd, countMeasureIndex);
         if (count2 != totalCount * thisCuboidNum) {
             throw new IllegalStateException(
-                    String.format("Sanity check failed, level %s, total count(*) is %s; cuboid number %s", thisLevel,
-                            count2, thisCuboidNum));
+                    String.format(Locale.ROOT, "Sanity check failed, level %s, total count(*) is %s; cuboid number %s",
+                            thisLevel, count2, thisCuboidNum));
         } else {
             logger.info("sanity check success for level " + thisLevel + ", count(*) is " + (count2 / thisCuboidNum));
         }
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
index a354909..fcf5484 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Callable;
@@ -194,9 +195,8 @@ public class SparkExecutable extends AbstractExecutable {
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         ExecutableManager mgr = getManager();
         Map<String, String> extra = mgr.getOutput(getId()).getExtra();
-        String sparkJobId = extra.get(ExecutableConstants.SPARK_JOB_ID);
-        if (!StringUtils.isEmpty(sparkJobId)) {
-            return onResumed(sparkJobId, mgr);
+        if (extra.containsKey(ExecutableConstants.SPARK_JOB_ID)) {
+            return onResumed(extra.get(ExecutableConstants.SPARK_JOB_ID), mgr);
         } else {
             String cubeName = this.getParam(SparkCubingByLayer.OPTION_CUBE_NAME.getOpt());
             CubeInstance cube = CubeManager.getInstance(context.getConfig()).getCube(cubeName);
@@ -256,8 +256,8 @@ public class SparkExecutable extends AbstractExecutable {
             }
 
             stringBuilder.append("--jars %s %s %s");
-            final String cmd = String.format(stringBuilder.toString(), hadoopConf, KylinConfig.getSparkHome(), jars,
-                    jobJar, formatArgs());
+            final String cmd = String.format(Locale.ROOT, stringBuilder.toString(), hadoopConf,
+                    KylinConfig.getSparkHome(), jars, jobJar, formatArgs());
             logger.info("cmd: " + cmd);
             final ExecutorService executorService = Executors.newSingleThreadExecutor();
             final CliCommandExecutor exec = new CliCommandExecutor();
@@ -371,7 +371,7 @@ public class SparkExecutable extends AbstractExecutable {
     private String getAppState(String appId) throws IOException {
         CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
         PatternedLogger patternedLogger = new PatternedLogger(logger);
-        String stateCmd = String.format("yarn application -status %s", appId);
+        String stateCmd = String.format(Locale.ROOT, "yarn application -status %s", appId);
         executor.execute(stateCmd, patternedLogger);
         Map<String, String> info = patternedLogger.getInfo();
         return info.get(ExecutableConstants.YARN_APP_STATE);
@@ -379,7 +379,7 @@ public class SparkExecutable extends AbstractExecutable {
 
     private void killApp(String appId) throws IOException, InterruptedException {
         CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
-        String killCmd = String.format("yarn application -kill %s", appId);
+        String killCmd = String.format(Locale.ROOT, "yarn application -kill %s", appId);
         executor.execute(killCmd);
     }
 
@@ -421,7 +421,8 @@ public class SparkExecutable extends AbstractExecutable {
             // cube statistics is not available for new segment
             dumpList.add(segment.getStatisticsResourcePath());
         }
-        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segment.getConfig(), this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
+        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segment.getConfig(),
+                this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
     }
 
     private void attachSegmentsMetadataWithDict(List<CubeSegment> segments) throws IOException {
@@ -435,7 +436,8 @@ public class SparkExecutable extends AbstractExecutable {
                 dumpList.add(segment.getStatisticsResourcePath());
             }
         }
-        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segments.get(0).getConfig(), this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
+        JobRelatedMetaUtil.dumpAndUploadKylinPropsAndMetadata(dumpList, (KylinConfigExt) segments.get(0).getConfig(),
+                this.getParam(SparkCubingByLayer.OPTION_META_URL.getOpt()));
     }
 
     private void readCounters(final Map<String, String> info) {
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
index 7f05234..b12fd02 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkFactDistinct.java
@@ -18,13 +18,18 @@
 
 package org.apache.kylin.engine.spark;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -86,20 +91,18 @@ import org.apache.spark.api.java.function.PairFlatMapFunction;
 import org.apache.spark.util.LongAccumulator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+
 import scala.Tuple2;
 import scala.Tuple3;
 
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 public class SparkFactDistinct extends AbstractApplication implements Serializable {
 
     protected static final Logger logger = LoggerFactory.getLogger(SparkFactDistinct.class);
@@ -119,8 +122,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
             .withDescription("Hive Intermediate Table").create("hiveTable");
     public static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg()
             .isRequired(true).withDescription("Hive Intermediate Table PATH").create(BatchConstants.ARG_INPUT);
-    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUPUT).hasArg()
-            .isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUPUT);
+    public static final Option OPTION_COUNTER_PATH = OptionBuilder.withArgName(BatchConstants.ARG_COUNTER_OUPUT)
+            .hasArg().isRequired(true).withDescription("counter output path").create(BatchConstants.ARG_COUNTER_OUPUT);
 
     private Options options;
 
@@ -152,7 +155,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
         String counterPath = optionsHelper.getOptionValue(OPTION_COUNTER_PATH);
         int samplingPercent = Integer.parseInt(optionsHelper.getOptionValue(OPTION_STATS_SAMPLING_PERCENT));
 
-        Class[] kryoClassArray = new Class[] { Class.forName("scala.reflect.ClassTag$$anon$1"), Class.forName("org.apache.kylin.engine.mr.steps.SelfDefineSortableKey") };
+        Class[] kryoClassArray = new Class[] { Class.forName("scala.reflect.ClassTag$$anon$1"),
+                Class.forName("org.apache.kylin.engine.mr.steps.SelfDefineSortableKey") };
 
         SparkConf conf = new SparkConf().setAppName("Fact distinct columns for:" + cubeName + " segment " + segmentId);
         //serialization conf
@@ -186,17 +190,24 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
 
         final JavaRDD<String[]> recordRDD = SparkUtil.hiveRecordInputRDD(isSequenceFile, sc, inputPath, hiveTable);
 
-        JavaPairRDD<SelfDefineSortableKey, Text> flatOutputRDD = recordRDD.mapPartitionsToPair(new FlatOutputFucntion(cubeName, segmentId, metaUrl, sConf, samplingPercent, bytesWritten));
+        JavaPairRDD<SelfDefineSortableKey, Text> flatOutputRDD = recordRDD.mapPartitionsToPair(
+                new FlatOutputFucntion(cubeName, segmentId, metaUrl, sConf, samplingPercent, bytesWritten));
 
-        JavaPairRDD<SelfDefineSortableKey, Iterable<Text>> aggredRDD = flatOutputRDD.groupByKey(new FactDistinctPartitioner(cubeName, metaUrl, sConf, reducerMapping.getTotalReducerNum()));
+        JavaPairRDD<SelfDefineSortableKey, Iterable<Text>> aggredRDD = flatOutputRDD
+                .groupByKey(new FactDistinctPartitioner(cubeName, metaUrl, sConf, reducerMapping.getTotalReducerNum()));
 
-        JavaPairRDD<String, Tuple3<Writable, Writable, String>> outputRDD = aggredRDD.mapPartitionsToPair(new MultiOutputFunction(cubeName, metaUrl, sConf, samplingPercent));
+        JavaPairRDD<String, Tuple3<Writable, Writable, String>> outputRDD = aggredRDD
+                .mapPartitionsToPair(new MultiOutputFunction(cubeName, metaUrl, sConf, samplingPercent));
 
         // make each reducer output to respective dir
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_COLUMN, SequenceFileOutputFormat.class, NullWritable.class, Text.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_DICT, SequenceFileOutputFormat.class, NullWritable.class, ArrayPrimitiveWritable.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_STATISTICS, SequenceFileOutputFormat.class, LongWritable.class, BytesWritable.class);
-        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_PARTITION, TextOutputFormat.class, NullWritable.class, LongWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_COLUMN, SequenceFileOutputFormat.class,
+                NullWritable.class, Text.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_DICT, SequenceFileOutputFormat.class,
+                NullWritable.class, ArrayPrimitiveWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_STATISTICS, SequenceFileOutputFormat.class,
+                LongWritable.class, BytesWritable.class);
+        MultipleOutputs.addNamedOutput(job, BatchConstants.CFG_OUTPUT_PARTITION, TextOutputFormat.class,
+                NullWritable.class, LongWritable.class);
 
         FileOutputFormat.setOutputPath(job, new Path(outputPath));
         FileOutputFormat.setCompressOutput(job, false);
@@ -223,7 +234,6 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
         HadoopUtil.deleteHDFSMeta(metaUrl);
     }
 
-
     static class FlatOutputFucntion implements PairFlatMapFunction<Iterator<String[]>, SelfDefineSortableKey, Text> {
         private volatile transient boolean initialized = false;
         private String cubeName;
@@ -241,7 +251,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
         private LongAccumulator bytesWritten;
         private KeyValueBuilder keyValueBuilder;
 
-        public FlatOutputFucntion(String cubeName, String segmentId, String metaurl, SerializableConfiguration conf, int samplingPercent, LongAccumulator bytesWritten) {
+        public FlatOutputFucntion(String cubeName, String segmentId, String metaurl, SerializableConfiguration conf,
+                int samplingPercent, LongAccumulator bytesWritten) {
             this.cubeName = cubeName;
             this.segmentId = segmentId;
             this.metaUrl = metaurl;
@@ -572,7 +583,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
         private int totalReducerNum;
         private FactDistinctColumnsReducerMapping reducerMapping;
 
-        public FactDistinctPartitioner(String cubeName, String metaUrl, SerializableConfiguration conf, int totalReducerNum) {
+        public FactDistinctPartitioner(String cubeName, String metaUrl, SerializableConfiguration conf,
+                int totalReducerNum) {
             this.cubeName = cubeName;
             this.metaUrl = metaUrl;
             this.conf = conf;
@@ -641,7 +653,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
         private String minValue = null;
         private List<Tuple2<String, Tuple3<Writable, Writable, String>>> result;
 
-        public MultiOutputFunction(String cubeName, String metaurl, SerializableConfiguration conf, int samplingPercent) {
+        public MultiOutputFunction(String cubeName, String metaurl, SerializableConfiguration conf,
+                int samplingPercent) {
             this.cubeName = cubeName;
             this.metaUrl = metaurl;
             this.conf = conf;
@@ -651,7 +664,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
         private void init() throws IOException {
             taskId = TaskContext.getPartitionId();
             KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
-            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig.setAndUnsetThreadLocalConfig(kConfig)) {
+            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
+                    .setAndUnsetThreadLocalConfig(kConfig)) {
                 CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
                 cubeDesc = cubeInstance.getDescriptor();
                 cubeConfig = cubeInstance.getConfig();
@@ -686,7 +700,8 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
                         builder = DictionaryGenerator.newDictionaryBuilder(col.getType());
                         builder.init(null, 0, null);
                     }
-                    logger.info("Partition " + taskId + " handling column " + col + ", buildDictInReducer=" + buildDictInReducer);
+                    logger.info("Partition " + taskId + " handling column " + col + ", buildDictInReducer="
+                            + buildDictInReducer);
                 }
 
                 initialized = true;
@@ -760,7 +775,7 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
                             String fileName = col.getIdentity() + "/";
                             result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(
                                     BatchConstants.CFG_OUTPUT_COLUMN, new Tuple3<Writable, Writable, String>(
-                                    NullWritable.get(), new Text(keyBytes), fileName)));
+                                            NullWritable.get(), new Text(keyBytes), fileName)));
                         }
                     }
                 }
@@ -819,22 +834,24 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
                 String dimRangeFileName = col.getIdentity() + "/" + col.getName() + DIMENSION_COL_INFO_FILE_POSTFIX;
 
                 result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(BatchConstants.CFG_OUTPUT_PARTITION,
-                        new Tuple3<Writable, Writable, String>(NullWritable.get(), new Text(minValue.getBytes()),
-                                dimRangeFileName)));
+                        new Tuple3<Writable, Writable, String>(NullWritable.get(),
+                                new Text(minValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName)));
                 result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(BatchConstants.CFG_OUTPUT_PARTITION,
-                        new Tuple3<Writable, Writable, String>(NullWritable.get(), new Text(maxValue.getBytes()),
-                                dimRangeFileName)));
+                        new Tuple3<Writable, Writable, String>(NullWritable.get(),
+                                new Text(maxValue.getBytes(StandardCharsets.UTF_8)), dimRangeFileName)));
                 logger.info("write dimension range info for col : " + col.getName() + "  minValue:" + minValue
                         + " maxValue:" + maxValue);
             }
         }
 
-        private void outputDict(TblColRef col, Dictionary<String> dict, List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
+        private void outputDict(TblColRef col, Dictionary<String> dict,
+                List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
                 throws IOException, InterruptedException {
             // output written to baseDir/colName/colName.rldict-r-00000 (etc)
             String dictFileName = col.getIdentity() + "/" + col.getName() + DICT_FILE_POSTFIX;
 
-            try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream outputStream = new DataOutputStream(baos)) {
+            try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                    DataOutputStream outputStream = new DataOutputStream(baos)) {
                 outputStream.writeUTF(dict.getClass().getName());
                 dict.write(outputStream);
 
@@ -844,10 +861,12 @@ public class SparkFactDistinct extends AbstractApplication implements Serializab
             }
         }
 
-        private void outputStatistics(List<Long> allCuboids, List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
+        private void outputStatistics(List<Long> allCuboids,
+                List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
                 throws IOException, InterruptedException {
             // output written to baseDir/statistics/statistics-r-00000 (etc)
-            String statisticsFileName = BatchConstants.CFG_OUTPUT_STATISTICS + "/" + BatchConstants.CFG_OUTPUT_STATISTICS;
+            String statisticsFileName = BatchConstants.CFG_OUTPUT_STATISTICS + "/"
+                    + BatchConstants.CFG_OUTPUT_STATISTICS;
 
             // mapper overlap ratio at key -1
             long grandTotal = 0;
diff --git a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
index 9f8c7dd..7e3ca05 100644
--- a/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
+++ b/jdbc/src/main/java/org/apache/kylin/jdbc/KylinClient.java
@@ -20,6 +20,7 @@ package org.apache.kylin.jdbc;
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import java.sql.Date;
@@ -186,7 +187,7 @@ public class KylinClient implements IRemoteClient {
         case Types.BINARY:
         case Types.VARBINARY:
         case Types.LONGVARBINARY:
-            return value.getBytes();
+            return value.getBytes(StandardCharsets.UTF_8);
         case Types.DATE:
             return Date.valueOf(value);
         case Types.TIME:
@@ -217,7 +218,8 @@ public class KylinClient implements IRemoteClient {
 
         String username = connProps.getProperty("user");
         String password = connProps.getProperty("password");
-        String basicAuth = DatatypeConverter.printBase64Binary((username + ":" + password).getBytes());
+        String basicAuth = DatatypeConverter
+                .printBase64Binary((username + ":" + password).getBytes(StandardCharsets.UTF_8));
         method.addHeader("Authorization", "Basic " + basicAuth);
     }
 
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java b/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
index dc41773..dd84bd6 100755
--- a/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/ITDictionaryManagerTest.java
@@ -21,9 +21,13 @@ package org.apache.kylin.cube;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.BufferedWriter;
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Set;
 
 import org.apache.kylin.common.util.Dictionary;
@@ -60,7 +64,8 @@ public class ITDictionaryManagerTest extends LocalFileMetadataTestCase {
     @Test
     public void basic() throws Exception {
         dictMgr = DictionaryManager.getInstance(getTestConfig());
-        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_without_slr_desc");
+        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig())
+                .getCubeDesc("test_kylin_cube_without_slr_desc");
         TblColRef col = cubeDesc.findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
 
         MockDistinctColumnValuesProvider mockupData = new MockDistinctColumnValuesProvider("A", "B", "C");
@@ -105,7 +110,8 @@ public class ITDictionaryManagerTest extends LocalFileMetadataTestCase {
 
         public MockDistinctColumnValuesProvider(String... values) throws IOException {
             File tmpFile = File.createTempFile("MockDistinctColumnValuesProvider", ".txt");
-            PrintWriter out = new PrintWriter(tmpFile);
+            PrintWriter out = new PrintWriter(
+                    new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tmpFile), StandardCharsets.UTF_8)));
 
             set = Sets.newTreeSet();
             for (String value : values) {
diff --git a/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java b/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java
index 797d0db..1d6d0bc 100755
--- a/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/cube/cuboid/algorithm/ITAlgorithmTestBase.java
@@ -19,8 +19,10 @@
 package org.apache.kylin.cube.cuboid.algorithm;
 
 import java.io.BufferedReader;
-import java.io.FileReader;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -88,7 +90,8 @@ public class ITAlgorithmTestBase {
 
             String sCurrentLine;
 
-            br = new BufferedReader(new FileReader("src/test/resources/statistics.txt"));
+            br = new BufferedReader(new InputStreamReader(new FileInputStream("src/test/resources/statistics.txt"),
+                    StandardCharsets.UTF_8));
 
             while ((sCurrentLine = br.readLine()) != null) {
                 String[] statPair = sCurrentLine.split(" ");
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 16ceede..ec5bc35 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -26,6 +26,7 @@ import java.text.SimpleDateFormat;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -115,7 +116,7 @@ public class BuildCubeWithEngine {
     public static void beforeClass() throws Exception {
         beforeClass(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
     }
-    
+
     public static void beforeClass(String confDir) throws Exception {
         logger.info("Adding to classpath: " + new File(confDir).getAbsolutePath());
         ClassUtil.addClasspath(new File(confDir).getAbsolutePath());
@@ -138,7 +139,8 @@ public class BuildCubeWithEngine {
         System.setProperty("SPARK_HOME", "/usr/local/spark"); // need manually create and put spark to this folder on Jenkins
         System.setProperty("kylin.hadoop.conf.dir", confDir);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException(
+                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
 
         HBaseMetadataTestCase.staticCreateTestMetadata(confDir);
@@ -153,7 +155,10 @@ public class BuildCubeWithEngine {
                 throw new IOException("mkdir fails");
             }
         } catch (IOException e) {
-            throw new RuntimeException("failed to create kylin.env.hdfs-working-dir, Please make sure the user has right to access " + KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(), e);
+            throw new RuntimeException(
+                    "failed to create kylin.env.hdfs-working-dir, Please make sure the user has right to access "
+                            + KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(),
+                    e);
         }
     }
 
@@ -161,7 +166,7 @@ public class BuildCubeWithEngine {
         String fastModeStr = System.getProperty("fastBuildMode");
         if (fastModeStr == null)
             fastModeStr = System.getenv("KYLIN_CI_FASTBUILD");
-        
+
         return "true".equalsIgnoreCase(fastModeStr);
     }
 
@@ -289,14 +294,14 @@ public class BuildCubeWithEngine {
     private boolean testLeftJoinCube() throws Exception {
         String cubeName = "ci_left_join_cube";
         clearSegment(cubeName);
-        
+
         // NOTE: ci_left_join_cube has percentile which isn't supported by Spark engine now
 
         return doBuildAndMergeOnCube(cubeName);
     }
 
     private boolean doBuildAndMergeOnCube(String cubeName) throws ParseException, Exception {
-        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
         f.setTimeZone(TimeZone.getTimeZone("GMT"));
         long date1 = 0;
         long date2 = f.parse("2012-06-01").getTime();
@@ -307,7 +312,7 @@ public class BuildCubeWithEngine {
 
         if (fastBuildMode)
             return buildSegment(cubeName, date1, date4);
-        
+
         if (!buildSegment(cubeName, date1, date2))
             return false;
         checkNormalSegRangeInfo(cubeManager.getCube(cubeName));
@@ -327,14 +332,13 @@ public class BuildCubeWithEngine {
             return false;
         checkEmptySegRangeInfo(cubeManager.getCube(cubeName));
 
-
         if (!mergeSegment(cubeName, date2, date4)) // merge 2 normal segments
             return false;
         checkNormalSegRangeInfo(cubeManager.getCube(cubeName));
         if (!mergeSegment(cubeName, date2, date5)) // merge normal and empty
             return false;
         checkNormalSegRangeInfo(cubeManager.getCube(cubeName));
-        
+
         // now have 2 normal segments [date1, date2) [date2, date5) and 1 empty segment [date5, date6)
         return true;
     }
@@ -345,7 +349,7 @@ public class BuildCubeWithEngine {
 
         String cubeName = "ci_inner_join_cube";
         clearSegment(cubeName);
-        
+
         return doBuildAndMergeOnCube(cubeName);
     }
 
@@ -382,7 +386,8 @@ public class BuildCubeWithEngine {
     }
 
     private Boolean mergeSegment(String cubeName, long startDate, long endDate) throws Exception {
-        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), new TSRange(startDate, endDate), null, true);
+        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), new TSRange(startDate, endDate),
+                null, true);
         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
         jobService.addJob(job);
         ExecutableState state = waitForJob(job.getId());
@@ -494,15 +499,15 @@ public class BuildCubeWithEngine {
             long max_v = DateFormat.stringToMillis(dmRangeInfo.getMax());
             long ts_range_start = segment.getTSRange().start.v;
             long ts_range_end = segment.getTSRange().end.v;
-            if (!(ts_range_start <= min_v && max_v <= ts_range_end -1)) {
-                throw new RuntimeException(String.format(
+            if (!(ts_range_start <= min_v && max_v <= ts_range_end - 1)) {
+                throw new RuntimeException(String.format(Locale.ROOT,
                         "Build cube failed, wrong partition column min/max value."
                                 + " Segment: %s, min value: %s, TsRange.start: %s, max value: %s, TsRange.end: %s",
                         segment, min_v, ts_range_start, max_v, ts_range_end));
             }
         }
     }
-    
+
     private CubeSegment getLastModifiedSegment(CubeInstance cube) {
         return Collections.max(cube.getSegments(), new Comparator<CubeSegment>() {
             @Override
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 97a9c9c..80a5521 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -25,6 +25,7 @@ import java.io.IOException;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.TimeZone;
 import java.util.concurrent.Callable;
@@ -167,7 +168,7 @@ public class BuildCubeWithStream {
         new Thread(new Runnable() {
             @Override
             public void run() {
-                SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+                SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
                 f.setTimeZone(TimeZone.getTimeZone("GMT"));
                 long dateStart = 0;
                 try {
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
index db2b948..ca4b5e1 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
@@ -20,6 +20,7 @@ package org.apache.kylin.provision;
 import java.io.UnsupportedEncodingException;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Properties;
 import java.util.Random;
 
@@ -38,7 +39,8 @@ import kafka.server.KafkaServerStartable;
 import kafka.utils.ZkUtils;
 
 public class MockKafka {
-    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
+    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port,
+            String brokerId) {
         Properties properties = new Properties();
         properties.put("port", port);
         properties.put("broker.id", brokerId);
@@ -59,7 +61,8 @@ public class MockKafka {
     private ZkConnection zkConnection;
 
     public MockKafka(ZkConnection zkServerConnection) {
-        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(), "9092", "1");
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(),
+                "9092", "1");
         start();
     }
 
@@ -69,14 +72,16 @@ public class MockKafka {
     }
 
     public MockKafka(ZkConnection zkServerConnection, int port, int brokerId) {
-        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(), String.valueOf(port), String.valueOf(brokerId));
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + RandomUtil.randomUUID().toString(),
+                String.valueOf(port), String.valueOf(brokerId));
         //start();
     }
 
     private MockKafka(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
         this(createProperties(zkServerConnection, logDir, port, brokerId));
         this.zkConnection = zkServerConnection;
-        System.out.println(String.format("Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(), kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
+        System.out.println(String.format(Locale.ROOT, "Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(),
+                kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
     }
 
     public void createTopic(String topic, int partition, int replication) {
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
index d703941..261fe5a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/H2Database.java
@@ -27,6 +27,7 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.io.IOUtils;
@@ -80,7 +81,7 @@ public class H2Database {
 
     private void loadH2Table(String tableName) throws SQLException {
         TableMetadataManager metaMgr = TableMetadataManager.getInstance(config);
-        TableDesc tableDesc = metaMgr.getTableDesc(tableName.toUpperCase(), project);
+        TableDesc tableDesc = metaMgr.getTableDesc(tableName.toUpperCase(Locale.ROOT), project);
         File tempFile = null;
 
         try {
@@ -162,11 +163,11 @@ public class H2Database {
     }
 
     private static String getH2DataType(String javaDataType) {
-        String hiveDataType = javaToH2DataTypeMapping.get(javaDataType.toLowerCase());
+        String hiveDataType = javaToH2DataTypeMapping.get(javaDataType.toLowerCase(Locale.ROOT));
         if (hiveDataType == null) {
             hiveDataType = javaDataType;
         }
-        return hiveDataType.toLowerCase();
+        return hiveDataType.toLowerCase(Locale.ROOT);
     }
 
 }
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index fa49afb..4324750 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -40,6 +40,7 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.logging.LogManager;
@@ -169,7 +170,7 @@ public class KylinTestBase {
         System.out.println(folder.getAbsolutePath());
         Set<File> set = new TreeSet<>(new FileByNameComparator());
         for (final File fileEntry : folder.listFiles()) {
-            if (fileEntry.getName().toLowerCase().endsWith(fileType.toLowerCase())) {
+            if (fileEntry.getName().toLowerCase(Locale.ROOT).endsWith(fileType.toLowerCase(Locale.ROOT))) {
                 set.add(fileEntry);
             }
         }
@@ -349,7 +350,7 @@ public class KylinTestBase {
         for (int i = 0; i < tokens.length - 1; ++i) {
             if ((tokens[i].equalsIgnoreCase("inner") || tokens[i].equalsIgnoreCase("left"))
                     && tokens[i + 1].equalsIgnoreCase("join")) {
-                tokens[i] = targetType.toLowerCase();
+                tokens[i] = targetType.toLowerCase(Locale.ROOT);
             }
         }
 
@@ -508,7 +509,7 @@ public class KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             String sqlWithLimit;
-            if (sql.toLowerCase().contains("limit ")) {
+            if (sql.toLowerCase(Locale.ROOT).contains("limit ")) {
                 sqlWithLimit = sql;
             } else {
                 sqlWithLimit = sql + " limit 5";
diff --git a/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java b/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
index 26a81e3..04a1f20 100644
--- a/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
+++ b/metrics-reporter-hive/src/main/java/org/apache/kylin/metrics/lib/impl/hive/HiveProducer.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 
@@ -128,7 +129,7 @@ public class HiveProducer {
         sb.append(tableLocation);
         for (Map.Entry<String, String> e : recordKey.partition().entrySet()) {
             sb.append("/");
-            sb.append(e.getKey().toLowerCase());
+            sb.append(e.getKey().toLowerCase(Locale.ROOT));
             sb.append("=");
             sb.append(e.getValue());
         }
@@ -145,7 +146,7 @@ public class HiveProducer {
                 } else {
                     hql.append(",");
                 }
-                hql.append(e.getKey().toLowerCase());
+                hql.append(e.getKey().toLowerCase(Locale.ROOT));
                 hql.append("='" + e.getValue() + "'");
             }
             hql.append(")");
@@ -192,7 +193,7 @@ public class HiveProducer {
         List<FieldSchema> fields = tableFieldSchemaCache.get(tableNameSplits).getSecond();
         List<Object> columnValues = Lists.newArrayListWithExpectedSize(fields.size());
         for (FieldSchema fieldSchema : fields) {
-            columnValues.add(rawValue.get(fieldSchema.getName().toUpperCase()));
+            columnValues.add(rawValue.get(fieldSchema.getName().toUpperCase(Locale.ROOT)));
         }
 
         return new HiveProducerRecord(tableNameSplits.getFirst(), tableNameSplits.getSecond(), partitionKVs, columnValues);
diff --git a/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java b/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java
index f5ada16..2b258f0 100644
--- a/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java
+++ b/query/src/main/java/org/apache/kylin/query/enumerator/HiveEnumerator.java
@@ -25,6 +25,7 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.calcite.linq4j.Enumerator;
 import org.apache.kylin.common.util.DBUtils;
 import org.apache.kylin.query.relnode.OLAPContext;
@@ -81,7 +82,7 @@ public class HiveEnumerator implements Enumerator<Object[]> {
             if (hasNext) {
                 List<String> allFields = olapContext.returnTupleInfo.getAllFields();
                 for (int i = 0; i < allFields.size(); i++) {
-                    Object value = rs.getObject(allFields.get(i).toLowerCase());
+                    Object value = rs.getObject(allFields.get(i).toLowerCase(Locale.ROOT));
                     current[i] = value;
                 }
             }
diff --git a/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java b/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java
index a1935fe..534b02f 100644
--- a/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java
+++ b/query/src/main/java/org/apache/kylin/query/schema/OLAPSchemaFactory.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -147,7 +148,7 @@ public class OLAPSchemaFactory implements SchemaFactory {
         int index = 0;
         out.append("            \"functions\": [\n");
         for (Map.Entry<String, String> udf : udfs.entrySet()) {
-            String udfName = udf.getKey().trim().toUpperCase();
+            String udfName = udf.getKey().trim().toUpperCase(Locale.ROOT);
             String udfClassName = udf.getValue().trim();
             out.append("               {\n");
             out.append("                   name: '" + udfName + "',\n");
diff --git a/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java b/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java
index 0debd6c..3985192 100644
--- a/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java
+++ b/query/src/main/java/org/apache/kylin/query/security/QueryInterceptor.java
@@ -21,6 +21,7 @@ package org.apache.kylin.query.security;
 import java.util.Collection;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.query.relnode.OLAPContext;
 
 public abstract class QueryInterceptor {
@@ -40,7 +41,7 @@ public abstract class QueryInterceptor {
 
         Collection<String> queryCols = getQueryIdentifiers(contexts);
         for (String id : blackList) {
-            if (queryCols.contains(id.toUpperCase())) {
+            if (queryCols.contains(id.toUpperCase(Locale.ROOT))) {
                 throw new AccessDeniedException(getIdentifierType() + ":" + id);
             }
         }
diff --git a/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java b/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java
index c1b67b0..7cc2273 100644
--- a/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java
+++ b/query/src/main/java/org/apache/kylin/query/security/TableLevelACL.java
@@ -21,6 +21,7 @@ package org.apache.kylin.query.security;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.kylin.metadata.model.TblColRef;
@@ -30,8 +31,8 @@ public class TableLevelACL {
     public static void tableFilter(List<OLAPContext> contexts, List<String> tableBlackList) {
         Set<String> tableWithSchema = getTableWithSchema(contexts);
         for (String tbl : tableBlackList) {
-            if (tableWithSchema.contains(tbl.toUpperCase())) {
-//                throw new kylin.AccessDeniedException("table:" + tbl);
+            if (tableWithSchema.contains(tbl.toUpperCase(Locale.ROOT))) {
+                //                throw new kylin.AccessDeniedException("table:" + tbl);
                 System.out.println("Access table:" + tbl + " denied");
             }
         }
@@ -40,8 +41,8 @@ public class TableLevelACL {
     public static void columnFilter(List<OLAPContext> contexts, List<String> columnBlackList) {
         List<String> allColWithTblAndSchema = getAllColWithTblAndSchema(contexts);
         for (String tbl : columnBlackList) {
-            if (allColWithTblAndSchema.contains(tbl.toUpperCase())) {
-//                throw new kylin.AccessDeniedException("table:" + tbl);
+            if (allColWithTblAndSchema.contains(tbl.toUpperCase(Locale.ROOT))) {
+                //                throw new kylin.AccessDeniedException("table:" + tbl);
                 System.out.println("Access table:" + tbl + " denied");
             }
         }
diff --git a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
index 424a172..4378221 100644
--- a/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/QueryUtil.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.query.util;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -58,7 +59,7 @@ public class QueryUtil {
         final String suffixPattern = "^.+?\\s(limit\\s\\d+)?\\s(offset\\s\\d+)?\\s*$";
         sql = sql.replaceAll("\\s+", " ");
         Pattern pattern = Pattern.compile(suffixPattern);
-        Matcher matcher = pattern.matcher(sql.toLowerCase() + "  ");
+        Matcher matcher = pattern.matcher(sql.toLowerCase(Locale.ROOT) + "  ");
 
         if (matcher.find()) {
             if (limit > 0 && matcher.group(1) == null) {
@@ -71,7 +72,7 @@ public class QueryUtil {
 
         // https://issues.apache.org/jira/browse/KYLIN-2649
         if (kylinConfig.getForceLimit() > 0 && limit <= 0 && matcher.group(1) == null
-                && sql1.toLowerCase().matches("^select\\s+\\*\\p{all}*")) {
+                && sql1.toLowerCase(Locale.ROOT).matches("^select\\s+\\*\\p{all}*")) {
             sql1 += ("\nLIMIT " + kylinConfig.getForceLimit());
         }
 
@@ -143,7 +144,7 @@ public class QueryUtil {
     }
 
     public static boolean isSelectStatement(String sql) {
-        String sql1 = sql.toLowerCase();
+        String sql1 = sql.toLowerCase(Locale.ROOT);
         sql1 = removeCommentInSql(sql1);
         sql1 = sql1.trim();
         return sql1.startsWith("select") || (sql1.startsWith("with") && sql1.contains("select"))
diff --git a/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java b/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java
index d64c791..8d42e34 100644
--- a/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java
+++ b/query/src/main/java/org/apache/kylin/query/util/TempStatementUtil.java
@@ -20,6 +20,7 @@ package org.apache.kylin.query.util;
 
 import java.io.IOException;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -32,6 +33,7 @@ public class TempStatementUtil {
     private static final String WITH = "WITH";
     private static final String DROP = "DROP";
     private static final String CREATE = "CREATE";
+
     public static Pair<Boolean, String> handleTempStatement(String sql, KylinConfig config) {
         if (!config.isConvertCreateTableToWith()) {
             return new Pair<>(false, sql);
@@ -132,15 +134,15 @@ public class TempStatementUtil {
     }
 
     private static boolean isCreateTable(String sql) {
-        return sql.trim().toUpperCase().startsWith(CREATE);
+        return sql.trim().toUpperCase(Locale.ROOT).startsWith(CREATE);
     }
 
     private static boolean isDropTable(String sql) {
-        return sql.trim().toUpperCase().startsWith(DROP);
+        return sql.trim().toUpperCase(Locale.ROOT).startsWith(DROP);
     }
 
     private static boolean isWith(String sql) {
-        return sql.trim().toUpperCase().startsWith(WITH);
+        return sql.trim().toUpperCase(Locale.ROOT).startsWith(WITH);
     }
 
     private static String appendWith(String sql, KylinConfig config) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java
index 963a945..f9c81f7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/AdminController.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.controller;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.kylin.common.KylinConfig;
@@ -65,7 +66,7 @@ public class AdminController extends BasicController {
             envRes.put("env", env);
 
             return envRes;
-        } catch (ConfigurationException e) {
+        } catch (ConfigurationException | UnsupportedEncodingException e) {
             throw new RuntimeException(msg.getGET_ENV_CONFIG_FAIL(), e);
         }
     }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
index 061dd79..8607348 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/BasicController.java
@@ -23,6 +23,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Locale;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
@@ -65,7 +66,8 @@ public class BasicController {
         Throwable cause = ex;
         while (cause != null) {
             if (cause.getClass().getPackage().getName().startsWith("org.apache.hadoop.hbase")) {
-                return new ErrorResponse(req.getRequestURL().toString(), new InternalErrorException(String.format(msg.getHBASE_FAIL(), ex.getMessage()), ex));
+                return new ErrorResponse(req.getRequestURL().toString(), new InternalErrorException(
+                        String.format(Locale.ROOT, msg.getHBASE_FAIL(), ex.getMessage()), ex));
             }
             cause = cause.getCause();
         }
@@ -117,7 +119,8 @@ public class BasicController {
 
     protected void setDownloadResponse(String downloadFile, final HttpServletResponse response) {
         File file = new File(downloadFile);
-        try (InputStream fileInputStream = new FileInputStream(file); OutputStream output = response.getOutputStream()) {
+        try (InputStream fileInputStream = new FileInputStream(file);
+                OutputStream output = response.getOutputStream()) {
             response.reset();
             response.setContentType("application/octet-stream");
             response.setContentLength((int) (file.length()));
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 3f30ab9..a78f26a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -295,14 +296,17 @@ public class CubeController extends BasicController {
      *
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/refresh_lookup", method = { RequestMethod.PUT }, produces = { "application/json" })
+    @RequestMapping(value = "/{cubeName}/refresh_lookup", method = { RequestMethod.PUT }, produces = {
+            "application/json" })
     @ResponseBody
-    public JobInstance reBuildLookupSnapshot(@PathVariable String cubeName, @RequestBody LookupSnapshotBuildRequest request) {
+    public JobInstance reBuildLookupSnapshot(@PathVariable String cubeName,
+            @RequestBody LookupSnapshotBuildRequest request) {
         try {
             final CubeManager cubeMgr = cubeService.getCubeManager();
             final CubeInstance cube = cubeMgr.getCube(cubeName);
             String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
-            return jobService.submitLookupSnapshotJob(cube, request.getLookupTableName(), request.getSegmentIDs(), submitter);
+            return jobService.submitLookupSnapshotJob(cube, request.getLookupTableName(), request.getSegmentIDs(),
+                    submitter);
         } catch (IOException e) {
             logger.error(e.getLocalizedMessage(), e);
             throw new InternalErrorException(e.getLocalizedMessage());
@@ -1007,7 +1011,7 @@ public class CubeController extends BasicController {
         CubeInstance cubeInstance = cubeService.getCubeManager().getCube(cubeName);
         if (cubeInstance == null) {
             Message msg = MsgPicker.getMsg();
-            throw new NotFoundException(String.format(msg.getCUBE_NOT_FOUND(), cubeName));
+            throw new NotFoundException(String.format(Locale.ROOT, msg.getCUBE_NOT_FOUND(), cubeName));
         }
     }
 
@@ -1015,7 +1019,6 @@ public class CubeController extends BasicController {
         checkBuildingSegment(cube, cube.getConfig().getMaxBuildingSegments());
     }
 
-
     private void checkBuildingSegment(CubeInstance cube, int maxBuildingSeg) {
         if (cube.getBuildingSegments().size() >= maxBuildingSeg) {
             throw new TooManyRequestException(
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
index a27e148..43d67ab 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ModelController.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.controller;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
@@ -114,7 +115,8 @@ public class ModelController extends BasicController {
         }
         if (!ValidateUtil.isAlphanumericUnderscore(modelDesc.getName())) {
             throw new BadRequestException(
-                    String.format("Invalid model name %s, only letters, numbers and underscore " + "supported."),
+                    String.format(Locale.ROOT,
+                            "Invalid model name %s, only letters, numbers and underscore " + "supported."),
                     modelDesc.getName());
         }
 
@@ -201,8 +203,8 @@ public class ModelController extends BasicController {
             throw new BadRequestException("New model name should not be empty.");
         }
         if (!ValidateUtil.isAlphanumericUnderscore(newModelName)) {
-            throw new BadRequestException(String
-                    .format("Invalid model name %s, only letters, numbers and underscore supported.", newModelName));
+            throw new BadRequestException(String.format(Locale.ROOT,
+                    "Invalid model name %s, only letters, numbers and underscore supported.", newModelName));
         }
 
         DataModelDesc newModelDesc = DataModelDesc.getCopyOf(modelDesc);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
index 44eeffe..ecea557 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/ProjectController.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.JsonUtil;
@@ -128,7 +129,8 @@ public class ProjectController extends BasicController {
 
         if (!ValidateUtil.isAlphanumericUnderscore(projectDesc.getName())) {
             throw new BadRequestException(
-                    String.format("Invalid Project name %s, only letters, numbers and underscore supported."),
+                    String.format(Locale.ROOT,
+                            "Invalid Project name %s, only letters, numbers and underscore " + "supported."),
                     projectDesc.getName());
         }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
index 828c674..978450c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/QueryController.java
@@ -24,6 +24,7 @@ import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeSet;
 
@@ -118,7 +119,8 @@ public class QueryController extends BasicController {
 
     @RequestMapping(value = "/saved_queries", method = RequestMethod.GET, produces = { "application/json" })
     @ResponseBody
-    public List<Query> getQueries(@RequestParam(value = "project", required = false) String project) throws IOException {
+    public List<Query> getQueries(@RequestParam(value = "project", required = false) String project)
+            throws IOException {
         String creator = SecurityContextHolder.getContext().getAuthentication().getName();
         return queryService.getQueries(creator, project);
     }
@@ -137,7 +139,7 @@ public class QueryController extends BasicController {
         SQLResponse result = queryService.doQueryWithCache(sqlRequest);
         response.setContentType("text/" + format + ";charset=utf-8");
 
-        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS");
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS", Locale.ROOT);
         Date now = new Date();
         String nowStr = sdf.format(now);
         response.setHeader("Content-Disposition", "attachment; filename=\"" + nowStr + ".result." + format + "\"");
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
index 66621c7..488b7e0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -31,8 +32,8 @@ import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.exception.NotFoundException;
 import org.apache.kylin.rest.request.CardinalityRequest;
 import org.apache.kylin.rest.request.HiveTableRequest;
-import org.apache.kylin.rest.service.TableACLService;
 import org.apache.kylin.rest.response.TableSnapshotResponse;
+import org.apache.kylin.rest.service.TableACLService;
 import org.apache.kylin.rest.service.TableService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,7 +75,8 @@ public class TableController extends BasicController {
      */
     @RequestMapping(value = "", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    public List<TableDesc> getTableDesc(@RequestParam(value = "ext", required = false) boolean withExt, @RequestParam(value = "project", required = true) String project) throws IOException {
+    public List<TableDesc> getTableDesc(@RequestParam(value = "ext", required = false) boolean withExt,
+            @RequestParam(value = "project", required = true) String project) throws IOException {
         try {
             return tableService.getTableDescByProject(project, withExt);
         } catch (IOException e) {
@@ -90,7 +92,8 @@ public class TableController extends BasicController {
      * @return Table metadata array
      * @throws IOException
      */
-    @RequestMapping(value = "/{project}/{tableName:.+}", method = { RequestMethod.GET }, produces = { "application/json" })
+    @RequestMapping(value = "/{project}/{tableName:.+}", method = { RequestMethod.GET }, produces = {
+            "application/json" })
     @ResponseBody
     public TableDesc getTableDesc(@PathVariable String tableName, @PathVariable String project) {
         TableDesc table = tableService.getTableDescByName(tableName, false, project);
@@ -101,7 +104,8 @@ public class TableController extends BasicController {
 
     @RequestMapping(value = "/{tables}/{project}", method = { RequestMethod.POST }, produces = { "application/json" })
     @ResponseBody
-    public Map<String, String[]> loadHiveTables(@PathVariable String tables, @PathVariable String project, @RequestBody HiveTableRequest request) throws IOException {
+    public Map<String, String[]> loadHiveTables(@PathVariable String tables, @PathVariable String project,
+            @RequestBody HiveTableRequest request) throws IOException {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         Map<String, String[]> result = new HashMap<String, String[]>();
         String[] tableNames = StringUtil.splitAndTrim(tables, ",");
@@ -159,14 +163,16 @@ public class TableController extends BasicController {
      * @return Table metadata array
      * @throws IOException
      */
-    @RequestMapping(value = "/{project}/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = { "application/json" })
+    @RequestMapping(value = "/{project}/{tableNames}/cardinality", method = { RequestMethod.PUT }, produces = {
+            "application/json" })
     @ResponseBody
-    public CardinalityRequest generateCardinality(@PathVariable String tableNames, @RequestBody CardinalityRequest request, @PathVariable String project) throws Exception {
+    public CardinalityRequest generateCardinality(@PathVariable String tableNames,
+            @RequestBody CardinalityRequest request, @PathVariable String project) throws Exception {
         String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
         String[] tables = tableNames.split(",");
         try {
             for (String table : tables) {
-                tableService.calculateCardinality(table.trim().toUpperCase(), submitter, project);
+                tableService.calculateCardinality(table.trim().toUpperCase(Locale.ROOT), submitter, project);
             }
         } catch (IOException e) {
             logger.error("Failed to calculate cardinality", e);
@@ -183,7 +189,8 @@ public class TableController extends BasicController {
      */
     @RequestMapping(value = "/hive", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    private List<String> showHiveDatabases(@RequestParam(value = "project", required = false) String project) throws IOException {
+    private List<String> showHiveDatabases(@RequestParam(value = "project", required = false) String project)
+            throws IOException {
         try {
             return tableService.getSourceDbNames(project);
         } catch (Throwable e) {
@@ -200,7 +207,8 @@ public class TableController extends BasicController {
      */
     @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET }, produces = { "application/json" })
     @ResponseBody
-    private List<String> showHiveTables(@PathVariable String database, @RequestParam(value = "project", required = false) String project) throws IOException {
+    private List<String> showHiveTables(@PathVariable String database,
+            @RequestParam(value = "project", required = false) String project) throws IOException {
         try {
             return tableService.getSourceTableNames(project, database);
         } catch (Throwable e) {
@@ -211,13 +219,15 @@ public class TableController extends BasicController {
 
     @RequestMapping(value = "/{project}/{tableName}/{snapshotID}/snapshotLocalCache", method = { RequestMethod.PUT })
     @ResponseBody
-    public void updateSnapshotLocalCache(@PathVariable final String project, @PathVariable final String tableName, @PathVariable final String snapshotID) {
+    public void updateSnapshotLocalCache(@PathVariable final String project, @PathVariable final String tableName,
+            @PathVariable final String snapshotID) {
         tableService.updateSnapshotLocalCache(project, tableName, snapshotID);
     }
 
     @RequestMapping(value = "/{tableName}/{snapshotID}/snapshotLocalCache/state", method = { RequestMethod.GET })
     @ResponseBody
-    public String getSnapshotLocalCacheState(@PathVariable final String tableName, @PathVariable final String snapshotID) {
+    public String getSnapshotLocalCacheState(@PathVariable final String tableName,
+            @PathVariable final String snapshotID) {
         return tableService.getSnapshotLocalCacheState(tableName, snapshotID);
     }
 
@@ -229,7 +239,8 @@ public class TableController extends BasicController {
 
     @RequestMapping(value = "/{project}/{tableName}/snapshots", method = { RequestMethod.GET })
     @ResponseBody
-    public List<TableSnapshotResponse> getTableSnapshots(@PathVariable final String project, @PathVariable final String tableName) throws IOException {
+    public List<TableSnapshotResponse> getTableSnapshots(@PathVariable final String project,
+            @PathVariable final String tableName) throws IOException {
         return tableService.getLookupTableSnapshots(project, tableName);
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
index 40fc5ef..6b99bee 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.metrics;
 
 import java.nio.charset.Charset;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -103,8 +104,8 @@ public class QueryMetricsFacade {
             RecordEvent rpcMetricsEvent = new TimedRecordEvent(
                     KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryRpcCall());
             setRPCWrapper(rpcMetricsEvent, //
-                    norm(sqlRequest.getProject()), entry.getRealizationName(),
-                    entry.getRpcServer(), entry.getException());
+                    norm(sqlRequest.getProject()), entry.getRealizationName(), entry.getRpcServer(),
+                    entry.getException());
             setRPCStats(rpcMetricsEvent, //
                     entry.getCallTimeMs(), entry.getSkippedRows(), entry.getScannedRows(), entry.getReturnedRows(),
                     entry.getAggregatedRows());
@@ -117,8 +118,7 @@ public class QueryMetricsFacade {
                     KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQuery());
             setQueryWrapper(queryMetricsEvent, //
                     user, sqlHashCode, sqlResponse.isStorageCacheUsed() ? "CACHE" : contextEntry.getQueryType(),
-                    norm(sqlRequest.getProject()), contextEntry.getRealization(),
-                    contextEntry.getRealizationType(),
+                    norm(sqlRequest.getProject()), contextEntry.getRealization(), contextEntry.getRealizationType(),
                     sqlResponse.getThrowable());
 
             long totalStorageReturnCount = 0L;
@@ -129,9 +129,9 @@ public class QueryMetricsFacade {
                             KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
 
                     setCubeWrapper(cubeSegmentMetricsEvent, //
-                            norm(sqlRequest.getProject()),
-                            segmentEntry.getCubeName(), segmentEntry.getSegmentName(), segmentEntry.getSourceCuboidId(),
-                            segmentEntry.getTargetCuboidId(), segmentEntry.getFilterMask());
+                            norm(sqlRequest.getProject()), segmentEntry.getCubeName(), segmentEntry.getSegmentName(),
+                            segmentEntry.getSourceCuboidId(), segmentEntry.getTargetCuboidId(),
+                            segmentEntry.getFilterMask());
 
                     setCubeStats(cubeSegmentMetricsEvent, //
                             segmentEntry.getCallCount(), segmentEntry.getCallTimeSum(), segmentEntry.getCallTimeMax(),
@@ -152,7 +152,7 @@ public class QueryMetricsFacade {
     }
 
     private static String norm(String project) {
-        return project.toUpperCase();
+        return project.toUpperCase(Locale.ROOT);
     }
 
     private static void setRPCWrapper(RecordEvent metricsEvent, String projectName, String realizationName,
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
index 74eae63..1173fe1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.security;
 
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.lang.ArrayUtils;
@@ -49,9 +50,11 @@ public class AuthoritiesPopulator extends DefaultLdapAuthoritiesPopulator {
      * @param contextSource
      * @param groupSearchBase
      */
-    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole, String defaultRole) {
+    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole,
+            String defaultRole) {
         super(contextSource, groupSearchBase);
-        this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole.toUpperCase()); // spring will convert group names to uppercase by default
+        this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole.toUpperCase(Locale.ROOT)); // spring will
+        // convert group names to uppercase by default
 
         String[] defaultRoles = StringUtils.split(defaultRole, ",");
         if (ArrayUtils.contains(defaultRoles, Constant.ROLE_MODELER)) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java b/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
index b677537..dc8ac74 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.Serializable;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.persistence.AclEntity;
 import org.apache.kylin.rest.service.AclService;
@@ -88,7 +89,7 @@ public class KylinAclPermissionEvaluator extends AclPermissionEvaluator {
             try {
                 p = kylinPermissionFactory.buildFromName(permString);
             } catch (IllegalArgumentException notfound) {
-                p = kylinPermissionFactory.buildFromName(permString.toUpperCase());
+                p = kylinPermissionFactory.buildFromName(permString.toUpperCase(Locale.ROOT));
             }
 
             if (p != null) {
@@ -105,7 +106,7 @@ public class KylinAclPermissionEvaluator extends AclPermissionEvaluator {
         ExternalAclProvider eap = ExternalAclProvider.getInstance();
         if (eap == null)
             return super.hasPermission(authentication, targetId, targetType, permission);
-        
+
         return checkExternalPermission(eap, authentication, targetType, targetId.toString(), permission);
     }
 }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 9eb9bb7..47b8027 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -31,6 +31,7 @@
 package org.apache.kylin.rest.security;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
@@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.CompareFilter;
@@ -96,9 +97,11 @@ public class MockHTable implements Table {
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
-    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<>(
+            Bytes.BYTES_COMPARATOR);
 
-    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
+    private static List<KeyValue> toKeyValue(byte[] row,
+            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
         return toKeyValue(row, rowdata, 0, Long.MAX_VALUE, maxVersions);
     }
 
@@ -163,7 +166,9 @@ public class MockHTable implements Table {
         throw new RuntimeException(this.getClass() + " does NOT implement this method.");
     }
 
-    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart, long timestampEnd, int maxVersions) {
+    private static List<KeyValue> toKeyValue(byte[] row,
+            NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart,
+            long timestampEnd, int maxVersions) {
         List<KeyValue> ret = new ArrayList<KeyValue>();
         for (byte[] family : rowdata.keySet())
             for (byte[] qualifier : rowdata.get(family).keySet()) {
@@ -238,12 +243,14 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public <R> void batchCallback(List<? extends Row> actions, Object[] results, Batch.Callback<R> callback) throws IOException, InterruptedException {
+    public <R> void batchCallback(List<? extends Row> actions, Object[] results, Batch.Callback<R> callback)
+            throws IOException, InterruptedException {
 
     }
 
     @Override
-    public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback) throws IOException, InterruptedException {
+    public <R> Object[] batchCallback(List<? extends Row> actions, Batch.Callback<R> callback)
+            throws IOException, InterruptedException {
         return new Object[0];
     }
 
@@ -267,11 +274,13 @@ public class MockHTable implements Table {
                     qualifiers = data.get(row).get(family).navigableKeySet();
                 for (byte[] qualifier : qualifiers) {
                     if (qualifier == null)
-                        qualifier = "".getBytes();
-                    if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier) || data.get(row).get(family).get(qualifier).isEmpty())
+                        qualifier = "".getBytes(StandardCharsets.UTF_8);
+                    if (!data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier)
+                            || data.get(row).get(family).get(qualifier).isEmpty())
                         continue;
                     Map.Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
-                    kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
+                    kvs.add(new KeyValue(row, family, qualifier, timestampAndValue.getKey(),
+                            timestampAndValue.getValue()));
                 }
             }
         }
@@ -320,7 +329,8 @@ public class MockHTable implements Table {
 
             List<KeyValue> kvs = null;
             if (!scan.hasFamilies()) {
-                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(), scan.getMaxVersions());
+                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(),
+                        scan.getMaxVersions());
             } else {
                 kvs = new ArrayList<KeyValue>();
                 for (byte[] family : scan.getFamilyMap().keySet()) {
@@ -482,16 +492,19 @@ public class MockHTable implements Table {
     @Override
     public void put(Put put) throws IOException {
         byte[] row = put.getRow();
-        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row, new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
+        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row,
+                new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
         for (byte[] family : put.getFamilyMap().keySet()) {
-            if (columnFamilies.contains(new String(family)) == false) {
-                throw new RuntimeException("Not Exists columnFamily : " + new String(family));
+            if (columnFamilies.contains(new String(family, StandardCharsets.UTF_8)) == false) {
+                throw new RuntimeException("Not Exists columnFamily : " + new String(family, StandardCharsets.UTF_8));
             }
-            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
+            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family,
+                    new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
             for (KeyValue kv : put.getFamilyMap().get(family)) {
                 kv.updateLatestStamp(Bytes.toBytes(System.currentTimeMillis()));
                 byte[] qualifier = kv.getQualifier();
-                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier, new TreeMap<Long, byte[]>());
+                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier,
+                        new TreeMap<Long, byte[]>());
                 qualifierData.put(kv.getTimestamp(), kv.getValue());
             }
         }
@@ -510,9 +523,13 @@ public class MockHTable implements Table {
 
     private boolean check(byte[] row, byte[] family, byte[] qualifier, byte[] value) {
         if (value == null || value.length == 0)
-            return !data.containsKey(row) || !data.get(row).containsKey(family) || !data.get(row).get(family).containsKey(qualifier);
+            return !data.containsKey(row) || !data.get(row).containsKey(family)
+                    || !data.get(row).get(family).containsKey(qualifier);
         else
-            return data.containsKey(row) && data.get(row).containsKey(family) && data.get(row).get(family).containsKey(qualifier) && !data.get(row).get(family).get(qualifier).isEmpty() && Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
+            return data.containsKey(row) && data.get(row).containsKey(family)
+                    && data.get(row).get(family).containsKey(qualifier)
+                    && !data.get(row).get(family).get(qualifier).isEmpty()
+                    && Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
     }
 
     /**
@@ -528,7 +545,8 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
+    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp,
+            byte[] bytes3, Put put) throws IOException {
         return false;
     }
 
@@ -581,7 +599,8 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
-    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException {
+    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete)
+            throws IOException {
         if (check(row, family, qualifier, value)) {
             delete(delete);
             return true;
@@ -590,7 +609,8 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
+    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp,
+            byte[] bytes3, Delete delete) throws IOException {
         return false;
     }
 
@@ -611,7 +631,8 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, Durability durability) throws IOException {
+    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, Durability durability)
+            throws IOException {
         return 0;
     }
 
@@ -629,13 +650,15 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable) throws ServiceException, Throwable {
+    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> service, byte[] startKey, byte[] endKey,
+            Batch.Call<T, R> callable) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     @Override
-    public <T extends Service, R> void coprocessorService(Class<T> service, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback) throws ServiceException, Throwable {
+    public <T extends Service, R> void coprocessorService(Class<T> service, byte[] startKey, byte[] endKey,
+            Batch.Call<T, R> callable, Batch.Callback<R> callback) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
@@ -658,19 +681,23 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
+    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
+            Message request, byte[] startKey, byte[] endKey, R responsePrototype) throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     @Override
-    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey, R responsePrototype, Batch.Callback<R> callback) throws ServiceException, Throwable {
+    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor,
+            Message request, byte[] startKey, byte[] endKey, R responsePrototype, Batch.Callback<R> callback)
+            throws ServiceException, Throwable {
         throw new NotImplementedException();
 
     }
 
     //@Override  (only since 0.98.8)
-    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException {
+    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp,
+            byte[] value, RowMutations mutation) throws IOException {
         throw new NotImplementedException();
 
     }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
index 0a8e847..566e1cd 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
@@ -24,6 +24,7 @@ import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.lang.reflect.Method;
 import java.nio.charset.Charset;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.commons.io.IOUtils;
@@ -72,7 +73,7 @@ public class PasswordPlaceholderConfigurer extends PropertyPlaceholderConfigurer
     }
 
     protected String resolvePlaceholder(String placeholder, Properties props) {
-        if (placeholder.toLowerCase().contains("password")) {
+        if (placeholder.toLowerCase(Locale.ROOT).contains("password")) {
             return EncryptUtil.decrypt(props.getProperty(placeholder));
         } else {
             return props.getProperty(placeholder);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 4f439fe..aa0d549 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -24,6 +24,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import javax.annotation.Nullable;
@@ -99,7 +100,8 @@ public class AclService implements MutableAclService, InitializingBean {
 
     @Override
     public void afterPropertiesSet() throws Exception {
-        Broadcaster.getInstance(KylinConfig.getInstanceFromEnv()).registerStaticListener(new AclRecordSyncListener(), "acl");
+        Broadcaster.getInstance(KylinConfig.getInstanceFromEnv()).registerStaticListener(new AclRecordSyncListener(),
+                "acl");
     }
 
     private class AclRecordSyncListener extends Broadcaster.Listener {
@@ -156,7 +158,7 @@ public class AclService implements MutableAclService, InitializingBean {
         Message msg = MsgPicker.getMsg();
         Map<ObjectIdentity, Acl> aclsMap = readAclsById(Arrays.asList(object), sids);
         if (!aclsMap.containsKey(object)) {
-            throw new BadRequestException(String.format(msg.getNO_ACL_ENTRY(), object));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getNO_ACL_ENTRY(), object));
         }
         return aclsMap.get(object);
     }
@@ -173,7 +175,7 @@ public class AclService implements MutableAclService, InitializingBean {
             AclRecord record = getAclRecordByCache(objID(oid));
             if (record == null) {
                 Message msg = MsgPicker.getMsg();
-                throw new NotFoundException(String.format(msg.getACL_INFO_NOT_FOUND(), oid));
+                throw new NotFoundException(String.format(Locale.ROOT, msg.getACL_INFO_NOT_FOUND(), oid));
             }
 
             Acl parentAcl = null;
@@ -209,7 +211,8 @@ public class AclService implements MutableAclService, InitializingBean {
             List<ObjectIdentity> children = findChildren(objectIdentity);
             if (!deleteChildren && children.size() > 0) {
                 Message msg = MsgPicker.getMsg();
-                throw new BadRequestException(String.format(msg.getIDENTITY_EXIST_CHILDREN(), objectIdentity));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getIDENTITY_EXIST_CHILDREN(), objectIdentity));
             }
             for (ObjectIdentity oid : children) {
                 deleteAcl(oid, deleteChildren);
@@ -272,7 +275,7 @@ public class AclService implements MutableAclService, InitializingBean {
                 return aclMap.get(id);
             }
         }
-        
+
         try (AutoLock l = lock.lockForWrite()) {
             crud.reloadAll();
             return aclMap.get(id);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
index 33957ab..33033cc 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclTableMigrationTool.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.service;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -198,8 +199,8 @@ public class AclTableMigrationTool {
 
     private ObjectIdentityImpl getDomainObjectInfoFromRs(Result result) {
         String type = new String(result.getValue(Bytes.toBytes(AclConstant.ACL_INFO_FAMILY),
-                Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_TYPE_COLUMN)));
-        String id = new String(result.getRow());
+                Bytes.toBytes(AclConstant.ACL_INFO_FAMILY_TYPE_COLUMN)), StandardCharsets.UTF_8);
+        String id = new String(result.getRow(), StandardCharsets.UTF_8);
         ObjectIdentityImpl newInfo = new ObjectIdentityImpl(type, id);
         return newInfo;
     }
@@ -228,7 +229,7 @@ public class AclTableMigrationTool {
 
         if (familyMap != null && !familyMap.isEmpty()) {
             for (Map.Entry<byte[], byte[]> entry : familyMap.entrySet()) {
-                String sid = new String(entry.getKey());
+                String sid = new String(entry.getKey(), StandardCharsets.UTF_8);
                 LegacyAceInfo aceInfo = aceSerializer.deserialize(entry.getValue());
                 if (null != aceInfo) {
                     allAceInfoMap.put(sid, aceInfo);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
index f7881f1..23d523e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
@@ -20,13 +20,13 @@ package org.apache.kylin.rest.service;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Properties;
 import java.util.TreeMap;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang3.StringUtils;
@@ -39,6 +39,8 @@ import org.slf4j.LoggerFactory;
 import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 
+import com.google.common.collect.Lists;
+
 /**
  */
 @Component("adminService")
@@ -49,7 +51,7 @@ public class AdminService extends BasicService {
      * Get Java Env info as string
      */
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
-    public String getEnv() throws ConfigurationException {
+    public String getEnv() throws ConfigurationException, UnsupportedEncodingException {
         PropertiesConfiguration tempConfig = new PropertiesConfiguration();
         OrderedProperties orderedProperties = new OrderedProperties(new TreeMap<String, String>());
         // Add Java Env
@@ -76,7 +78,7 @@ public class AdminService extends BasicService {
 
         // do save
         tempConfig.save(baos);
-        content = baos.toString();
+        content = baos.toString("UTF-8");
         return content;
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 58a3b2f..96d60c7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -167,8 +167,10 @@ public class CubeService extends BasicService implements InitializingBean {
         List<CubeInstance> filterCubes = new ArrayList<CubeInstance>();
         for (CubeInstance cubeInstance : filterModelCubes) {
             boolean isCubeMatch = (null == cubeName)
-                    || (!exactMatch && cubeInstance.getName().toLowerCase().contains(cubeName.toLowerCase()))
-                    || (exactMatch && cubeInstance.getName().toLowerCase().equals(cubeName.toLowerCase()));
+                    || (!exactMatch && cubeInstance.getName().toLowerCase(Locale.ROOT)
+                            .contains(cubeName.toLowerCase(Locale.ROOT)))
+                    || (exactMatch && cubeInstance.getName().toLowerCase(Locale.ROOT)
+                            .equals(cubeName.toLowerCase(Locale.ROOT)));
 
             if (isCubeMatch) {
                 filterCubes.add(cubeInstance);
@@ -200,11 +202,11 @@ public class CubeService extends BasicService implements InitializingBean {
         String cubeName = desc.getName();
 
         if (getCubeManager().getCube(cubeName) != null) {
-            throw new BadRequestException(String.format(msg.getCUBE_ALREADY_EXIST(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getCUBE_ALREADY_EXIST(), cubeName));
         }
 
         if (getCubeDescManager().getCubeDesc(desc.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getCUBE_DESC_ALREADY_EXIST(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getCUBE_DESC_ALREADY_EXIST(), desc.getName()));
         }
 
         String owner = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -273,12 +275,12 @@ public class CubeService extends BasicService implements InitializingBean {
         final List<CubingJob> cubingJobs = jobService.listJobsByRealizationName(cube.getName(), null,
                 EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING));
         if (!cubingJobs.isEmpty()) {
-            throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
 
         //double check again
         if (!forceUpdate && !cube.getDescriptor().consistentWith(desc)) {
-            throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
         }
 
         CubeDesc updatedCubeDesc = getCubeDescManager().updateCubeDesc(desc);
@@ -302,7 +304,7 @@ public class CubeService extends BasicService implements InitializingBean {
         final List<CubingJob> cubingJobs = jobService.listJobsByRealizationName(cube.getName(), null,
                 EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR));
         if (!cubingJobs.isEmpty()) {
-            throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDISCARD_JOB_FIRST(), cube.getName()));
         }
 
         try {
@@ -322,20 +324,21 @@ public class CubeService extends BasicService implements InitializingBean {
                 List<RealizationEntry> cubeRealizationEntries = instance.getRealizationEntries();
 
                 boolean needUpdateHybrid = false;
-                for (RealizationEntry cubeRealizationEntry : cubeRealizationEntries){
-                    if (cube.getName().equals(cubeRealizationEntry.getRealization())){
+                for (RealizationEntry cubeRealizationEntry : cubeRealizationEntries) {
+                    if (cube.getName().equals(cubeRealizationEntry.getRealization())) {
                         needUpdateHybrid = true;
                         cubeRealizationEntries.remove(cubeRealizationEntry);
                         break;
                     }
                 }
 
-                if (needUpdateHybrid){
+                if (needUpdateHybrid) {
                     String[] cubeNames = new String[cubeRealizationEntries.size()];
-                    for (int i = 0; i < cubeRealizationEntries.size(); i++){
+                    for (int i = 0; i < cubeRealizationEntries.size(); i++) {
                         cubeNames[i] = cubeRealizationEntries.get(i).getRealization();
                     }
-                    hybridService.updateHybridCubeNoCheck(instance.getName(), projectInstance.getName(), cube.getModel().getName(), cubeNames);
+                    hybridService.updateHybridCubeNoCheck(instance.getName(), projectInstance.getName(),
+                            cube.getModel().getName(), cubeNames);
                 }
             }
         }
@@ -361,12 +364,13 @@ public class CubeService extends BasicService implements InitializingBean {
         final List<CubingJob> cubingJobs = jobService.listJobsByRealizationName(cubeName, null, EnumSet
                 .of(ExecutableState.READY, ExecutableState.RUNNING, ExecutableState.ERROR, ExecutableState.STOPPED));
         if (!cubingJobs.isEmpty()) {
-            throw new BadRequestException(String.format(msg.getDISCARD_JOB_FIRST(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDISCARD_JOB_FIRST(), cubeName));
         }
 
         RealizationStatusEnum ostatus = cube.getStatus();
         if (null != ostatus && !RealizationStatusEnum.DISABLED.equals(ostatus)) {
-            throw new BadRequestException(String.format(msg.getPURGE_NOT_DISABLED_CUBE(), cubeName, ostatus));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getPURGE_NOT_DISABLED_CUBE(), cubeName, ostatus));
         }
 
         this.releaseAllSegments(cube);
@@ -389,7 +393,8 @@ public class CubeService extends BasicService implements InitializingBean {
 
         RealizationStatusEnum ostatus = cube.getStatus();
         if (null != ostatus && !RealizationStatusEnum.READY.equals(ostatus)) {
-            throw new BadRequestException(String.format(msg.getDISABLE_NOT_READY_CUBE(), cubeName, ostatus));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getDISABLE_NOT_READY_CUBE(), cubeName, ostatus));
         }
 
         return getCubeManager().updateCubeStatus(cube, RealizationStatusEnum.DISABLED);
@@ -403,16 +408,17 @@ public class CubeService extends BasicService implements InitializingBean {
         RealizationStatusEnum ostatus = cube.getStatus();
 
         if (!cube.getStatus().equals(RealizationStatusEnum.DISABLED)) {
-            throw new BadRequestException(String.format(msg.getENABLE_NOT_DISABLED_CUBE(), cubeName, ostatus));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getENABLE_NOT_DISABLED_CUBE(), cubeName, ostatus));
         }
 
         if (cube.getSegments(SegmentStatusEnum.READY).size() == 0) {
-            throw new BadRequestException(String.format(msg.getNO_READY_SEGMENT(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getNO_READY_SEGMENT(), cubeName));
         }
 
         if (!cube.getDescriptor().checkSignature()) {
             throw new BadRequestException(
-                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+                    String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
         }
     }
 
@@ -519,29 +525,32 @@ public class CubeService extends BasicService implements InitializingBean {
         }
 
         if (toDelete == null) {
-            throw new BadRequestException(String.format(msg.getSEG_NOT_FOUND(), segmentName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getSEG_NOT_FOUND(), segmentName));
         }
 
         if (toDelete.getStatus() != SegmentStatusEnum.READY) {
             if (toDelete.getStatus() == SegmentStatusEnum.NEW) {
                 if (!isOrphonSegment(cube, toDelete.getUuid())) {
-                    throw new BadRequestException(String.format(msg.getDELETE_NOT_READY_SEG(), segmentName));
+                    throw new BadRequestException(
+                            String.format(Locale.ROOT, msg.getDELETE_NOT_READY_SEG(), segmentName));
                 }
             } else {
-                throw new BadRequestException(String.format(msg.getDELETE_NOT_READY_SEG(), segmentName));
+                throw new BadRequestException(String.format(Locale.ROOT, msg.getDELETE_NOT_READY_SEG(), segmentName));
             }
         }
 
         if (!segmentName.equals(cube.getSegments().get(0).getName())
                 && !segmentName.equals(cube.getSegments().get(cube.getSegments().size() - 1).getName())) {
-            logger.warn(String.format(msg.getDELETE_SEGMENT_CAUSE_GAPS(), cube.getName(), segmentName));
+            logger.warn(String.format(Locale.ROOT, msg.getDELETE_SEGMENT_CAUSE_GAPS(), cube.getName(), segmentName));
         }
 
         return CubeManager.getInstance(getConfig()).updateCubeDropSegments(cube, toDelete);
     }
 
     public boolean isOrphonSegment(CubeInstance cube, String segId) {
-        List<JobInstance> jobInstances = jobService.searchJobsByCubeName(cube.getName(), cube.getProject(), Lists.newArrayList(JobStatusEnum.NEW, JobStatusEnum.PENDING, JobStatusEnum.RUNNING, JobStatusEnum.ERROR, JobStatusEnum.STOPPED),
+        List<JobInstance> jobInstances = jobService.searchJobsByCubeName(cube.getName(),
+                cube.getProject(), Lists.newArrayList(JobStatusEnum.NEW, JobStatusEnum.PENDING, JobStatusEnum.RUNNING,
+                        JobStatusEnum.ERROR, JobStatusEnum.STOPPED),
                 JobTimeFilterEnum.ALL, JobService.JobSearchMode.CUBING_ONLY);
         for (JobInstance jobInstance : jobInstances) {
             // if there are segment related jobs, can not delete this segment.
@@ -575,15 +584,15 @@ public class CubeService extends BasicService implements InitializingBean {
         update.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
         update.setCuboids(Maps.<Long, Long> newHashMap());
         update.setCuboidsRecommend(Sets.<Long> newHashSet());
-        update.setUpdateTableSnapshotPath(Maps.<String, String>newHashMap());
+        update.setUpdateTableSnapshotPath(Maps.<String, String> newHashMap());
         CubeManager.getInstance(getConfig()).updateCube(update);
     }
 
     public void updateOnNewSegmentReady(String cubeName) {
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase())
-                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase(Locale.ROOT))
+                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase(Locale.ROOT))) {
             CubeInstance cube = getCubeManager().getCube(cubeName);
             if (cube != null) {
                 CubeSegment seg = cube.getLatestBuiltSegment();
@@ -666,18 +675,20 @@ public class CubeService extends BasicService implements InitializingBean {
         }
         if (!ValidateUtil.isAlphanumericUnderscore(cubeName)) {
             logger.info("Invalid Cube name {}, only letters, numbers and underscore supported.", cubeName);
-            throw new BadRequestException(String.format(msg.getINVALID_CUBE_NAME(), cubeName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getINVALID_CUBE_NAME(), cubeName));
         }
 
         if (!isDraft) {
             DataModelDesc modelDesc = modelService.getDataModelManager().getDataModelDesc(desc.getModelName());
             if (modelDesc == null) {
-                throw new BadRequestException(String.format(msg.getMODEL_NOT_FOUND(), desc.getModelName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getMODEL_NOT_FOUND(), desc.getModelName()));
             }
 
             if (modelDesc.isDraft()) {
                 logger.info("Cannot use draft model.");
-                throw new BadRequestException(String.format(msg.getUSE_DRAFT_MODEL(), desc.getModelName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getUSE_DRAFT_MODEL(), desc.getModelName()));
             }
         }
     }
@@ -739,7 +750,8 @@ public class CubeService extends BasicService implements InitializingBean {
 
         try {
             if (cube.getSegments().size() != 0 && !cube.getDescriptor().consistentWith(desc)) {
-                throw new BadRequestException(String.format(msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC(), desc.getName()));
             }
 
             desc = updateCubeAndDesc(cube, desc, projectName, true);
@@ -774,9 +786,13 @@ public class CubeService extends BasicService implements InitializingBean {
             RootPersistentEntity e = d.getEntity();
             if (e instanceof CubeDesc) {
                 CubeDesc c = (CubeDesc) e;
-                if ((cubeName == null || (exactMatch && cubeName.toLowerCase().equals(c.getName().toLowerCase()))
-                        || (!exactMatch && c.getName().toLowerCase().contains(cubeName.toLowerCase())))
-                        && (modelName == null || modelName.toLowerCase().equals(c.getModelName().toLowerCase()))) {
+                if ((cubeName == null
+                        || (exactMatch
+                                && cubeName.toLowerCase(Locale.ROOT).equals(c.getName().toLowerCase(Locale.ROOT)))
+                        || (!exactMatch
+                                && c.getName().toLowerCase(Locale.ROOT).contains(cubeName.toLowerCase(Locale.ROOT))))
+                        && (modelName == null || modelName.toLowerCase(Locale.ROOT)
+                                .equals(c.getModelName().toLowerCase(Locale.ROOT)))) {
                     // backward compability for percentile
                     if (c.getMeasures() != null) {
                         for (MeasureDesc m : c.getMeasures()) {
@@ -923,8 +939,8 @@ public class CubeService extends BasicService implements InitializingBean {
                 "Destination configuration should not be empty.");
 
         String stringBuilder = ("%s/bin/kylin.sh org.apache.kylin.tool.CubeMigrationCLI %s %s %s %s %s %s true true");
-        String cmd = String.format(stringBuilder, KylinConfig.getKylinHome(), srcCfgUri, dstCfgUri, cube.getName(),
-                projectName, config.isAutoMigrateCubeCopyAcl(), config.isAutoMigrateCubePurge());
+        String cmd = String.format(Locale.ROOT, stringBuilder, KylinConfig.getKylinHome(), srcCfgUri, dstCfgUri,
+                cube.getName(), projectName, config.isAutoMigrateCubeCopyAcl(), config.isAutoMigrateCubePurge());
 
         logger.info("One click migration cmd: " + cmd);
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
index e548693..ec395e0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
@@ -16,11 +16,11 @@
  * limitations under the License.
 */
 
-
 package org.apache.kylin.rest.service;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
@@ -53,14 +53,14 @@ public class DashboardService extends BasicService {
     @Autowired
     private CubeService cubeService;
 
-    private enum CategoryEnum {QUERY, JOB}
+    private enum CategoryEnum {
+        QUERY, JOB
+    }
 
     private enum QueryDimensionEnum {
-        PROJECT(QueryPropertyEnum.PROJECT.toString()),
-        CUBE(QueryPropertyEnum.REALIZATION.toString()),
-        DAY(TimePropertyEnum.DAY_DATE.toString()),
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()),
-        MONTH(TimePropertyEnum.MONTH.toString());
+        PROJECT(QueryPropertyEnum.PROJECT.toString()), CUBE(QueryPropertyEnum.REALIZATION.toString()), DAY(
+                TimePropertyEnum.DAY_DATE.toString()), WEEK(
+                        TimePropertyEnum.WEEK_BEGIN_DATE.toString()), MONTH(TimePropertyEnum.MONTH.toString());
         private final String sql;
 
         QueryDimensionEnum(String sql) {
@@ -73,11 +73,9 @@ public class DashboardService extends BasicService {
     };
 
     private enum JobDimensionEnum {
-        PROJECT(JobPropertyEnum.PROJECT.toString()),
-        CUBE(JobPropertyEnum.CUBE.toString()),
-        DAY(TimePropertyEnum.DAY_DATE.toString()),
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()),
-        MONTH(TimePropertyEnum.MONTH.toString());
+        PROJECT(JobPropertyEnum.PROJECT.toString()), CUBE(JobPropertyEnum.CUBE.toString()), DAY(
+                TimePropertyEnum.DAY_DATE.toString()), WEEK(
+                        TimePropertyEnum.WEEK_BEGIN_DATE.toString()), MONTH(TimePropertyEnum.MONTH.toString());
         private final String sql;
 
         JobDimensionEnum(String sql) {
@@ -90,10 +88,10 @@ public class DashboardService extends BasicService {
     };
 
     private enum QueryMetricEnum {
-        QUERY_COUNT("count(*)"),
-        AVG_QUERY_LATENCY("sum(" + QueryPropertyEnum.TIME_COST.toString() + ")/(count(" + QueryPropertyEnum.TIME_COST.toString() + "))"),
-        MAX_QUERY_LATENCY("max(" + QueryPropertyEnum.TIME_COST.toString() + ")"),
-        MIN_QUERY_LATENCY("min(" + QueryPropertyEnum.TIME_COST.toString() + ")");
+        QUERY_COUNT("count(*)"), AVG_QUERY_LATENCY("sum(" + QueryPropertyEnum.TIME_COST.toString() + ")/(count("
+                + QueryPropertyEnum.TIME_COST.toString() + "))"), MAX_QUERY_LATENCY(
+                        "max(" + QueryPropertyEnum.TIME_COST.toString() + ")"), MIN_QUERY_LATENCY(
+                                "min(" + QueryPropertyEnum.TIME_COST.toString() + ")");
 
         private final String sql;
 
@@ -107,10 +105,10 @@ public class DashboardService extends BasicService {
     }
 
     private enum JobMetricEnum {
-        JOB_COUNT("count(*)"),
-        AVG_JOB_BUILD_TIME("sum(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")/count(" + JobPropertyEnum.PER_BYTES_TIME_COST + ")"),
-        MAX_JOB_BUILD_TIME("max(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"),
-        MIN_JOB_BUILD_TIME("min(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
+        JOB_COUNT("count(*)"), AVG_JOB_BUILD_TIME("sum(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")/count("
+                + JobPropertyEnum.PER_BYTES_TIME_COST + ")"), MAX_JOB_BUILD_TIME(
+                        "max(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), MIN_JOB_BUILD_TIME(
+                                "min(" + JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
 
         private final String sql;
 
@@ -133,7 +131,7 @@ public class DashboardService extends BasicService {
             totalCube += getHybridManager().listHybridInstances().size();
         } else {
             ProjectInstance project = getProjectManager().getProject(projectName);
-            totalCube +=  project.getRealizationCount(RealizationType.HYBRID);
+            totalCube += project.getRealizationCount(RealizationType.HYBRID);
         }
         Float minCubeExpansion = Float.POSITIVE_INFINITY;
         Float maxCubeExpansion = Float.NEGATIVE_INFINITY;
@@ -142,7 +140,8 @@ public class DashboardService extends BasicService {
             if (cubeInstance.getInputRecordSizeBytes() > 0) {
                 totalCubeSize += cubeInstance.getSizeKB();
                 totalRecoadSize += cubeInstance.getInputRecordSizeBytes();
-                Float cubeExpansion = new Float(cubeInstance.getSizeKB()) * 1024 / cubeInstance.getInputRecordSizeBytes();
+                Float cubeExpansion = new Float(cubeInstance.getSizeKB()) * 1024
+                        / cubeInstance.getInputRecordSizeBytes();
                 if (cubeExpansion > maxCubeExpansion) {
                     maxCubeExpansion = cubeExpansion;
                 }
@@ -178,33 +177,41 @@ public class DashboardService extends BasicService {
     }
 
     public String getQueryMetricsSQL(String startTime, String endTime, String projectName, String cubeName) {
-        String[] metrics = new String[] {QueryMetricEnum.QUERY_COUNT.toSQL(), QueryMetricEnum.AVG_QUERY_LATENCY.toSQL(), QueryMetricEnum.MAX_QUERY_LATENCY.toSQL(), QueryMetricEnum.MIN_QUERY_LATENCY.toSQL()};
+        String[] metrics = new String[] { QueryMetricEnum.QUERY_COUNT.toSQL(),
+                QueryMetricEnum.AVG_QUERY_LATENCY.toSQL(), QueryMetricEnum.MAX_QUERY_LATENCY.toSQL(),
+                QueryMetricEnum.MIN_QUERY_LATENCY.toSQL() };
         List<String> filters = getBaseFilters(CategoryEnum.QUERY, projectName, startTime, endTime);
         filters = addCubeFilter(filters, CategoryEnum.QUERY, cubeName);
-        return createSql(null, metrics, getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()), filters.toArray(new String[filters.size()]));
+        return createSql(null, metrics,
+                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()),
+                filters.toArray(new String[filters.size()]));
     }
 
     public String getJobMetricsSQL(String startTime, String endTime, String projectName, String cubeName) {
-        String[] metrics = new String[] {JobMetricEnum.JOB_COUNT.toSQL(), JobMetricEnum.AVG_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MAX_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MIN_JOB_BUILD_TIME.toSQL()};
+        String[] metrics = new String[] { JobMetricEnum.JOB_COUNT.toSQL(), JobMetricEnum.AVG_JOB_BUILD_TIME.toSQL(),
+                JobMetricEnum.MAX_JOB_BUILD_TIME.toSQL(), JobMetricEnum.MIN_JOB_BUILD_TIME.toSQL() };
         List<String> filters = getBaseFilters(CategoryEnum.JOB, projectName, startTime, endTime);
         filters = addCubeFilter(filters, CategoryEnum.JOB, cubeName);
-        return createSql(null, metrics, getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()), filters.toArray(new String[filters.size()]));
+        return createSql(null, metrics,
+                getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()),
+                filters.toArray(new String[filters.size()]));
     }
 
-    public String getChartSQL(String startTime, String endTime, String projectName, String cubeName, String dimension, String metric, String category) {
-        try{
+    public String getChartSQL(String startTime, String endTime, String projectName, String cubeName, String dimension,
+            String metric, String category) {
+        try {
             CategoryEnum categoryEnum = CategoryEnum.valueOf(category);
             String table = "";
             String[] dimensionSQL = null;
             String[] metricSQL = null;
 
-            if(categoryEnum == CategoryEnum.QUERY) {
-                dimensionSQL = new String[] {QueryDimensionEnum.valueOf(dimension).toSQL()};
-                metricSQL = new String[] {QueryMetricEnum.valueOf(metric).toSQL()};
+            if (categoryEnum == CategoryEnum.QUERY) {
+                dimensionSQL = new String[] { QueryDimensionEnum.valueOf(dimension).toSQL() };
+                metricSQL = new String[] { QueryMetricEnum.valueOf(metric).toSQL() };
                 table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery());
             } else if (categoryEnum == CategoryEnum.JOB) {
-                dimensionSQL = new String[] {JobDimensionEnum.valueOf(dimension).toSQL()};
-                metricSQL = new String[] {JobMetricEnum.valueOf(metric).toSQL()};
+                dimensionSQL = new String[] { JobDimensionEnum.valueOf(dimension).toSQL() };
+                metricSQL = new String[] { JobMetricEnum.valueOf(metric).toSQL() };
                 table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob());
             }
 
@@ -220,17 +227,17 @@ public class DashboardService extends BasicService {
     }
 
     public MetricsResponse transformChartData(SQLResponse sqlResponse) {
-        if(!sqlResponse.getIsException()){
+        if (!sqlResponse.getIsException()) {
             MetricsResponse metrics = new MetricsResponse();
             List<List<String>> results = sqlResponse.getResults();
             for (List<String> result : results) {
                 String dimension = result.get(0);
-                if (dimension !=null && !dimension.isEmpty()) {
+                if (dimension != null && !dimension.isEmpty()) {
                     String metric = result.get(1);
                     metrics.increase(dimension, getMetricValue(metric));
                 }
             }
-            return  metrics;
+            return metrics;
         }
         return null;
     }
@@ -248,7 +255,7 @@ public class DashboardService extends BasicService {
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
-    public void checkAuthorization() throws AccessDeniedException{
+    public void checkAuthorization() throws AccessDeniedException {
     }
 
     private List<String> getBaseFilters(CategoryEnum category, String projectName, String startTime, String endTime) {
@@ -262,7 +269,7 @@ public class DashboardService extends BasicService {
         filters.add(TimePropertyEnum.DAY_DATE.toString() + " >= '" + startTime + "'");
         filters.add(TimePropertyEnum.DAY_DATE.toString() + " <= '" + endTime + "'");
         if (!Strings.isNullOrEmpty(projectName)) {
-            filters.add(project + " ='" + projectName.toUpperCase() + "'");
+            filters.add(project + " ='" + projectName.toUpperCase(Locale.ROOT) + "'");
         } else {
             filters.add(project + " <> '" + MetricsManager.SYSTEM_PROJECT + "'");
         }
@@ -279,7 +286,7 @@ public class DashboardService extends BasicService {
             HybridInstance hybridInstance = getHybridManager().getHybridInstance(cubeName);
             if (null != hybridInstance) {
                 StringBuffer cubeNames = new StringBuffer();
-                for (CubeInstance cube:getCubeByHybrid(hybridInstance)) {
+                for (CubeInstance cube : getCubeByHybrid(hybridInstance)) {
                     cubeNames.append(",'" + cube.getName() + "'");
                 }
                 baseFilter.add(JobPropertyEnum.CUBE.toString() + " IN (" + cubeNames.substring(1) + ")");
@@ -320,7 +327,7 @@ public class DashboardService extends BasicService {
         if (filters != null && filters.length > 0) {
             StringBuffer filterSQL = new StringBuffer(" where ");
             filterSQL.append(filters[0]);
-            for(int i = 1; i < filters.length; i++) {
+            for (int i = 1; i < filters.length; i++) {
                 filterSQL.append(" and ");
                 filterSQL.append(filters[i]);
             }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
index fb6fbf1..57900eb 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DiagnosisService.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang3.StringUtils;
@@ -64,7 +65,7 @@ public class DiagnosisService extends BasicService {
         File[] files = destDir.listFiles();
         if (files == null) {
             throw new BadRequestException(
-                    String.format(msg.getDIAG_PACKAGE_NOT_AVAILABLE(), destDir.getAbsolutePath()));
+                    String.format(Locale.ROOT, msg.getDIAG_PACKAGE_NOT_AVAILABLE(), destDir.getAbsolutePath()));
         }
         for (File subDir : files) {
             if (subDir.isDirectory()) {
@@ -75,7 +76,8 @@ public class DiagnosisService extends BasicService {
                 }
             }
         }
-        throw new BadRequestException(String.format(msg.getDIAG_PACKAGE_NOT_FOUND(), destDir.getAbsolutePath()));
+        throw new BadRequestException(
+                String.format(Locale.ROOT, msg.getDIAG_PACKAGE_NOT_FOUND(), destDir.getAbsolutePath()));
     }
 
     public BadQueryHistory getProjectBadQueryHistory(String project) throws IOException {
@@ -108,7 +110,8 @@ public class DiagnosisService extends BasicService {
         logger.debug("DiagnosisInfoCLI args: " + Arrays.toString(args));
         File script = new File(KylinConfig.getKylinHome() + File.separator + "bin", "diag.sh");
         if (!script.exists()) {
-            throw new BadRequestException(String.format(msg.getDIAG_NOT_FOUND(), script.getAbsolutePath()));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getDIAG_NOT_FOUND(), script.getAbsolutePath()));
         }
 
         String diagCmd = script.getAbsolutePath() + " " + StringUtils.join(args, " ");
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
index f3742de..7775d66 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/EncodingService.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.service;
 
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.dimension.BooleanDimEnc;
 import org.apache.kylin.dimension.DateDimEnc;
@@ -42,16 +43,19 @@ public class EncodingService extends BasicService {
         Message msg = MsgPicker.getMsg();
 
         if (dataType.isIntegerFamily()) {
-            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
+            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME,
+                    DictionaryDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
         } else if (dataType.isNumberFamily()) { //numbers include integers
             return Lists.newArrayList(DictionaryDimEnc.ENCODING_NAME);
         } else if (dataType.isDateTimeFamily()) {
-            return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME);
+            return Lists.newArrayList(DateDimEnc.ENCODING_NAME, TimeDimEnc.ENCODING_NAME,
+                    DictionaryDimEnc.ENCODING_NAME);
         } else if (dataType.isStringFamily()) {
-            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME, FixedLenDimEnc.ENCODING_NAME, //
+            return Lists.newArrayList(BooleanDimEnc.ENCODING_NAME, DictionaryDimEnc.ENCODING_NAME,
+                    FixedLenDimEnc.ENCODING_NAME, //
                     FixedLenHexDimEnc.ENCODING_NAME, IntegerDimEnc.ENCODING_NAME);
         } else {
-            throw new BadRequestException(String.format(msg.getVALID_ENCODING_NOT_AVAILABLE(), dataType));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getVALID_ENCODING_NOT_AVAILABLE(), dataType));
         }
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java
index 5bd6a81..5d2fa68 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ExtFilterService.java
@@ -20,6 +20,7 @@ package org.apache.kylin.rest.service;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.metadata.model.ExternalFilterDesc;
 import org.apache.kylin.rest.constant.Constant;
@@ -41,7 +42,7 @@ public class ExtFilterService extends BasicService {
         Message msg = MsgPicker.getMsg();
 
         if (getTableManager().getExtFilterDesc(desc.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getFILTER_ALREADY_EXIST(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getFILTER_ALREADY_EXIST(), desc.getName()));
         }
         getTableManager().saveExternalFilter(desc);
     }
@@ -51,7 +52,7 @@ public class ExtFilterService extends BasicService {
         Message msg = MsgPicker.getMsg();
 
         if (getTableManager().getExtFilterDesc(desc.getName()) == null) {
-            throw new BadRequestException(String.format(msg.getFILTER_NOT_FOUND(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getFILTER_NOT_FOUND(), desc.getName()));
         }
         getTableManager().saveExternalFilter(desc);
     }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index f3b0c62..d8aa711 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -181,7 +181,7 @@ public class JobService extends BasicService implements InitializingBean {
         case STOPPED:
             return ExecutableState.STOPPED;
         default:
-            throw new BadRequestException(String.format(msg.getILLEGAL_EXECUTABLE_STATE(), status));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_EXECUTABLE_STATE(), status));
         }
     }
 
@@ -204,7 +204,7 @@ public class JobService extends BasicService implements InitializingBean {
         case ALL:
             return 0;
         default:
-            throw new BadRequestException(String.format(msg.getILLEGAL_TIME_FILTER(), timeFilter));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_TIME_FILTER(), timeFilter));
         }
     }
 
@@ -224,7 +224,7 @@ public class JobService extends BasicService implements InitializingBean {
         Message msg = MsgPicker.getMsg();
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
-            throw new BadRequestException(String.format(msg.getBUILD_BROKEN_CUBE(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getBUILD_BROKEN_CUBE(), cube.getName()));
         }
 
         checkCubeDescSignature(cube);
@@ -252,7 +252,7 @@ public class JobService extends BasicService implements InitializingBean {
                 newSeg = getCubeManager().refreshSegment(cube, tsRange, segRange);
                 job = EngineFactory.createBatchCubingJob(newSeg, submitter);
             } else {
-                throw new BadRequestException(String.format(msg.getINVALID_BUILD_TYPE(), buildType));
+                throw new BadRequestException(String.format(Locale.ROOT, msg.getINVALID_BUILD_TYPE(), buildType));
             }
 
             getExecutableManager().addJob(job);
@@ -289,7 +289,7 @@ public class JobService extends BasicService implements InitializingBean {
         Message msg = MsgPicker.getMsg();
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
-            throw new BadRequestException(String.format(msg.getBUILD_BROKEN_CUBE(), cube.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getBUILD_BROKEN_CUBE(), cube.getName()));
         }
 
         checkCubeDescSignature(cube);
@@ -414,7 +414,7 @@ public class JobService extends BasicService implements InitializingBean {
 
         if (!cube.getDescriptor().checkSignature())
             throw new BadRequestException(
-                    String.format(msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
+                    String.format(Locale.ROOT, msg.getINCONSISTENT_CUBE_DESC_SIGNATURE(), cube.getDescriptor()));
     }
 
     private void checkAllowBuilding(CubeInstance cube) {
@@ -476,7 +476,7 @@ public class JobService extends BasicService implements InitializingBean {
             return null;
         }
         if (!(job instanceof CubingJob)) {
-            throw new BadRequestException(String.format(msg.getILLEGAL_JOB_TYPE(), job.getId()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_JOB_TYPE(), job.getId()));
         }
 
         CubingJob cubeJob = (CubingJob) job;
@@ -536,7 +536,7 @@ public class JobService extends BasicService implements InitializingBean {
             return null;
         }
         if (!(job instanceof CheckpointExecutable)) {
-            throw new BadRequestException(String.format(msg.getILLEGAL_JOB_TYPE(), job.getId()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getILLEGAL_JOB_TYPE(), job.getId()));
         }
 
         CheckpointExecutable checkpointExecutable = (CheckpointExecutable) job;
@@ -736,7 +736,7 @@ public class JobService extends BasicService implements InitializingBean {
             aclEvaluate.checkProjectOperationPermission(projectName);
         }
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -786,7 +786,8 @@ public class JobService extends BasicService implements InitializingBean {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -835,7 +836,8 @@ public class JobService extends BasicService implements InitializingBean {
                                 if (nameExactMatch) {
                                     return cubeJob.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return cubeJob.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return cubeJob.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
@@ -846,7 +848,7 @@ public class JobService extends BasicService implements InitializingBean {
             final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
         // TODO: use cache of jobs for this method
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -884,7 +886,8 @@ public class JobService extends BasicService implements InitializingBean {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -933,7 +936,8 @@ public class JobService extends BasicService implements InitializingBean {
                                 if (nameExactMatch) {
                                     return checkpointExecutable.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return checkpointExecutable.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return checkpointExecutable.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
@@ -1006,7 +1010,7 @@ public class JobService extends BasicService implements InitializingBean {
             aclEvaluate.checkProjectOperationPermission(projectName);
         }
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -1038,7 +1042,7 @@ public class JobService extends BasicService implements InitializingBean {
             aclEvaluate.checkProjectOperationPermission(projectName);
         }
         // prepare time range
-        Calendar calendar = Calendar.getInstance();
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
         calendar.setTime(new Date());
         long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
         long timeEndInMillis = Long.MAX_VALUE;
@@ -1087,7 +1091,8 @@ public class JobService extends BasicService implements InitializingBean {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -1133,7 +1138,8 @@ public class JobService extends BasicService implements InitializingBean {
                                 if (nameExactMatch) {
                                     return cubeJob.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return cubeJob.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return cubeJob.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
@@ -1160,7 +1166,8 @@ public class JobService extends BasicService implements InitializingBean {
                                     if (nameExactMatch)
                                         return executableCubeName.equalsIgnoreCase(cubeName);
                                     else
-                                        return executableCubeName.toLowerCase().contains(cubeName.toLowerCase());
+                                        return executableCubeName.toLowerCase(Locale.ROOT)
+                                                .contains(cubeName.toLowerCase(Locale.ROOT));
                                 } else {
                                     return false;
                                 }
@@ -1206,7 +1213,8 @@ public class JobService extends BasicService implements InitializingBean {
                                 if (nameExactMatch) {
                                     return checkpointExecutable.getName().equalsIgnoreCase(jobName);
                                 } else {
-                                    return checkpointExecutable.getName().toLowerCase().contains(jobName.toLowerCase());
+                                    return checkpointExecutable.getName().toLowerCase(Locale.ROOT)
+                                            .contains(jobName.toLowerCase(Locale.ROOT));
                                 }
                             }
                         })));
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java b/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
index 32c7339..ad0de2e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/KafkaConfigService.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.msg.Message;
@@ -52,7 +53,8 @@ public class KafkaConfigService extends BasicService {
         return kafkaConfigs;
     }
 
-    public List<KafkaConfig> getKafkaConfigs(final String kafkaConfigName, final String project, final Integer limit, final Integer offset) throws IOException {
+    public List<KafkaConfig> getKafkaConfigs(final String kafkaConfigName, final String project, final Integer limit,
+            final Integer offset) throws IOException {
         aclEvaluate.checkProjectWritePermission(project);
         List<KafkaConfig> kafkaConfigs;
         kafkaConfigs = listAllKafkaConfigs(kafkaConfigName);
@@ -73,7 +75,8 @@ public class KafkaConfigService extends BasicService {
         Message msg = MsgPicker.getMsg();
 
         if (getKafkaManager().getKafkaConfig(config.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getKAFKA_CONFIG_ALREADY_EXIST(), config.getName()));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getKAFKA_CONFIG_ALREADY_EXIST(), config.getName()));
         }
         getKafkaManager().createKafkaConfig(config);
         return config;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
index 0cc48a9..eea8cd7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import javax.annotation.PostConstruct;
 
@@ -118,7 +119,7 @@ public class KylinUserService implements UserService {
         Message msg = MsgPicker.getMsg();
         ManagedUser managedUser = getKylinUserManager().get(userName);
         if (managedUser == null) {
-            throw new UsernameNotFoundException(String.format(msg.getUSER_NOT_FOUND(), userName));
+            throw new UsernameNotFoundException(String.format(Locale.ROOT, msg.getUSER_NOT_FOUND(), userName));
         }
         logger.trace("load user : {}", userName);
         return managedUser;
@@ -130,7 +131,7 @@ public class KylinUserService implements UserService {
     }
 
     @Override
-    public List<String> listAdminUsers() throws IOException{
+    public List<String> listAdminUsers() throws IOException {
         List<String> adminUsers = new ArrayList<>();
         for (ManagedUser managedUser : listUsers()) {
             if (managedUser.getAuthorities().contains(new SimpleGrantedAuthority(Constant.ROLE_ADMIN))) {
@@ -141,7 +142,7 @@ public class KylinUserService implements UserService {
     }
 
     @Override
-    public void completeUserInfo(ManagedUser user){
+    public void completeUserInfo(ManagedUser user) {
     }
 
     public static String getId(String userName) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
index b3f6e2d..f3281e2 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -97,8 +98,10 @@ public class ModelService extends BasicService {
         List<DataModelDesc> filterModels = new ArrayList<DataModelDesc>();
         for (DataModelDesc modelDesc : models) {
             boolean isModelMatch = (null == modelName) || modelName.length() == 0
-                    || (exactMatch && modelDesc.getName().toLowerCase().equals(modelName.toLowerCase()))
-                    || (!exactMatch && modelDesc.getName().toLowerCase().contains(modelName.toLowerCase()));
+                    || (exactMatch
+                            && modelDesc.getName().toLowerCase(Locale.ROOT).equals(modelName.toLowerCase(Locale.ROOT)))
+                    || (!exactMatch && modelDesc.getName().toLowerCase(Locale.ROOT)
+                            .contains(modelName.toLowerCase(Locale.ROOT)));
 
             if (isModelMatch) {
                 filterModels.add(modelDesc);
@@ -130,7 +133,7 @@ public class ModelService extends BasicService {
         aclEvaluate.checkProjectWritePermission(projectName);
         Message msg = MsgPicker.getMsg();
         if (getDataModelManager().getDataModelDesc(desc.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getDUPLICATE_MODEL_NAME(), desc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getDUPLICATE_MODEL_NAME(), desc.getName()));
         }
 
         String factTableName = desc.getRootFactTableName();
@@ -159,7 +162,8 @@ public class ModelService extends BasicService {
         List<CubeDesc> cubeDescs = getCubeDescManager().listAllDesc();
         for (CubeDesc cubeDesc : cubeDescs) {
             if (cubeDesc.getModelName().equals(desc.getName())) {
-                throw new BadRequestException(String.format(msg.getDROP_REFERENCED_MODEL(), cubeDesc.getName()));
+                throw new BadRequestException(
+                        String.format(Locale.ROOT, msg.getDROP_REFERENCED_MODEL(), cubeDesc.getName()));
             }
         }
 
@@ -338,7 +342,7 @@ public class ModelService extends BasicService {
         }
         if (!ValidateUtil.isAlphanumericUnderscore(modelName)) {
             logger.info("Invalid model name {}, only letters, numbers and underscore supported.", modelDesc.getName());
-            throw new BadRequestException(String.format(msg.getINVALID_MODEL_NAME(), modelName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getINVALID_MODEL_NAME(), modelName));
         }
     }
 
@@ -368,7 +372,7 @@ public class ModelService extends BasicService {
         }
 
         if (!modelDesc.getError().isEmpty()) {
-            throw new BadRequestException(String.format(msg.getBROKEN_MODEL_DESC(), modelDesc.getName()));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getBROKEN_MODEL_DESC(), modelDesc.getName()));
         }
 
         return modelDesc;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index 22ee95e..a7fec44 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -23,6 +23,8 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
+
 import java.util.Set;
 import javax.annotation.Nullable;
 import org.apache.directory.api.util.Strings;
@@ -81,7 +83,7 @@ public class ProjectService extends BasicService {
         ProjectInstance currentProject = getProjectManager().getProject(projectName);
 
         if (currentProject != null) {
-            throw new BadRequestException(String.format(msg.getPROJECT_ALREADY_EXIST(), projectName));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getPROJECT_ALREADY_EXIST(), projectName));
         }
         String owner = SecurityContextHolder.getContext().getAuthentication().getName();
         ProjectInstance createdProject = getProjectManager().createProject(projectName, owner, description,
@@ -138,6 +140,7 @@ public class ProjectService extends BasicService {
             tableService.unloadHiveTable(table, projectName);
         }
         getProjectManager().dropProject(projectName);
+
         accessService.clean(project, true);
     }
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index f94ba26..7bef0f2 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -41,16 +41,13 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
 
 import javax.annotation.PostConstruct;
 
-import net.sf.ehcache.Cache;
-import net.sf.ehcache.CacheManager;
-import net.sf.ehcache.Element;
-
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.calcite.config.CalciteConnectionConfig;
 import org.apache.calcite.jdbc.CalcitePrepare;
@@ -128,6 +125,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
+import net.sf.ehcache.Cache;
+import net.sf.ehcache.CacheManager;
+import net.sf.ehcache.Element;
+
 /**
  * @author xduo
  */
@@ -171,7 +172,8 @@ public class QueryService extends BasicService {
         config.setMaxTotal(kylinConfig.getQueryMaxCacheStatementNum());
         config.setBlockWhenExhausted(false);
         config.setMinEvictableIdleTimeMillis(10 * 60 * 1000L); // cached statement will be evict if idle for 10 minutes
-        GenericKeyedObjectPool<PreparedContextKey, PreparedContext> pool = new GenericKeyedObjectPool<>(factory, config);
+        GenericKeyedObjectPool<PreparedContextKey, PreparedContext> pool = new GenericKeyedObjectPool<>(factory,
+                config);
         return pool;
     }
 
@@ -357,9 +359,9 @@ public class QueryService extends BasicService {
 
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         String serverMode = kylinConfig.getServerMode();
-        if (!(Constant.SERVER_MODE_QUERY.equals(serverMode.toLowerCase())
-                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase()))) {
-            throw new BadRequestException(String.format(msg.getQUERY_NOT_ALLOWED(), serverMode));
+        if (!(Constant.SERVER_MODE_QUERY.equals(serverMode.toLowerCase(Locale.ROOT))
+                || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase(Locale.ROOT)))) {
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getQUERY_NOT_ALLOWED(), serverMode));
         }
         if (StringUtils.isBlank(sqlRequest.getProject())) {
             throw new BadRequestException(msg.getEMPTY_PROJECT_NAME());
@@ -537,7 +539,7 @@ public class QueryService extends BasicService {
         boolean borrowPrepareContext = false;
         PreparedContextKey preparedContextKey = null;
         PreparedContext preparedContext = null;
-        
+
         try {
             conn = QueryConnection.getConnection(sqlRequest.getProject());
             String userInfo = SecurityContextHolder.getContext().getAuthentication().getName();
@@ -573,7 +575,7 @@ public class QueryService extends BasicService {
             OLAPContext.setParameters(parameters);
             // force clear the query context before a new query
             OLAPContext.clearThreadLocalContexts();
-            
+
             // special case for prepare query.
             List<List<String>> results = Lists.newArrayList();
             List<SelectedColumnMeta> columnMetas = Lists.newArrayList();
@@ -683,7 +685,7 @@ public class QueryService extends BasicService {
                         columnMeta.getString(23));
 
                 if (!"metadata".equalsIgnoreCase(colmnMeta.getTABLE_SCHEM())
-                        && !colmnMeta.getCOLUMN_NAME().toUpperCase().startsWith("_KY_")) {
+                        && !colmnMeta.getCOLUMN_NAME().toUpperCase(Locale.ROOT).startsWith("_KY_")) {
                     tableMap.get(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME()).addColumn(colmnMeta);
                 }
             }
@@ -759,7 +761,7 @@ public class QueryService extends BasicService {
                         columnMeta.getString(23));
 
                 if (!"metadata".equalsIgnoreCase(colmnMeta.getTABLE_SCHEM())
-                        && !colmnMeta.getCOLUMN_NAME().toUpperCase().startsWith("_KY_")) {
+                        && !colmnMeta.getCOLUMN_NAME().toUpperCase(Locale.ROOT).startsWith("_KY_")) {
                     tableMap.get(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME()).addColumn(colmnMeta);
                     columnMap.put(colmnMeta.getTABLE_SCHEM() + "#" + colmnMeta.getTABLE_NAME() + "#"
                             + colmnMeta.getCOLUMN_NAME(), colmnMeta);
@@ -880,7 +882,7 @@ public class QueryService extends BasicService {
             processStatementAttr(stat, sqlRequest);
             resultSet = stat.executeQuery(correctedSql);
 
-            r = createResponseFromResultSet(resultSet); 
+            r = createResponseFromResultSet(resultSet);
 
         } catch (SQLException sqlException) {
             r = pushDownQuery(sqlRequest, correctedSql, conn, sqlException);
@@ -895,8 +897,8 @@ public class QueryService extends BasicService {
         return buildSqlResponse(isPushDown, r.getFirst(), r.getSecond());
     }
 
-    private SQLResponse executePrepareRequest(String correctedSql, PrepareSqlRequest sqlRequest, PreparedContext preparedContext)
-            throws Exception {
+    private SQLResponse executePrepareRequest(String correctedSql, PrepareSqlRequest sqlRequest,
+            PreparedContext preparedContext) throws Exception {
         ResultSet resultSet = null;
         boolean isPushDown = false;
 
@@ -923,7 +925,8 @@ public class QueryService extends BasicService {
         return buildSqlResponse(isPushDown, r.getFirst(), r.getSecond());
     }
 
-    private Pair<List<List<String>>, List<SelectedColumnMeta>> pushDownQuery(SQLRequest sqlRequest, String correctedSql, Connection conn, SQLException sqlException) throws Exception{
+    private Pair<List<List<String>>, List<SelectedColumnMeta>> pushDownQuery(SQLRequest sqlRequest, String correctedSql,
+            Connection conn, SQLException sqlException) throws Exception {
         try {
             return PushDownUtil.tryPushDownSelectQuery(sqlRequest.getProject(), correctedSql, conn.getSchema(),
                     sqlException, BackdoorToggles.getPrepareOnly());
@@ -944,12 +947,13 @@ public class QueryService extends BasicService {
 
         // Fill in selected column meta
         for (int i = 1; i <= columnCount; ++i) {
-            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), metaData
-                    .isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i), metaData
-                    .getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i), metaData
-                    .getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i), metaData.getPrecision(i),
-                    metaData.getScale(i), metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData
-                            .isReadOnly(i), metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
+            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
+                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i),
+                    metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i),
+                    metaData.getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i),
+                    metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
+                    metaData.getColumnTypeName(i), metaData.isReadOnly(i), metaData.isWritable(i),
+                    metaData.isDefinitelyWritable(i)));
         }
 
         // fill in results
@@ -1147,7 +1151,7 @@ public class QueryService extends BasicService {
         this.cacheManager = cacheManager;
     }
 
-    private static PreparedContext createPreparedContext(String project, String sql) throws Exception{
+    private static PreparedContext createPreparedContext(String project, String sql) throws Exception {
         Connection conn = QueryConnection.getConnection(project);
         PreparedStatement preparedStatement = conn.prepareStatement(sql);
         Collection<OLAPContext> olapContexts = OLAPContext.getThreadLocalContexts();
@@ -1179,8 +1183,8 @@ public class QueryService extends BasicService {
         }
     }
 
-    private static class PreparedContextFactory extends
-            BaseKeyedPooledObjectFactory<PreparedContextKey, PreparedContext> {
+    private static class PreparedContextFactory
+            extends BaseKeyedPooledObjectFactory<PreparedContextKey, PreparedContext> {
 
         @Override
         public PreparedContext create(PreparedContextKey key) throws Exception {
@@ -1217,13 +1221,17 @@ public class QueryService extends BasicService {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o)
+                return true;
+            if (o == null || getClass() != o.getClass())
+                return false;
 
             PreparedContextKey that = (PreparedContextKey) o;
 
-            if (prjLastModifyTime != that.prjLastModifyTime) return false;
-            if (project != null ? !project.equals(that.project) : that.project != null) return false;
+            if (prjLastModifyTime != that.prjLastModifyTime)
+                return false;
+            if (project != null ? !project.equals(that.project) : that.project != null)
+                return false;
             return sql != null ? sql.equals(that.sql) : that.sql == null;
 
         }
@@ -1243,7 +1251,7 @@ public class QueryService extends BasicService {
         private Collection<OLAPContext> olapContexts;
 
         public PreparedContext(Connection conn, PreparedStatement preparedStatement,
-                               Collection<OLAPContext> olapContexts) {
+                Collection<OLAPContext> olapContexts) {
             this.conn = conn;
             this.preparedStatement = preparedStatement;
             this.olapContexts = olapContexts;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index d4d7cc7..09acb58 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.metadata.streaming.StreamingConfig;
@@ -50,7 +51,8 @@ public class StreamingService extends BasicService {
         return streamingConfigs;
     }
 
-    public List<StreamingConfig> getStreamingConfigs(final String table, final String project, final Integer limit, final Integer offset) throws IOException {
+    public List<StreamingConfig> getStreamingConfigs(final String table, final String project, final Integer limit,
+            final Integer offset) throws IOException {
         aclEvaluate.checkProjectWritePermission(project);
         List<StreamingConfig> streamingConfigs;
         streamingConfigs = listAllStreamingConfigs(table);
@@ -71,7 +73,8 @@ public class StreamingService extends BasicService {
         Message msg = MsgPicker.getMsg();
 
         if (getStreamingManager().getStreamingConfig(config.getName()) != null) {
-            throw new BadRequestException(String.format(msg.getSTREAMING_CONFIG_ALREADY_EXIST(), config.getName()));
+            throw new BadRequestException(
+                    String.format(Locale.ROOT, msg.getSTREAMING_CONFIG_ALREADY_EXIST(), config.getName()));
         }
         StreamingConfig streamingConfig = getStreamingManager().createStreamingConfig(config);
         return streamingConfig;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java b/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
index cffe36d..89a505a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/TableSchemaUpdateChecker.java
@@ -22,6 +22,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import static java.lang.String.format;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import javax.annotation.Nullable;
@@ -60,15 +61,17 @@ public class TableSchemaUpdateChecker {
         }
 
         static CheckResult validOnFirstLoad(String tableName) {
-            return new CheckResult(true, format("Table '%s' hasn't been loaded before", tableName));
+            return new CheckResult(true, format(Locale.ROOT, "Table '%s' hasn't been loaded before", tableName));
         }
 
         static CheckResult validOnCompatibleSchema(String tableName) {
-            return new CheckResult(true, format("Table '%s' is compatible with all existing cubes", tableName));
+            return new CheckResult(true,
+                    format(Locale.ROOT, "Table '%s' is compatible with all existing cubes", tableName));
         }
 
         static CheckResult invalidOnFetchSchema(String tableName, Exception e) {
-            return new CheckResult(false, format("Failed to fetch metadata of '%s': %s", tableName, e.getMessage()));
+            return new CheckResult(false,
+                    format(Locale.ROOT, "Failed to fetch metadata of '%s': %s", tableName, e.getMessage()));
         }
 
         static CheckResult invalidOnIncompatibleSchema(String tableName, List<String> reasons) {
@@ -77,7 +80,10 @@ public class TableSchemaUpdateChecker {
                 buf.append("- ").append(reason).append("\n");
             }
 
-            return new CheckResult(false, format("Found %d issue(s) with '%s':%n%s Please disable and purge related cube(s) first", reasons.size(), tableName, buf.toString()));
+            return new CheckResult(false,
+                    format(Locale.ROOT,
+                            "Found %d issue(s) with '%s':%n%s Please disable and " + "purge related " + "cube(s) first",
+                            reasons.size(), tableName, buf.toString()));
         }
     }
 
@@ -87,18 +93,19 @@ public class TableSchemaUpdateChecker {
     }
 
     private List<CubeInstance> findCubeByTable(final TableDesc table) {
-        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(), new Predicate<CubeInstance>() {
-            @Override
-            public boolean apply(@Nullable CubeInstance cube) {
-                if (cube == null || cube.allowBrokenDescriptor()) {
-                    return false;
-                }
-                DataModelDesc model = cube.getModel();
-                if (model == null)
-                    return false;
-                return model.containsTable(table);
-            }
-        });
+        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(),
+                new Predicate<CubeInstance>() {
+                    @Override
+                    public boolean apply(@Nullable CubeInstance cube) {
+                        if (cube == null || cube.allowBrokenDescriptor()) {
+                            return false;
+                        }
+                        DataModelDesc model = cube.getModel();
+                        if (model == null)
+                            return false;
+                        return model.containsTable(table);
+                    }
+                });
 
         return ImmutableList.copyOf(relatedCubes);
     }
@@ -185,7 +192,8 @@ public class TableSchemaUpdateChecker {
                 TableDesc factTable = cube.getModel().findFirstTable(fullTableName).getTableDesc();
                 List<String> violateColumns = checkAllColumnsInCube(cube, factTable, newTableDesc);
                 if (!violateColumns.isEmpty()) {
-                    issues.add(format("Column %s used in cube[%s] and model[%s], but changed in hive", violateColumns, cube.getName(), modelName));
+                    issues.add(format(Locale.ROOT, "Column %s used in cube[%s] and model[%s], but changed " + "in hive",
+                            violateColumns, cube.getName(), modelName));
                 }
             }
 
@@ -194,7 +202,8 @@ public class TableSchemaUpdateChecker {
             if (cube.getModel().isLookupTable(fullTableName)) {
                 TableDesc lookupTable = cube.getModel().findFirstTable(fullTableName).getTableDesc();
                 if (!checkAllColumnsInTableDesc(lookupTable, newTableDesc)) {
-                    issues.add(format("Table '%s' is used as Lookup Table in cube[%s] and model[%s], but changed in hive", lookupTable.getIdentity(), cube.getName(), modelName));
+                    issues.add(format(Locale.ROOT, "Table '%s' is used as Lookup Table in cube[%s] and model[%s], but "
+                            + "changed in " + "hive", lookupTable.getIdentity(), cube.getName(), modelName));
                 }
             }
         }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java b/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
index 1bb03e4..4009fc9 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/TableService.java
@@ -25,6 +25,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.UUID;
 
@@ -138,7 +139,8 @@ public class TableService extends BasicService {
         return loadTablesToProject(Lists.newArrayList(Pair.newPair(tableDesc, extDesc)), project);
     }
 
-    private String[] loadTablesToProject(List<Pair<TableDesc, TableExtDesc>> allMeta, String project) throws IOException {
+    private String[] loadTablesToProject(List<Pair<TableDesc, TableExtDesc>> allMeta, String project)
+            throws IOException {
         aclEvaluate.checkProjectAdminPermission(project);
         // do schema check
         TableMetadataManager metaMgr = getTableManager();
@@ -187,7 +189,7 @@ public class TableService extends BasicService {
         return result;
     }
 
-    public List<Pair<TableDesc, TableExtDesc>> extractHiveTableMeta(String[] tables, String project) throws Exception {        // de-dup
+    public List<Pair<TableDesc, TableExtDesc>> extractHiveTableMeta(String[] tables, String project) throws Exception { // de-dup
         SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
         for (String fullTableName : tables) {
             String[] parts = HadoopUtil.parseHiveTableName(fullTableName);
@@ -201,10 +203,10 @@ public class TableService extends BasicService {
         for (Map.Entry<String, String> entry : db2tables.entries()) {
             Pair<TableDesc, TableExtDesc> pair = explr.loadTableMetadata(entry.getKey(), entry.getValue(), project);
             TableDesc tableDesc = pair.getFirst();
-            Preconditions.checkState(tableDesc.getDatabase().equals(entry.getKey().toUpperCase()));
-            Preconditions.checkState(tableDesc.getName().equals(entry.getValue().toUpperCase()));
+            Preconditions.checkState(tableDesc.getDatabase().equals(entry.getKey().toUpperCase(Locale.ROOT)));
+            Preconditions.checkState(tableDesc.getName().equals(entry.getValue().toUpperCase(Locale.ROOT)));
             Preconditions.checkState(tableDesc.getIdentity()
-                    .equals(entry.getKey().toUpperCase() + "." + entry.getValue().toUpperCase()));
+                    .equals(entry.getKey().toUpperCase(Locale.ROOT) + "." + entry.getValue().toUpperCase(Locale.ROOT)));
             TableExtDesc extDesc = pair.getSecond();
             Preconditions.checkState(tableDesc.getIdentity().equals(extDesc.getIdentity()));
             allMeta.add(pair);
@@ -250,7 +252,7 @@ public class TableService extends BasicService {
             rtn = true;
         } else {
             List<String> models = modelService.getModelsUsingTable(desc, project);
-            throw new BadRequestException(String.format(msg.getTABLE_IN_USE_BY_MODEL(), models));
+            throw new BadRequestException(String.format(Locale.ROOT, msg.getTABLE_IN_USE_BY_MODEL(), models));
         }
 
         // it is a project local table, ready to remove since no model is using it within the project
@@ -347,7 +349,8 @@ public class TableService extends BasicService {
         ExtTableSnapshotInfo extTableSnapshotInfo = snapshotInfoManager.getSnapshot(tableName, snapshotID);
         TableDesc tableDesc = getTableManager().getTableDesc(tableName, project);
         if (extTableSnapshotInfo == null) {
-            throw new IllegalArgumentException("cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
+            throw new IllegalArgumentException(
+                    "cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
         }
         LookupProviderFactory.rebuildLocalCache(tableDesc, extTableSnapshotInfo);
     }
@@ -356,7 +359,8 @@ public class TableService extends BasicService {
         ExtTableSnapshotInfoManager snapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(getConfig());
         ExtTableSnapshotInfo extTableSnapshotInfo = snapshotInfoManager.getSnapshot(tableName, snapshotID);
         if (extTableSnapshotInfo == null) {
-            throw new IllegalArgumentException("cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
+            throw new IllegalArgumentException(
+                    "cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
         }
         LookupProviderFactory.removeLocalCache(extTableSnapshotInfo);
     }
@@ -365,7 +369,8 @@ public class TableService extends BasicService {
         ExtTableSnapshotInfoManager snapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(getConfig());
         ExtTableSnapshotInfo extTableSnapshotInfo = snapshotInfoManager.getSnapshot(tableName, snapshotID);
         if (extTableSnapshotInfo == null) {
-            throw new IllegalArgumentException("cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
+            throw new IllegalArgumentException(
+                    "cannot find ext snapshot info for table:" + tableName + " snapshot:" + snapshotID);
         }
         CacheState cacheState = LookupProviderFactory.getCacheState(extTableSnapshotInfo);
         return cacheState.name();
@@ -378,7 +383,8 @@ public class TableService extends BasicService {
         return internalGetLookupTableSnapshots(tableName, signature);
     }
 
-    List<TableSnapshotResponse> internalGetLookupTableSnapshots(String tableName, TableSignature signature) throws IOException {
+    List<TableSnapshotResponse> internalGetLookupTableSnapshots(String tableName, TableSignature signature)
+            throws IOException {
         ExtTableSnapshotInfoManager extSnapshotInfoManager = ExtTableSnapshotInfoManager.getInstance(getConfig());
         SnapshotManager snapshotManager = SnapshotManager.getInstance(getConfig());
         List<ExtTableSnapshotInfo> extTableSnapshots = extSnapshotInfoManager.getSnapshots(tableName);
@@ -459,7 +465,8 @@ public class TableService extends BasicService {
         TableDesc table = getTableManager().getTableDesc(tableName, prj);
         final TableExtDesc tableExt = getTableManager().getTableExt(tableName, prj);
         if (table == null) {
-            BadRequestException e = new BadRequestException(String.format(msg.getTABLE_DESC_NOT_FOUND(), tableName));
+            BadRequestException e = new BadRequestException(
+                    String.format(Locale.ROOT, msg.getTABLE_DESC_NOT_FOUND(), tableName));
             logger.error("Cannot find table descriptor " + tableName, e);
             throw e;
         }
@@ -495,6 +502,6 @@ public class TableService extends BasicService {
 
     public String normalizeHiveTableName(String tableName) {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
-        return (dbTableName[0] + "." + dbTableName[1]).toUpperCase();
+        return (dbTableName[0] + "." + dbTableName[1]).toUpperCase(Locale.ROOT);
     }
 }
diff --git a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
index 08010e4..a7aea99 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/bean/BeanValidator.java
@@ -28,6 +28,7 @@ import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -42,7 +43,8 @@ public class BeanValidator {
     /**
      * Tests the get/set methods of the specified class.
      */
-    public static <T> void validateAccssor(final Class<T> clazz, final String... skipThese) throws IntrospectionException {
+    public static <T> void validateAccssor(final Class<T> clazz, final String... skipThese)
+            throws IntrospectionException {
         final PropertyDescriptor[] props = Introspector.getBeanInfo(clazz).getPropertyDescriptors();
         for (PropertyDescriptor prop : props) {
 
@@ -69,18 +71,23 @@ public class BeanValidator {
 
                         setter.invoke(bean, value);
 
-                        Assert.assertEquals(String.format("Failed while testing property %s", prop.getName()), value, getter.invoke(bean));
+                        Assert.assertEquals(
+                                String.format(Locale.ROOT, "Failed while testing property %s", prop.getName()), value,
+                                getter.invoke(bean));
 
                     } catch (Exception ex) {
                         ex.printStackTrace();
-                        System.err.println(String.format("An exception was thrown while testing the property %s: %s", prop.getName(), ex.toString()));
+                        System.err.println(
+                                String.format(Locale.ROOT, "An exception was thrown while testing the property %s: %s",
+                                        prop.getName(), ex.toString()));
                     }
                 }
             }
         }
     }
 
-    private static Object buildValue(Class<?> clazz) throws InstantiationException, IllegalAccessException, IllegalArgumentException, SecurityException, InvocationTargetException {
+    private static Object buildValue(Class<?> clazz) throws InstantiationException, IllegalAccessException,
+            IllegalArgumentException, SecurityException, InvocationTargetException {
 
         final Constructor<?>[] ctrs = clazz.getConstructors();
         for (Constructor<?> ctr : ctrs) {
@@ -117,30 +124,33 @@ public class BeanValidator {
         } else if (clazz.isEnum()) {
             return clazz.getEnumConstants()[0];
         } else if (clazz.isInterface()) {
-            return Proxy.newProxyInstance(clazz.getClassLoader(), new java.lang.Class[] { clazz }, new java.lang.reflect.InvocationHandler() {
-                @Override
-                public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
-                    if (Object.class.getMethod("equals", Object.class).equals(method)) {
-                        return proxy == args[0];
-                    }
-                    if (Object.class.getMethod("hashCode", Object.class).equals(method)) {
-                        return Integer.valueOf(System.identityHashCode(proxy));
-                    }
-                    if (Object.class.getMethod("toString", Object.class).equals(method)) {
-                        return "Bean " + getMockedType(proxy);
-                    }
-
-                    return null;
-                }
-
-            });
+            return Proxy.newProxyInstance(clazz.getClassLoader(), new java.lang.Class[] { clazz },
+                    new java.lang.reflect.InvocationHandler() {
+                        @Override
+                        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+                            if (Object.class.getMethod("equals", Object.class).equals(method)) {
+                                return proxy == args[0];
+                            }
+                            if (Object.class.getMethod("hashCode", Object.class).equals(method)) {
+                                return Integer.valueOf(System.identityHashCode(proxy));
+                            }
+                            if (Object.class.getMethod("toString", Object.class).equals(method)) {
+                                return "Bean " + getMockedType(proxy);
+                            }
+
+                            return null;
+                        }
+
+                    });
         } else {
-            System.err.println("Unable to build an instance of class " + clazz.getName() + ", please add some code to the " + BeanValidator.class.getName() + " class to do this.");
+            System.err.println("Unable to build an instance of class " + clazz.getName()
+                    + ", please add some code to the " + BeanValidator.class.getName() + " class to do this.");
             return null;
         }
     }
 
-    public static <T> void findBooleanIsMethods(Class<T> clazz, PropertyDescriptor descriptor) throws IntrospectionException {
+    public static <T> void findBooleanIsMethods(Class<T> clazz, PropertyDescriptor descriptor)
+            throws IntrospectionException {
         if (descriptor.getReadMethod() == null && descriptor.getPropertyType() == Boolean.class) {
             try {
                 PropertyDescriptor pd = new PropertyDescriptor(descriptor.getName(), clazz);
diff --git a/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java b/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java
index 171d22e..bbbecf6 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/controller/StreamingControllerTest.java
@@ -18,35 +18,36 @@
 
 package org.apache.kylin.rest.controller;
 
+import java.io.IOException;
+import java.util.Locale;
+
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.junit.Assert;
 import org.junit.Test;
 
-import java.io.IOException;
-
 public class StreamingControllerTest {
 
     @Test
     public void testReadTableDesc() throws IOException {
-        String requestTableData = "{\"name\":\"my_table_name\",\"source_type\":1,\"columns\":[{\"id\":1,\"name\":" +
-                "\"amount\",\"datatype\":\"decimal\"},{\"id\":2,\"name\":\"category\",\"datatype\":\"varchar(256)\"}," +
-                "{\"id\":3,\"name\":\"order_time\",\"datatype\":\"timestamp\"},{\"id\":4,\"name\":\"device\"," +
-                "\"datatype\":\"varchar(256)\"},{\"id\":5,\"name\":\"qty\",\"datatype\":\"int\"},{\"id\":6,\"name\":" +
-                "\"user_id\",\"datatype\":\"varchar(256)\"},{\"id\":7,\"name\":\"user_age\",\"datatype\":\"int\"}," +
-                "{\"id\":8,\"name\":\"user_gender\",\"datatype\":\"varchar(256)\"},{\"id\":9,\"name\":\"currency\"," +
-                "\"datatype\":\"varchar(256)\"},{\"id\":10,\"name\":\"country\",\"datatype\":\"varchar(256)\"}," +
-                "{\"id\":11,\"name\":\"year_start\",\"datatype\":\"date\"},{\"id\":12,\"name\":\"quarter_start\"," +
-                "\"datatype\":\"date\"},{\"id\":13,\"name\":\"month_start\",\"datatype\":\"date\"},{\"id\":14," +
-                "\"name\":\"week_start\",\"datatype\":\"date\"},{\"id\":15,\"name\":\"day_start\",\"datatype\":" +
-                "\"date\"},{\"id\":16,\"name\":\"hour_start\",\"datatype\":\"timestamp\"},{\"id\":17,\"name\":" +
-                "\"minute_start\",\"datatype\":\"timestamp\"}],\"database\":\"my_database_name\"}";
+        String requestTableData = "{\"name\":\"my_table_name\",\"source_type\":1,\"columns\":[{\"id\":1,\"name\":"
+                + "\"amount\",\"datatype\":\"decimal\"},{\"id\":2,\"name\":\"category\",\"datatype\":\"varchar(256)\"},"
+                + "{\"id\":3,\"name\":\"order_time\",\"datatype\":\"timestamp\"},{\"id\":4,\"name\":\"device\","
+                + "\"datatype\":\"varchar(256)\"},{\"id\":5,\"name\":\"qty\",\"datatype\":\"int\"},{\"id\":6,\"name\":"
+                + "\"user_id\",\"datatype\":\"varchar(256)\"},{\"id\":7,\"name\":\"user_age\",\"datatype\":\"int\"},"
+                + "{\"id\":8,\"name\":\"user_gender\",\"datatype\":\"varchar(256)\"},{\"id\":9,\"name\":\"currency\","
+                + "\"datatype\":\"varchar(256)\"},{\"id\":10,\"name\":\"country\",\"datatype\":\"varchar(256)\"},"
+                + "{\"id\":11,\"name\":\"year_start\",\"datatype\":\"date\"},{\"id\":12,\"name\":\"quarter_start\","
+                + "\"datatype\":\"date\"},{\"id\":13,\"name\":\"month_start\",\"datatype\":\"date\"},{\"id\":14,"
+                + "\"name\":\"week_start\",\"datatype\":\"date\"},{\"id\":15,\"name\":\"day_start\",\"datatype\":"
+                + "\"date\"},{\"id\":16,\"name\":\"hour_start\",\"datatype\":\"timestamp\"},{\"id\":17,\"name\":"
+                + "\"minute_start\",\"datatype\":\"timestamp\"}],\"database\":\"my_database_name\"}";
         TableDesc desc = JsonUtil.readValue(requestTableData, TableDesc.class);
         String[] dbTable = HadoopUtil.parseHiveTableName(desc.getIdentity());
         desc.setName(dbTable[1]);
         desc.setDatabase(dbTable[0]);
-        Assert.assertEquals("my_table_name".toUpperCase(), desc.getName());
-        Assert.assertEquals("my_database_name".toUpperCase(), desc.getDatabase());
+        Assert.assertEquals("my_table_name".toUpperCase(Locale.ROOT), desc.getName());
+        Assert.assertEquals("my_database_name".toUpperCase(Locale.ROOT), desc.getDatabase());
     }
 }
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java b/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java
index fd9bfa9..3460d5c 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/DBConnConf.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.source.hive;
 
+import java.util.Locale;
+
 import org.apache.commons.configuration.PropertiesConfiguration;
 
 public class DBConnConf {
@@ -49,7 +51,7 @@ public class DBConnConf {
     }
 
     public String toString() {
-        return String.format("%s,%s,%s,%s", driver, url, user, pass);
+        return String.format(Locale.ROOT, "%s,%s,%s,%s", driver, url, user, pass);
     }
 
     public String getDriver() {
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java
index 94c1a02..2c998df 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveInputBase.java
@@ -20,6 +20,7 @@ package org.apache.kylin.source.hive;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.hadoop.fs.FileSystem;
@@ -54,7 +55,7 @@ public class HiveInputBase {
         String tableName = (table.isView()) ? table.getMaterializedName(uuid) : table.getName();
         String database = (table.isView()) ? KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable()
                 : table.getDatabase();
-        return String.format("%s.%s", database, tableName).toUpperCase();
+        return String.format(Locale.ROOT, "%s.%s", database, tableName).toUpperCase(Locale.ROOT);
     }
 
     protected void addStepPhase1_DoCreateFlatTable(DefaultChainedExecutable jobFlow, String hdfsWorkingDir,
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java
index 42df327..061d5d4 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMetadataExplorer.java
@@ -18,9 +18,11 @@
 
 package org.apache.kylin.source.hive;
 
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.commons.lang3.StringUtils;
@@ -69,8 +71,8 @@ public class HiveMetadataExplorer implements ISourceMetadataExplorer, ISampleDat
         // make a new TableDesc instance, don't modify the one in use
         if (tableDesc == null) {
             tableDesc = new TableDesc();
-            tableDesc.setDatabase(database.toUpperCase());
-            tableDesc.setName(tableName.toUpperCase());
+            tableDesc.setDatabase(database.toUpperCase(Locale.ROOT));
+            tableDesc.setName(tableName.toUpperCase(Locale.ROOT));
             tableDesc.setUuid(RandomUtil.randomUUID().toString());
             tableDesc.setLastModified(0);
         } else {
@@ -87,7 +89,7 @@ public class HiveMetadataExplorer implements ISourceMetadataExplorer, ISampleDat
         for (int i = 0, n = hiveTableMeta.partitionColumns.size(); i < n; i++) {
             if (i > 0)
                 partitionColumnString.append(", ");
-            partitionColumnString.append(hiveTableMeta.partitionColumns.get(i).name.toUpperCase());
+            partitionColumnString.append(hiveTableMeta.partitionColumns.get(i).name.toUpperCase(Locale.ROOT));
         }
 
         TableExtDesc tableExtDesc = new TableExtDesc();
@@ -120,7 +122,7 @@ public class HiveMetadataExplorer implements ISourceMetadataExplorer, ISampleDat
     }
 
     private String generateCreateSchemaSql(String schemaName) {
-        return String.format("CREATE DATABASE IF NOT EXISTS %s", schemaName);
+        return String.format(Locale.ROOT, "CREATE DATABASE IF NOT EXISTS %s", schemaName);
     }
 
     @Override
@@ -177,10 +179,10 @@ public class HiveMetadataExplorer implements ISourceMetadataExplorer, ISampleDat
     }
 
     private static String getHiveDataType(String javaDataType) {
-        String hiveDataType = javaDataType.toLowerCase().startsWith("varchar") ? "string" : javaDataType;
-        hiveDataType = javaDataType.toLowerCase().startsWith("integer") ? "int" : hiveDataType;
+        String hiveDataType = javaDataType.toLowerCase(Locale.ROOT).startsWith("varchar") ? "string" : javaDataType;
+        hiveDataType = javaDataType.toLowerCase(Locale.ROOT).startsWith("integer") ? "int" : hiveDataType;
 
-        return hiveDataType.toLowerCase();
+        return hiveDataType.toLowerCase(Locale.ROOT);
     }
 
     @Override
@@ -188,14 +190,15 @@ public class HiveMetadataExplorer implements ISourceMetadataExplorer, ISampleDat
         if (StringUtils.isEmpty(query)) {
             throw new RuntimeException("Evaluate query shall not be empty.");
         }
-        
+
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         String tmpDatabase = config.getHiveDatabaseForIntermediateTable();
-        String tmpView = "kylin_eval_query_" + UUID.nameUUIDFromBytes(query.getBytes()).toString().replace("-", "");
-        
+        String tmpView = "kylin_eval_query_"
+                + UUID.nameUUIDFromBytes(query.getBytes(StandardCharsets.UTF_8)).toString().replace("-", "");
+
         String dropViewSql = "DROP VIEW IF EXISTS " + tmpDatabase + "." + tmpView;
         String evalViewSql = "CREATE VIEW " + tmpDatabase + "." + tmpView + " as " + query;
-        
+
         try {
             logger.debug("Removing duplicate view {}", tmpView);
             hiveClient.executeHQL(dropViewSql);
@@ -223,7 +226,7 @@ public class HiveMetadataExplorer implements ISourceMetadataExplorer, ISampleDat
         for (int i = 0; i < columnNumber; i++) {
             HiveTableMeta.HiveTableColumnMeta field = hiveTableMeta.allColumns.get(i);
             ColumnDesc cdesc = new ColumnDesc();
-            cdesc.setName(field.name.toUpperCase());
+            cdesc.setName(field.name.toUpperCase(Locale.ROOT));
 
             // use "double" in kylin for "float"
             if ("float".equalsIgnoreCase(field.dataType)) {
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
index 5ffa2f6..eb32756 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityUpdateJob.java
@@ -24,6 +24,7 @@ import java.io.StringWriter;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
@@ -55,7 +56,8 @@ public class HiveColumnCardinalityUpdateJob extends AbstractHadoopJob {
     public static final String JOB_TITLE = "Kylin Hive Column Cardinality Update Job";
 
     @SuppressWarnings("static-access")
-    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true).withDescription("The hive table name").create("table");
+    protected static final Option OPTION_TABLE = OptionBuilder.withArgName("table name").hasArg().isRequired(true)
+            .withDescription("The hive table name").create("table");
 
     public HiveColumnCardinalityUpdateJob() {
 
@@ -74,15 +76,15 @@ public class HiveColumnCardinalityUpdateJob extends AbstractHadoopJob {
             parseOptions(options, args);
 
             String project = getOptionValue(OPTION_PROJECT);
-            String table = getOptionValue(OPTION_TABLE).toUpperCase();
-            
+            String table = getOptionValue(OPTION_TABLE).toUpperCase(Locale.ROOT);
+
             // start job
             String jobName = JOB_TITLE + getOptionsAsString();
             logger.info("Starting: " + jobName);
             Configuration conf = getConf();
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
 
-            updateKylinTableExd(table.toUpperCase(), output.toString(), conf, project);
+            updateKylinTableExd(table.toUpperCase(Locale.ROOT), output.toString(), conf, project);
             return 0;
         } catch (Exception e) {
             printUsage(options);
@@ -91,7 +93,8 @@ public class HiveColumnCardinalityUpdateJob extends AbstractHadoopJob {
 
     }
 
-    public void updateKylinTableExd(String tableName, String outPath, Configuration config, String prj) throws IOException {
+    public void updateKylinTableExd(String tableName, String outPath, Configuration config, String prj)
+            throws IOException {
         List<String> columns = null;
         try {
             columns = readLines(new Path(outPath), config);
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java
index 09895f9..287019b 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/CmdStep.java
@@ -18,6 +18,7 @@
 package org.apache.kylin.source.jdbc;
 
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
@@ -45,7 +46,7 @@ public class CmdStep extends AbstractExecutable {
 
     protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
         String cmd = getParam("cmd");
-        stepLogger.log(String.format("exe cmd:%s", cmd));
+        stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", cmd));
         Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
         getManager().addJobInfo(getId(), stepLogger.getInfo());
         if (response.getFirst() != 0) {
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
index bba2008..1c31e91 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcExplorer.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.source.jdbc;
 
+import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
@@ -25,6 +26,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.commons.lang3.StringUtils;
@@ -78,8 +80,8 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
     public Pair<TableDesc, TableExtDesc> loadTableMetadata(String database, String table, String prj)
             throws SQLException {
         TableDesc tableDesc = new TableDesc();
-        tableDesc.setDatabase(database.toUpperCase());
-        tableDesc.setName(table.toUpperCase());
+        tableDesc.setDatabase(database.toUpperCase(Locale.ROOT));
+        tableDesc.setName(table.toUpperCase(Locale.ROOT));
         tableDesc.setUuid(RandomUtil.randomUUID().toString());
         tableDesc.setLastModified(0);
         tableDesc.setSourceType(ISourceAware.ID_JDBC);
@@ -95,7 +97,8 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
             if (tableType != null) {
                 tableDesc.setTableType(tableType);
             } else {
-                throw new RuntimeException(String.format("table %s not found in schema:%s", table, database));
+                throw new RuntimeException(
+                        String.format(Locale.ROOT, "table %s not found in schema:%s", table, database));
             }
         }
 
@@ -116,12 +119,12 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
 
     private String getSqlDataType(String javaDataType) {
         if (JdbcDialect.DIALECT_VERTICA.equals(dialect) || JdbcDialect.DIALECT_MSSQL.equals(dialect)) {
-            if (javaDataType.toLowerCase().equals("double")) {
+            if (javaDataType.toLowerCase(Locale.ROOT).equals("double")) {
                 return "float";
             }
         }
 
-        return javaDataType.toLowerCase();
+        return javaDataType.toLowerCase(Locale.ROOT);
     }
 
     @Override
@@ -131,10 +134,12 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
 
     private String generateCreateSchemaSql(String schemaName) {
         if (JdbcDialect.DIALECT_VERTICA.equals(dialect) || JdbcDialect.DIALECT_MYSQL.equals(dialect)) {
-            return String.format("CREATE schema IF NOT EXISTS %s", schemaName);
+            return String.format(Locale.ROOT, "CREATE schema IF NOT EXISTS %s", schemaName);
         } else if (JdbcDialect.DIALECT_MSSQL.equals(dialect)) {
-            return String.format("IF NOT EXISTS (SELECT name FROM sys.schemas WHERE name = N'%s') EXEC('CREATE SCHEMA"
-                    + " [%s] AUTHORIZATION [dbo]')", schemaName, schemaName);
+            return String.format(Locale.ROOT,
+                    "IF NOT EXISTS (SELECT name FROM sys.schemas WHERE name = N'%s') EXEC('CREATE SCHEMA"
+                            + " [%s] AUTHORIZATION [dbo]')",
+                    schemaName, schemaName);
         } else {
             logger.error("unsupported dialect {}.", dialect);
             return null;
@@ -148,14 +153,14 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
 
     private String generateLoadDataSql(String tableName, String tableFileDir) {
         if (JdbcDialect.DIALECT_VERTICA.equals(dialect)) {
-            return String.format("copy %s from local '%s/%s.csv' delimiter as ',';", tableName, tableFileDir,
-                    tableName);
+            return String.format(Locale.ROOT, "copy %s from local '%s/%s.csv' delimiter as ',';", tableName,
+                    tableFileDir, tableName);
         } else if (JdbcDialect.DIALECT_MYSQL.equals(dialect)) {
-            return String.format("LOAD DATA INFILE '%s/%s.csv' INTO %s FIELDS TERMINATED BY ',';", tableFileDir,
-                    tableName, tableName);
+            return String.format(Locale.ROOT, "LOAD DATA INFILE '%s/%s.csv' INTO %s FIELDS TERMINATED BY ',';",
+                    tableFileDir, tableName, tableName);
         } else if (JdbcDialect.DIALECT_MSSQL.equals(dialect)) {
-            return String.format("BULK INSERT %s FROM '%s/%s.csv' WITH(FIELDTERMINATOR = ',')", tableName, tableFileDir,
-                    tableName);
+            return String.format(Locale.ROOT, "BULK INSERT %s FROM '%s/%s.csv' WITH(FIELDTERMINATOR = ',')", tableName,
+                    tableFileDir, tableName);
         } else {
             logger.error("unsupported dialect {}.", dialect);
             return null;
@@ -169,8 +174,9 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
 
     private String[] generateCreateTableSql(TableDesc tableDesc) {
         logger.info("Generate create table sql: {}", tableDesc);
-        String tableIdentity = String.format("%s.%s", tableDesc.getDatabase().toUpperCase(), tableDesc.getName())
-                .toUpperCase();
+        String tableIdentity = String
+                .format(Locale.ROOT, "%s.%s", tableDesc.getDatabase().toUpperCase(Locale.ROOT), tableDesc.getName())
+                .toUpperCase(Locale.ROOT);
         String dropsql = "DROP TABLE IF EXISTS " + tableIdentity;
         String dropsql2 = "DROP VIEW IF EXISTS " + tableIdentity;
 
@@ -239,7 +245,7 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         String tmpDatabase = config.getHiveDatabaseForIntermediateTable();
         String tmpView = tmpDatabase + ".kylin_eval_query_"
-                + UUID.nameUUIDFromBytes(query.getBytes()).toString().replaceAll("-", "");
+                + UUID.nameUUIDFromBytes(query.getBytes(StandardCharsets.UTF_8)).toString().replaceAll("-", "");
 
         String dropViewSql = "DROP VIEW IF EXISTS " + tmpView;
         String evalViewSql = "CREATE VIEW " + tmpView + " as " + query;
@@ -283,7 +289,7 @@ public class JdbcExplorer implements ISourceMetadataExplorer, ISampleDataDeploye
             String remarks = meta.getString("REMARKS");
 
             ColumnDesc cdesc = new ColumnDesc();
-            cdesc.setName(cname.toUpperCase());
+            cdesc.setName(cname.toUpperCase(Locale.ROOT));
 
             String kylinType = SqlUtil.jdbcTypeToKylinDataType(type);
             int precision = (SqlUtil.isPrecisionApplicable(kylinType) && csize > 0) ? csize : -1;
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
index 284aac5..b5069a1 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.source.jdbc;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.kylin.common.KylinConfig;
@@ -170,8 +171,8 @@ public class JdbcHiveMRInput extends HiveMRInput {
             String filedDelimiter = config.getJdbcSourceFieldDelimiter();
             int mapperNum = config.getSqoopMapperNum();
 
-            String bquery = String.format("SELECT min(%s), max(%s) FROM \"%s\".%s as %s", splitColumn, splitColumn,
-                    splitDatabase, splitTable, splitTableAlias);
+            String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM \"%s\".%s as %s", splitColumn,
+                    splitColumn, splitDatabase, splitTable, splitTableAlias);
             if (partitionDesc.isPartitioned()) {
                 SegmentRange segRange = flatDesc.getSegRange();
                 if (segRange != null && !segRange.isInfinite()) {
@@ -184,12 +185,14 @@ public class JdbcHiveMRInput extends HiveMRInput {
                 }
             }
 
-            String cmd = String.format("%s/bin/sqoop import" + generateSqoopConfigArgString()
-                    + "--connect \"%s\" --driver %s --username %s --password %s --query \"%s AND \\$CONDITIONS\" "
-                    + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '' "
-                    + "--fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl, driverClass, jdbcUser,
-                    jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery, filedDelimiter, mapperNum);
-            logger.debug(String.format("sqoop cmd:%s", cmd));
+            String cmd = String.format(Locale.ROOT,
+                    "%s/bin/sqoop import" + generateSqoopConfigArgString()
+                            + "--connect \"%s\" --driver %s --username %s --password %s --query \"%s AND \\$CONDITIONS\" "
+                            + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '' "
+                            + "--fields-terminated-by '%s' --num-mappers %d",
+                    sqoopHome, connectionUrl, driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable,
+                    splitColumn, bquery, filedDelimiter, mapperNum);
+            logger.debug(String.format(Locale.ROOT, "sqoop cmd:%s", cmd));
             CmdStep step = new CmdStep();
             step.setCmd(cmd);
             step.setName(ExecutableConstants.STEP_NAME_SQOOP_TO_FLAT_HIVE_TABLE);
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java
index 4313862..e880a3d 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTable.java
@@ -19,7 +19,7 @@
 package org.apache.kylin.source.jdbc;
 
 import java.io.IOException;
-
+import java.util.Locale;
 
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.source.IReadableTable;
@@ -35,7 +35,6 @@ public class JdbcTable implements IReadableTable {
     final private String database;
     final private String tableName;
 
-
     public JdbcTable(TableDesc tableDesc) {
         this.database = tableDesc.getDatabase();
         this.tableName = tableDesc.getName();
@@ -48,12 +47,12 @@ public class JdbcTable implements IReadableTable {
 
     @Override
     public TableSignature getSignature() throws IOException {
-        String path = String.format("%s.%s", database, tableName);
+        String path = String.format(Locale.ROOT, "%s.%s", database, tableName);
         long lastModified = System.currentTimeMillis(); // assume table is ever changing
-        int size=0;
+        int size = 0;
         return new TableSignature(path, size, lastModified);
     }
-    
+
     @Override
     public boolean exists() {
         return true;
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java
index e2616b7..2e99748 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcTableReader.java
@@ -23,6 +23,7 @@ import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.source.IReadableTable.TableReader;
@@ -61,13 +62,13 @@ public class JdbcTableReader implements TableReader {
         String jdbcPass = config.getJdbcSourcePass();
         dbconf = new DBConnConf(driverClass, connectionUrl, jdbcUser, jdbcPass);
         jdbcCon = SqlUtil.getConnection(dbconf);
-        String sql = String.format("select * from %s.%s", dbName, tableName);
+        String sql = String.format(Locale.ROOT, "select * from %s.%s", dbName, tableName);
         try {
             statement = jdbcCon.createStatement();
             rs = statement.executeQuery(sql);
             colCount = rs.getMetaData().getColumnCount();
         } catch (SQLException e) {
-            throw new IOException(String.format("error while exec %s", sql), e);
+            throw new IOException(String.format(Locale.ROOT, "error while exec %s", sql), e);
         }
     }
 
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java
index f4ffc23..0842199 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/DefaultJdbcMetadata.java
@@ -24,6 +24,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.kylin.source.hive.DBConnConf;
 import org.apache.kylin.source.jdbc.SqlUtil;
 import org.slf4j.Logger;
@@ -44,7 +45,7 @@ public class DefaultJdbcMetadata implements IJdbcMetadata {
             while (rs.next()) {
                 String schema = rs.getString("TABLE_SCHEM");
                 String catalog = rs.getString("TABLE_CATALOG");
-                logger.info(String.format("%s,%s", schema, catalog));
+                logger.info(String.format(Locale.ROOT, "%s,%s", schema, catalog));
                 ret.add(schema);
             }
         }
diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java
index 4100f79..ae4c0ff 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/metadata/JdbcMetadataFactory.java
@@ -17,12 +17,13 @@
 */
 package org.apache.kylin.source.jdbc.metadata;
 
+import java.util.Locale;
 import org.apache.kylin.source.hive.DBConnConf;
 import org.apache.kylin.source.jdbc.JdbcDialect;
 
 public abstract class JdbcMetadataFactory {
     public static IJdbcMetadata getJdbcMetadata(String dialect, final DBConnConf dbConnConf) {
-        String jdbcDialect = (null == dialect) ? "" : dialect.toLowerCase();
+        String jdbcDialect = (null == dialect) ? "" : dialect.toLowerCase(Locale.ROOT);
         switch (jdbcDialect) {
         case (JdbcDialect.DIALECT_MSSQL):
             return new SQLServerJdbcMetadata(dbConnConf);
diff --git a/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java b/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java
index b269329..a0df4f4 100644
--- a/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java
+++ b/source-hive/src/test/java/org/apache/kylin/source/jdbc/JdbcExplorerTest.java
@@ -32,6 +32,7 @@ import java.sql.SQLException;
 import java.sql.Types;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -138,13 +139,13 @@ public class JdbcExplorerTest extends LocalFileMetadataTestCase {
         TableDesc tableDesc = result.getFirst();
         ColumnDesc columnDesc = tableDesc.getColumns()[1];
 
-        Assert.assertEquals(databaseName.toUpperCase(), tableDesc.getDatabase());
+        Assert.assertEquals(databaseName.toUpperCase(Locale.ROOT), tableDesc.getDatabase());
         Assert.assertEquals(3, tableDesc.getColumnCount());
         Assert.assertEquals("TABLE", tableDesc.getTableType());
         Assert.assertEquals("COL2", columnDesc.getName());
         Assert.assertEquals("integer", columnDesc.getTypeName());
         Assert.assertEquals("comment2", columnDesc.getComment());
-        Assert.assertEquals(databaseName.toUpperCase() + "." + tableName.toUpperCase(),
+        Assert.assertEquals(databaseName.toUpperCase(Locale.ROOT) + "." + tableName.toUpperCase(Locale.ROOT),
                 result.getSecond().getIdentity());
     }
 
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
index 73b224e..1c94f9c 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -143,10 +144,11 @@ public class KafkaMRInput extends KafkaInputBase implements IMRInput {
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), tableLocation, seg));
                 intermediatePaths.add(tableLocation);
             } else {
-                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeName.toLowerCase()
-                        + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
+                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX
+                        + cubeName.toLowerCase(Locale.ROOT) + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), baseLocation + "/" + mockFactTableName, seg));
-                jobFlow.addTask(createFlatTable(hiveTableDatabase, mockFactTableName, baseLocation, cubeName, cubeDesc, flatDesc, intermediateTables, intermediatePaths));
+                jobFlow.addTask(createFlatTable(hiveTableDatabase, mockFactTableName, baseLocation, cubeName, cubeDesc,
+                        flatDesc, intermediateTables, intermediatePaths));
             }
         }
 
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java
index 7600329..7db6c32 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSparkInput.java
@@ -18,6 +18,7 @@
 package org.apache.kylin.source.kafka;
 
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeSegment;
@@ -84,8 +85,8 @@ public class KafkaSparkInput extends KafkaInputBase implements ISparkInput {
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), tableLocation, seg));
                 intermediatePaths.add(tableLocation);
             } else {
-                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + cubeName.toLowerCase()
-                        + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
+                final String mockFactTableName = MetadataConstants.KYLIN_INTERMEDIATE_PREFIX
+                        + cubeName.toLowerCase(Locale.ROOT) + "_" + seg.getUuid().replaceAll("-", "_") + "_fact";
                 jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId(), baseLocation + "/" + mockFactTableName, seg));
                 jobFlow.addTask(createFlatTable(hiveTableDatabase, mockFactTableName, baseLocation, cubeName, cubeDesc,
                         flatDesc, intermediateTables, intermediatePaths));
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index 3618ba6..5aa60c9 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -23,6 +23,7 @@ import java.lang.reflect.Constructor;
 import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.ArrayList;
+import java.util.Locale;
 import java.util.Map;
 import java.util.HashMap;
 import java.util.TreeMap;
@@ -116,7 +117,7 @@ public final class TimedJsonStreamParser extends StreamingParser {
             ArrayList<String> result = Lists.newArrayList();
 
             for (TblColRef column : allColumns) {
-                final String columnName = column.getName().toLowerCase();
+                final String columnName = column.getName().toLowerCase(Locale.ROOT);
                 if (populateDerivedTimeColumns(columnName, result, t) == false) {
                     result.add(getValueByKey(column, root));
                 }
@@ -138,15 +139,15 @@ public final class TimedJsonStreamParser extends StreamingParser {
     }
 
     public String[] getEmbeddedPropertyNames(TblColRef column) {
-        final String colName = column.getName().toLowerCase();
+        final String colName = column.getName().toLowerCase(Locale.ROOT);
         String[] names = nameMap.get(colName);
         if (names == null) {
             String comment = column.getColumnDesc().getComment(); // use comment to parse the structure
             if (!StringUtils.isEmpty(comment) && comment.contains(EMBEDDED_PROPERTY_SEPARATOR)) {
-                names = comment.toLowerCase().split("\\" + EMBEDDED_PROPERTY_SEPARATOR);
+                names = comment.toLowerCase(Locale.ROOT).split("\\" + EMBEDDED_PROPERTY_SEPARATOR);
                 nameMap.put(colName, names);
             } else if (colName.contains(separator)) { // deprecated, just be compitable for old version
-                names = colName.toLowerCase().split(separator);
+                names = colName.toLowerCase(Locale.ROOT).split(separator);
                 nameMap.put(colName, names);
             }
         }
@@ -155,7 +156,7 @@ public final class TimedJsonStreamParser extends StreamingParser {
     }
 
     protected String getValueByKey(TblColRef column, Map<String, Object> rootMap) throws IOException {
-        final String key = column.getName().toLowerCase();
+        final String key = column.getName().toLowerCase(Locale.ROOT);
         if (rootMap.containsKey(key)) {
             return objToString(rootMap.get(key));
         }
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
index c22c72f..0902866 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
@@ -21,6 +21,7 @@ package org.apache.kylin.source.kafka.hadoop;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Iterator;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
@@ -89,13 +90,15 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
 
         Properties kafkaProperties = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);
 
-        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup, kafkaProperties);
+        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup,
+                kafkaProperties);
 
         earliestOffset = this.split.getOffsetStart();
         latestOffset = this.split.getOffsetEnd();
         TopicPartition topicPartition = new TopicPartition(topic, partition);
         consumer.assign(Arrays.asList(topicPartition));
-        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}", new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
+        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}",
+                new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
     }
 
     @Override
@@ -120,7 +123,9 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
             iterator = messages.iterator();
             if (!iterator.hasNext()) {
                 log.info("No more messages, stop");
-                throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
+                throw new IOException(String.format(Locale.ROOT,
+                        "Unexpected ending of stream, expected ending offset " + "%d, but end at %d", latestOffset,
+                        watermark));
             }
         }
 
@@ -139,7 +144,8 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
         }
 
         log.error("Unexpected iterator end.");
-        throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
+        throw new IOException(String.format(Locale.ROOT,
+                "Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
     }
 
     @Override
@@ -162,7 +168,8 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
 
     @Override
     public void close() throws IOException {
-        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition, numProcessedMessages);
+        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition,
+                numProcessedMessages);
         consumer.close();
     }
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 16b8db2..911c8d5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -23,6 +23,7 @@ import java.lang.reflect.Field;
 import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.ExecutorService;
 import java.util.zip.DataFormatException;
 
@@ -90,14 +91,18 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         if (shardNum == totalShards) {
             //all shards
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
+            return Lists.newArrayList(
+                    Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
         } else if (baseShard + shardNum <= totalShards) {
             //endpoint end key is inclusive, so no need to append 0 or anything
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (baseShard + shardNum - 1))));
+            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard),
+                    getByteArrayForShort((short) (baseShard + shardNum - 1))));
         } else {
             //0,1,2,3,4 wants 4,0
-            return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
-                    Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
+            return Lists.newArrayList(
+                    Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
+                    Pair.newPair(getByteArrayForShort((short) 0),
+                            getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
         }
     }
 
@@ -149,14 +154,18 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(queryContext, shardNum, coprocessorTimeout);
 
-        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(), rawScanByteString.size());
+        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(),
+                rawScanByteString.size());
 
-        logger.info("The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0", Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
+        logger.info(
+                "The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0",
+                Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
         for (RawScan rs : rawScans) {
             logScan(rs, cubeSeg.getStorageLocationIdentifier());
         }
 
-        logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum, cuboidBaseShard, rawScans.size());
+        logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum,
+                cuboidBaseShard, rawScans.size());
 
         // KylinConfig: use env instance instead of CubeSegment, because KylinConfig will share among queries
         // for different cubes until redeployment of coprocessor jar.
@@ -177,8 +186,8 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         builder.setMaxScanBytes(cubeSeg.getConfig().getPartitionMaxScanBytes());
         builder.setIsExactAggregate(storageContext.isExactAggregation());
 
-        final String logHeader = String.format("<sub-thread for Query %s GTScanRequest %s>", queryContext.getQueryId(),
-                Integer.toHexString(System.identityHashCode(scanRequest)));
+        final String logHeader = String.format(Locale.ROOT, "<sub-thread for Query %s GTScanRequest %s>",
+                queryContext.getQueryId(), Integer.toHexString(System.identityHashCode(scanRequest)));
         for (final Pair<byte[], byte[]> epRange : getEPKeyRanges(cuboidBaseShard, shardNum, totalShards)) {
             executorService.submit(new Runnable() {
                 @Override
@@ -370,7 +379,8 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                     RawScan.serializer.serialize(rs, rawScanBuffer);
                 }
                 rawScanBuffer.flip();
-                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(), rawScanBuffer.limit());
+                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(),
+                        rawScanBuffer.limit());
                 break;
             } catch (BufferOverflowException boe) {
                 logger.info("Buffer size {} cannot hold the raw scans, resizing to 4 times", rawScanBufferSize);
@@ -385,13 +395,17 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         Stats stats = result.getStats();
         byte[] compressedRows = HBaseZeroCopyByteString.zeroCopyGetBytes(result.getCompressedRows());
 
-        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ").append(BytesUtil.toHex(region)).append(" on host: ").append(stats.getHostname()).append(".");
+        sb.append("Endpoint RPC returned from HTable ").append(cubeSeg.getStorageLocationIdentifier()).append(" Shard ")
+                .append(BytesUtil.toHex(region)).append(" on host: ").append(stats.getHostname()).append(".");
         sb.append("Total scanned row: ").append(stats.getScannedRowCount()).append(". ");
         sb.append("Total scanned bytes: ").append(stats.getScannedBytes()).append(". ");
         sb.append("Total filtered row: ").append(stats.getFilteredRowCount()).append(". ");
         sb.append("Total aggred row: ").append(stats.getAggregatedRowCount()).append(". ");
-        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime()).append("(ms). ");
-        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ").append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:").append(stats.getFreeSwapSpaceSize()).append(".");
+        sb.append("Time elapsed in EP: ").append(stats.getServiceEndTime() - stats.getServiceStartTime())
+                .append("(ms). ");
+        sb.append("Server CPU usage: ").append(stats.getSystemCpuLoad()).append(", server physical mem left: ")
+                .append(stats.getFreePhysicalMemorySize()).append(", server swap mem left:")
+                .append(stats.getFreeSwapSpaceSize()).append(".");
         sb.append("Etc message: ").append(stats.getEtcMsg()).append(".");
         sb.append("Normal Complete: ").append(stats.getNormalComplete() == 1).append(".");
         sb.append("Compressed row size: ").append(compressedRows.length);
@@ -401,7 +415,8 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
     private RuntimeException getCoprocessorException(CubeVisitResponse response) {
         if (!response.hasErrorInfo()) {
-            return new RuntimeException("Coprocessor aborts due to scan timeout or other reasons, please re-deploy coprocessor to see concrete error message");
+            return new RuntimeException(
+                    "Coprocessor aborts due to scan timeout or other reasons, please re-deploy coprocessor to see concrete error message");
         }
 
         CubeVisitResponse.ErrorInfo errorInfo = response.getErrorInfo();
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
index 199a1fe..0135a22 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.storage.hbase.lookup;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Random;
 
 import org.apache.commons.cli.Options;
@@ -87,7 +88,7 @@ public class LookupTableToHFileJob extends AbstractHadoopJob {
             parseOptions(options, args);
 
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
             String tableName = getOptionValue(OPTION_TABLE_NAME);
             String lookupSnapshotID = getOptionValue(OPTION_LOOKUP_SNAPSHOT_ID);
             String jobId = getOptionValue(OPTION_CUBING_JOB_ID);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 37e36ac..7205802 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -87,7 +88,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
 
         partitionFilePath = new Path(getOptionValue(OPTION_PARTITION_FILE_PATH));
 
-        String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+        String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase(Locale.ROOT);
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         cube = cubeMgr.getCube(cubeName);
         cubeDesc = cube.getDescriptor();
@@ -226,13 +227,14 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 }
 
                 if (shardNum > nRegion) {
-                    logger.info(
-                            String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions, reduce to %d",
-                                    cuboidId, estimatedSize, shardNum, nRegion));
+                    logger.info(String.format(Locale.ROOT,
+                            "Cuboid %d 's estimated size %.2f MB will generate %d regions, " + "reduce to %d", cuboidId,
+                            estimatedSize, shardNum, nRegion));
                     shardNum = nRegion;
                 } else {
-                    logger.info(String.format("Cuboid %d 's estimated size %.2f MB will generate %d regions", cuboidId,
-                            estimatedSize, shardNum));
+                    logger.info(
+                            String.format(Locale.ROOT, "Cuboid %d 's estimated size %.2f MB will generate %d regions",
+                                    cuboidId, estimatedSize, shardNum));
                 }
 
                 cuboidShards.put(cuboidId, (short) shardNum);
@@ -245,8 +247,9 @@ public class CreateHTableJob extends AbstractHadoopJob {
             }
 
             for (int i = 0; i < nRegion; ++i) {
-                logger.info(String.format("Region %d's estimated size is %.2f MB, accounting for %.2f percent", i,
-                        regionSizes[i], 100.0 * regionSizes[i] / totalSizeInM));
+                logger.info(
+                        String.format(Locale.ROOT, "Region %d's estimated size is %.2f MB, accounting for %.2f percent",
+                                i, regionSizes[i], 100.0 * regionSizes[i] / totalSizeInM));
             }
 
             CuboidShardUtil.saveCuboidShards(cubeSegment, cuboidShards, nRegion);
@@ -341,7 +344,8 @@ public class CreateHTableJob extends AbstractHadoopJob {
             for (Long cuboid : allCuboids) {
 
                 if (accumulatedSize >= hfileSizeMB) {
-                    logger.info(String.format("Region %d's hfile %d size is %.2f mb", i, j, accumulatedSize));
+                    logger.info(
+                            String.format(Locale.ROOT, "Region %d's hfile %d size is %.2f mb", i, j, accumulatedSize));
                     byte[] split = new byte[RowConstants.ROWKEY_SHARD_AND_CUBOID_LEN];
                     BytesUtil.writeUnsigned(i, split, 0, RowConstants.ROWKEY_SHARDID_LEN);
                     System.arraycopy(Bytes.toBytes(cuboid), 0, split, RowConstants.ROWKEY_SHARDID_LEN,
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index f006adb..d06c993 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -20,6 +20,7 @@ package org.apache.kylin.storage.hbase.steps;
 
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -161,7 +162,7 @@ public class CubeHTableUtil {
             cf.setBlocksize(kylinConfig.getHbaseSmallFamilyBlockSize());
         }
 
-        String hbaseDefaultCC = kylinConfig.getHbaseDefaultCompressionCodec().toLowerCase();
+        String hbaseDefaultCC = kylinConfig.getHbaseDefaultCompressionCodec().toLowerCase(Locale.ROOT);
         switch (hbaseDefaultCC) {
         case "snappy": {
             logger.info("hbase will use snappy to compress data");
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
index e58a00e..1f75660 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HFileOutputFormat3.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -94,55 +95,45 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HFileOutputFormat3
-        extends FileOutputFormat<ImmutableBytesWritable, Cell> {
+public class HFileOutputFormat3 extends FileOutputFormat<ImmutableBytesWritable, Cell> {
     static Log LOG = LogFactory.getLog(HFileOutputFormat3.class);
 
     // The following constants are private since these are used by
     // HFileOutputFormat2 to internally transfer data between job setup and
     // reducer run using conf.
     // These should not be changed by the client.
-    private static final String COMPRESSION_FAMILIES_CONF_KEY =
-            "hbase.hfileoutputformat.families.compression";
-    private static final String BLOOM_TYPE_FAMILIES_CONF_KEY =
-            "hbase.hfileoutputformat.families.bloomtype";
-    private static final String BLOCK_SIZE_FAMILIES_CONF_KEY =
-            "hbase.mapreduce.hfileoutputformat.blocksize";
-    private static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY =
-            "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";
+    private static final String COMPRESSION_FAMILIES_CONF_KEY = "hbase.hfileoutputformat.families.compression";
+    private static final String BLOOM_TYPE_FAMILIES_CONF_KEY = "hbase.hfileoutputformat.families.bloomtype";
+    private static final String BLOCK_SIZE_FAMILIES_CONF_KEY = "hbase.mapreduce.hfileoutputformat.blocksize";
+    private static final String DATABLOCK_ENCODING_FAMILIES_CONF_KEY = "hbase.mapreduce.hfileoutputformat.families.datablock.encoding";
 
     // This constant is public since the client can modify this when setting
     // up their conf object and thus refer to this symbol.
     // It is present for backwards compatibility reasons. Use it only to
     // override the auto-detection of datablock encoding.
-    public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY =
-            "hbase.mapreduce.hfileoutputformat.datablock.encoding";
+    public static final String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY = "hbase.mapreduce.hfileoutputformat.datablock.encoding";
 
     @Override
-    public RecordWriter<ImmutableBytesWritable, Cell> getRecordWriter(
-            final TaskAttemptContext context) throws IOException, InterruptedException {
+    public RecordWriter<ImmutableBytesWritable, Cell> getRecordWriter(final TaskAttemptContext context)
+            throws IOException, InterruptedException {
         return createRecordWriter(context, this.getOutputCommitter(context));
     }
 
-    static <V extends Cell> RecordWriter<ImmutableBytesWritable, V>
-    createRecordWriter(final TaskAttemptContext context, final OutputCommitter committer)
-            throws IOException, InterruptedException {
+    static <V extends Cell> RecordWriter<ImmutableBytesWritable, V> createRecordWriter(final TaskAttemptContext context,
+            final OutputCommitter committer) throws IOException, InterruptedException {
 
         // Get the path of the temporary output file
-        final Path outputdir = ((FileOutputCommitter)committer).getWorkPath();
+        final Path outputdir = ((FileOutputCommitter) committer).getWorkPath();
         final Configuration conf = context.getConfiguration();
         LOG.debug("Task output path: " + outputdir);
         final FileSystem fs = outputdir.getFileSystem(conf);
         // These configs. are from hbase-*.xml
-        final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
-                HConstants.DEFAULT_MAX_FILE_SIZE);
+        final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
         // Invented config.  Add to hbase-*.xml if other than default compression.
-        final String defaultCompressionStr = conf.get("hfile.compression",
-                Compression.Algorithm.NONE.getName());
-        final Algorithm defaultCompression = AbstractHFileWriter
-                .compressionByName(defaultCompressionStr);
-        final boolean compactionExclude = conf.getBoolean(
-                "hbase.mapreduce.hfileoutputformat.compaction.exclude", false);
+        final String defaultCompressionStr = conf.get("hfile.compression", Compression.Algorithm.NONE.getName());
+        final Algorithm defaultCompression = AbstractHFileWriter.compressionByName(defaultCompressionStr);
+        final boolean compactionExclude = conf.getBoolean("hbase.mapreduce.hfileoutputformat.compaction.exclude",
+                false);
 
         // create a map from column family to the compression algorithm
         final Map<byte[], Algorithm> compressionMap = createFamilyCompressionMap(conf);
@@ -150,8 +141,7 @@ public class HFileOutputFormat3
         final Map<byte[], Integer> blockSizeMap = createFamilyBlockSizeMap(conf);
 
         String dataBlockEncodingStr = conf.get(DATABLOCK_ENCODING_OVERRIDE_CONF_KEY);
-        final Map<byte[], DataBlockEncoding> datablockEncodingMap
-                = createFamilyDataBlockEncodingMap(conf);
+        final Map<byte[], DataBlockEncoding> datablockEncodingMap = createFamilyDataBlockEncodingMap(conf);
         final DataBlockEncoding overriddenEncoding;
         if (dataBlockEncodingStr != null) {
             overriddenEncoding = DataBlockEncoding.valueOf(dataBlockEncodingStr);
@@ -161,23 +151,21 @@ public class HFileOutputFormat3
 
         return new RecordWriter<ImmutableBytesWritable, V>() {
             // Map of families to writers and how much has been output on the writer.
-            private final Map<byte [], WriterLength> writers =
-                    new TreeMap<byte [], WriterLength>(Bytes.BYTES_COMPARATOR);
-            private byte [] previousRow = HConstants.EMPTY_BYTE_ARRAY;
-            private final byte [] now = Bytes.toBytes(System.currentTimeMillis());
+            private final Map<byte[], WriterLength> writers = new TreeMap<byte[], WriterLength>(Bytes.BYTES_COMPARATOR);
+            private byte[] previousRow = HConstants.EMPTY_BYTE_ARRAY;
+            private final byte[] now = Bytes.toBytes(System.currentTimeMillis());
             private boolean rollRequested = false;
 
             @Override
-            public void write(ImmutableBytesWritable row, V cell)
-                    throws IOException {
+            public void write(ImmutableBytesWritable row, V cell) throws IOException {
                 KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
                 if (row == null && kv == null) {
                     rollWriters();
                     return;
                 }
-                byte [] rowKey = CellUtil.cloneRow(kv);
+                byte[] rowKey = CellUtil.cloneRow(kv);
                 long length = kv.getLength();
-                byte [] family = CellUtil.cloneFamily(kv);
+                byte[] family = CellUtil.cloneFamily(kv);
                 WriterLength wl = this.writers.get(family);
                 if (wl == null) {
                     fs.mkdirs(new Path(outputdir, Bytes.toString(family)));
@@ -200,8 +188,7 @@ public class HFileOutputFormat3
             private void rollWriters() throws IOException {
                 for (WriterLength wl : this.writers.values()) {
                     if (wl.writer != null) {
-                        LOG.info("Writer=" + wl.writer.getPath() +
-                                ((wl.written == 0)? "": ", wrote=" + wl.written));
+                        LOG.info("Writer=" + wl.writer.getPath() + ((wl.written == 0) ? "" : ", wrote=" + wl.written));
                         close(wl.writer);
                     }
                     wl.writer = null;
@@ -210,10 +197,8 @@ public class HFileOutputFormat3
                 this.rollRequested = false;
             }
 
-            @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="BX_UNBOXING_IMMEDIATELY_REBOXED",
-                    justification="Not important")
-            private WriterLength getNewWriter(byte[] family, Configuration conf)
-                    throws IOException {
+            @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "BX_UNBOXING_IMMEDIATELY_REBOXED", justification = "Not important")
+            private WriterLength getNewWriter(byte[] family, Configuration conf) throws IOException {
                 WriterLength wl = new WriterLength();
                 Path familydir = new Path(outputdir, Bytes.toString(family));
                 Algorithm compression = compressionMap.get(family);
@@ -227,18 +212,15 @@ public class HFileOutputFormat3
                 encoding = encoding == null ? DataBlockEncoding.NONE : encoding;
                 Configuration tempConf = new Configuration(conf);
                 tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
-                HFileContextBuilder contextBuilder = new HFileContextBuilder()
-                        .withCompression(compression)
+                HFileContextBuilder contextBuilder = new HFileContextBuilder().withCompression(compression)
                         .withChecksumType(HStore.getChecksumType(conf))
-                        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
-                        .withBlockSize(blockSize);
+                        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)).withBlockSize(blockSize);
                 contextBuilder.withDataBlockEncoding(encoding);
                 HFileContext hFileContext = contextBuilder.build();
 
-                wl.writer = new StoreFile.WriterBuilder(conf, new CacheConfig(tempConf), fs)
-                        .withOutputDir(familydir).withBloomType(bloomType)
-                        .withComparator(KeyValue.COMPARATOR)
-                        .withFileContext(hFileContext).build();
+                wl.writer = new StoreFile.WriterBuilder(conf, new CacheConfig(tempConf), fs).withOutputDir(familydir)
+                        .withBloomType(bloomType).withComparator(KeyValue.COMPARATOR).withFileContext(hFileContext)
+                        .build();
 
                 this.writers.put(family, wl);
                 return wl;
@@ -246,23 +228,18 @@ public class HFileOutputFormat3
 
             private void close(final StoreFile.Writer w) throws IOException {
                 if (w != null) {
-                    w.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY,
-                            Bytes.toBytes(System.currentTimeMillis()));
-                    w.appendFileInfo(StoreFile.BULKLOAD_TASK_KEY,
-                            Bytes.toBytes(context.getTaskAttemptID().toString()));
-                    w.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY,
-                            Bytes.toBytes(true));
-                    w.appendFileInfo(StoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY,
-                            Bytes.toBytes(compactionExclude));
+                    w.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
+                    w.appendFileInfo(StoreFile.BULKLOAD_TASK_KEY, Bytes.toBytes(context.getTaskAttemptID().toString()));
+                    w.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY, Bytes.toBytes(true));
+                    w.appendFileInfo(StoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY, Bytes.toBytes(compactionExclude));
                     w.appendTrackedTimestampsToMetadata();
                     w.close();
                 }
             }
 
             @Override
-            public void close(TaskAttemptContext c)
-                    throws IOException, InterruptedException {
-                for (WriterLength wl: this.writers.values()) {
+            public void close(TaskAttemptContext c) throws IOException, InterruptedException {
+                for (WriterLength wl : this.writers.values()) {
                     close(wl.writer);
                 }
             }
@@ -281,11 +258,9 @@ public class HFileOutputFormat3
      * Return the start keys of all of the regions in this table,
      * as a list of ImmutableBytesWritable.
      */
-    private static List<ImmutableBytesWritable> getRegionStartKeys(RegionLocator table)
-            throws IOException {
+    private static List<ImmutableBytesWritable> getRegionStartKeys(RegionLocator table) throws IOException {
         byte[][] byteKeys = table.getStartKeys();
-        ArrayList<ImmutableBytesWritable> ret =
-                new ArrayList<ImmutableBytesWritable>(byteKeys.length);
+        ArrayList<ImmutableBytesWritable> ret = new ArrayList<ImmutableBytesWritable>(byteKeys.length);
         for (byte[] byteKey : byteKeys) {
             ret.add(new ImmutableBytesWritable(byteKey));
         }
@@ -297,8 +272,8 @@ public class HFileOutputFormat3
      * {@link TotalOrderPartitioner} that contains the split points in startKeys.
      */
     @SuppressWarnings("deprecation")
-    private static void writePartitions(Configuration conf, Path partitionsPath,
-                                        List<ImmutableBytesWritable> startKeys) throws IOException {
+    private static void writePartitions(Configuration conf, Path partitionsPath, List<ImmutableBytesWritable> startKeys)
+            throws IOException {
         LOG.info("Writing partition information to " + partitionsPath);
         if (startKeys.isEmpty()) {
             throw new IllegalArgumentException("No regions passed");
@@ -308,21 +283,18 @@ public class HFileOutputFormat3
         // have keys < the first region (which has an empty start key)
         // so we need to remove it. Otherwise we would end up with an
         // empty reducer with index 0
-        TreeSet<ImmutableBytesWritable> sorted =
-                new TreeSet<ImmutableBytesWritable>(startKeys);
+        TreeSet<ImmutableBytesWritable> sorted = new TreeSet<ImmutableBytesWritable>(startKeys);
 
         ImmutableBytesWritable first = sorted.first();
         if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {
-            throw new IllegalArgumentException(
-                    "First region of table should have empty start key. Instead has: "
-                            + Bytes.toStringBinary(first.get()));
+            throw new IllegalArgumentException("First region of table should have empty start key. Instead has: "
+                    + Bytes.toStringBinary(first.get()));
         }
         sorted.remove(first);
 
         // Write the actual file
         FileSystem fs = partitionsPath.getFileSystem(conf);
-        SequenceFile.Writer writer = SequenceFile.createWriter(
-                fs, conf, partitionsPath, ImmutableBytesWritable.class,
+        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, partitionsPath, ImmutableBytesWritable.class,
                 NullWritable.class);
 
         try {
@@ -351,8 +323,7 @@ public class HFileOutputFormat3
      * @deprecated Use {@link #configureIncrementalLoad(Job, Table, RegionLocator)} instead.
      */
     @Deprecated
-    public static void configureIncrementalLoad(Job job, HTable table)
-            throws IOException {
+    public static void configureIncrementalLoad(Job job, HTable table) throws IOException {
         configureIncrementalLoad(job, table.getTableDescriptor(), table.getRegionLocator());
     }
 
@@ -370,8 +341,7 @@ public class HFileOutputFormat3
      * The user should be sure to set the map output value class to either KeyValue or Put before
      * running this function.
      */
-    public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)
-            throws IOException {
+    public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator) throws IOException {
         configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
     }
 
@@ -389,14 +359,13 @@ public class HFileOutputFormat3
      * The user should be sure to set the map output value class to either KeyValue or Put before
      * running this function.
      */
-    public static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor,
-                                                RegionLocator regionLocator) throws IOException {
+    public static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor, RegionLocator regionLocator)
+            throws IOException {
         configureIncrementalLoad(job, tableDescriptor, regionLocator, HFileOutputFormat3.class);
     }
 
-    static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor,
-                                         RegionLocator regionLocator, Class<? extends OutputFormat<?, ?>> cls) throws IOException,
-            UnsupportedEncodingException {
+    static void configureIncrementalLoad(Job job, HTableDescriptor tableDescriptor, RegionLocator regionLocator,
+            Class<? extends OutputFormat<?, ?>> cls) throws IOException, UnsupportedEncodingException {
         Configuration conf = job.getConfiguration();
         job.setOutputKeyClass(ImmutableBytesWritable.class);
         job.setOutputValueClass(KeyValue.class);
@@ -415,15 +384,13 @@ public class HFileOutputFormat3
             LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());
         }
 
-        conf.setStrings("io.serializations", conf.get("io.serializations"),
-                MutationSerialization.class.getName(), ResultSerialization.class.getName(),
-                KeyValueSerialization.class.getName());
+        conf.setStrings("io.serializations", conf.get("io.serializations"), MutationSerialization.class.getName(),
+                ResultSerialization.class.getName(), KeyValueSerialization.class.getName());
 
         // Use table's region boundaries for TOP split points.
         LOG.info("Looking up current regions for table " + tableDescriptor.getTableName());
         List<ImmutableBytesWritable> startKeys = getRegionStartKeys(regionLocator);
-        LOG.info("Configuring " + startKeys.size() + " reduce partitions " +
-                "to match current region count");
+        LOG.info("Configuring " + startKeys.size() + " reduce partitions " + "to match current region count");
         job.setNumReduceTasks(startKeys.size());
 
         configurePartitioner(job, startKeys);
@@ -465,12 +432,9 @@ public class HFileOutputFormat3
      * @return a map from column family to the configured compression algorithm
      */
     @VisibleForTesting
-    static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration
-                                                                     conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                COMPRESSION_FAMILIES_CONF_KEY);
-        Map<byte[], Algorithm> compressionMap = new TreeMap<byte[],
-                Algorithm>(Bytes.BYTES_COMPARATOR);
+    static Map<byte[], Algorithm> createFamilyCompressionMap(Configuration conf) {
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, COMPRESSION_FAMILIES_CONF_KEY);
+        Map<byte[], Algorithm> compressionMap = new TreeMap<byte[], Algorithm>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             Algorithm algorithm = AbstractHFileWriter.compressionByName(e.getValue());
             compressionMap.put(e.getKey(), algorithm);
@@ -487,10 +451,8 @@ public class HFileOutputFormat3
      */
     @VisibleForTesting
     static Map<byte[], BloomType> createFamilyBloomTypeMap(Configuration conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                BLOOM_TYPE_FAMILIES_CONF_KEY);
-        Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[],
-                BloomType>(Bytes.BYTES_COMPARATOR);
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, BLOOM_TYPE_FAMILIES_CONF_KEY);
+        Map<byte[], BloomType> bloomTypeMap = new TreeMap<byte[], BloomType>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             BloomType bloomType = BloomType.valueOf(e.getValue());
             bloomTypeMap.put(e.getKey(), bloomType);
@@ -507,10 +469,8 @@ public class HFileOutputFormat3
      */
     @VisibleForTesting
     static Map<byte[], Integer> createFamilyBlockSizeMap(Configuration conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                BLOCK_SIZE_FAMILIES_CONF_KEY);
-        Map<byte[], Integer> blockSizeMap = new TreeMap<byte[],
-                Integer>(Bytes.BYTES_COMPARATOR);
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, BLOCK_SIZE_FAMILIES_CONF_KEY);
+        Map<byte[], Integer> blockSizeMap = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             Integer blockSize = Integer.parseInt(e.getValue());
             blockSizeMap.put(e.getKey(), blockSize);
@@ -527,19 +487,15 @@ public class HFileOutputFormat3
      *         configured data block type for the family
      */
     @VisibleForTesting
-    static Map<byte[], DataBlockEncoding> createFamilyDataBlockEncodingMap(
-            Configuration conf) {
-        Map<byte[], String> stringMap = createFamilyConfValueMap(conf,
-                DATABLOCK_ENCODING_FAMILIES_CONF_KEY);
-        Map<byte[], DataBlockEncoding> encoderMap = new TreeMap<byte[],
-                DataBlockEncoding>(Bytes.BYTES_COMPARATOR);
+    static Map<byte[], DataBlockEncoding> createFamilyDataBlockEncodingMap(Configuration conf) {
+        Map<byte[], String> stringMap = createFamilyConfValueMap(conf, DATABLOCK_ENCODING_FAMILIES_CONF_KEY);
+        Map<byte[], DataBlockEncoding> encoderMap = new TreeMap<byte[], DataBlockEncoding>(Bytes.BYTES_COMPARATOR);
         for (Map.Entry<byte[], String> e : stringMap.entrySet()) {
             encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));
         }
         return encoderMap;
     }
 
-
     /**
      * Run inside the task to deserialize column family to given conf value map.
      *
@@ -547,8 +503,7 @@ public class HFileOutputFormat3
      * @param confName conf key to read from the configuration
      * @return a map of column family to the given configuration value
      */
-    private static Map<byte[], String> createFamilyConfValueMap(
-            Configuration conf, String confName) {
+    private static Map<byte[], String> createFamilyConfValueMap(Configuration conf, String confName) {
         Map<byte[], String> confValMap = new TreeMap<byte[], String>(Bytes.BYTES_COMPARATOR);
         String confVal = conf.get(confName, "");
         for (String familyConf : confVal.split("&")) {
@@ -557,7 +512,7 @@ public class HFileOutputFormat3
                 continue;
             }
             try {
-                confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(),
+                confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(StandardCharsets.UTF_8),
                         URLDecoder.decode(familySplit[1], "UTF-8"));
             } catch (UnsupportedEncodingException e) {
                 // will not happen with UTF-8 encoding
@@ -571,8 +526,7 @@ public class HFileOutputFormat3
      * Configure <code>job</code> with a TotalOrderPartitioner, partitioning against
      * <code>splitPoints</code>. Cleans up the partitions file after job exists.
      */
-    static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoints)
-            throws IOException {
+    static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoints) throws IOException {
         Configuration conf = job.getConfiguration();
         // create the partitions file
         FileSystem fs = FileSystem.get(conf);
@@ -595,13 +549,12 @@ public class HFileOutputFormat3
      * @throws IOException
      *           on failure to read column family descriptors
      */
-    @edu.umd.cs.findbugs.annotations.SuppressWarnings(
-            value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
+    @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
     @VisibleForTesting
     static void configureCompression(Configuration conf, HTableDescriptor tableDescriptor)
             throws UnsupportedEncodingException {
         StringBuilder compressionConfigValue = new StringBuilder();
-        if(tableDescriptor == null){
+        if (tableDescriptor == null) {
             // could happen with mock table instance
             return;
         }
@@ -611,11 +564,9 @@ public class HFileOutputFormat3
             if (i++ > 0) {
                 compressionConfigValue.append('&');
             }
-            compressionConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getNameAsString(), "UTF-8"));
+            compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             compressionConfigValue.append('=');
-            compressionConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getCompression().getName(), "UTF-8"));
+            compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getCompression().getName(), "UTF-8"));
         }
         // Get rid of the last ampersand
         conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
@@ -644,11 +595,9 @@ public class HFileOutputFormat3
             if (i++ > 0) {
                 blockSizeConfigValue.append('&');
             }
-            blockSizeConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getNameAsString(), "UTF-8"));
+            blockSizeConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             blockSizeConfigValue.append('=');
-            blockSizeConfigValue.append(URLEncoder.encode(
-                    String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
+            blockSizeConfigValue.append(URLEncoder.encode(String.valueOf(familyDescriptor.getBlocksize()), "UTF-8"));
         }
         // Get rid of the last ampersand
         conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, blockSizeConfigValue.toString());
@@ -677,8 +626,7 @@ public class HFileOutputFormat3
             if (i++ > 0) {
                 bloomTypeConfigValue.append('&');
             }
-            bloomTypeConfigValue.append(URLEncoder.encode(
-                    familyDescriptor.getNameAsString(), "UTF-8"));
+            bloomTypeConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             bloomTypeConfigValue.append('=');
             String bloomType = familyDescriptor.getBloomFilterType().toString();
             if (bloomType == null) {
@@ -699,8 +647,8 @@ public class HFileOutputFormat3
      *           on failure to read column family descriptors
      */
     @VisibleForTesting
-    static void configureDataBlockEncoding(HTableDescriptor tableDescriptor,
-                                           Configuration conf) throws UnsupportedEncodingException {
+    static void configureDataBlockEncoding(HTableDescriptor tableDescriptor, Configuration conf)
+            throws UnsupportedEncodingException {
         if (tableDescriptor == null) {
             // could happen with mock table instance
             return;
@@ -712,17 +660,14 @@ public class HFileOutputFormat3
             if (i++ > 0) {
                 dataBlockEncodingConfigValue.append('&');
             }
-            dataBlockEncodingConfigValue.append(
-                    URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
+            dataBlockEncodingConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8"));
             dataBlockEncodingConfigValue.append('=');
             DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();
             if (encoding == null) {
                 encoding = DataBlockEncoding.NONE;
             }
-            dataBlockEncodingConfigValue.append(URLEncoder.encode(encoding.toString(),
-                    "UTF-8"));
+            dataBlockEncodingConfigValue.append(URLEncoder.encode(encoding.toString(), "UTF-8"));
         }
-        conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,
-                dataBlockEncodingConfigValue.toString());
+        conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY, dataBlockEncodingConfigValue.toString());
     }
 }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 6e03b7e..f1b4657 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -20,6 +20,7 @@ package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
 
+import java.util.Locale;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
@@ -44,7 +45,7 @@ public class CleanHtableCLI extends AbstractApplication {
         Admin hbaseAdmin = conn.getAdmin();
 
         for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
-            String name = descriptor.getNameAsString().toLowerCase();
+            String name = descriptor.getNameAsString().toLowerCase(Locale.ROOT);
             if (name.startsWith("kylin") || name.startsWith("_kylin")) {
                 String x = descriptor.getValue(IRealizationConstants.HTableTag);
                 System.out.println("table name " + descriptor.getNameAsString() + " host: " + x);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 292d9d6..00635ba 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -336,7 +337,7 @@ public class CubeMigrationCLI {
         case COPY_DICT_OR_SNAPSHOT: {
             String item = (String) opt.params[0];
 
-            if (item.toLowerCase().endsWith(".dict")) {
+            if (item.toLowerCase(Locale.ROOT).endsWith(".dict")) {
                 DictionaryManager dstDictMgr = DictionaryManager.getInstance(dstConfig);
                 DictionaryManager srcDicMgr = DictionaryManager.getInstance(srcConfig);
                 DictionaryInfo dictSrc = srcDicMgr.getDictionaryInfo(item);
@@ -368,7 +369,7 @@ public class CubeMigrationCLI {
                     logger.info("Item " + item + " is dup, instead " + dictSaved.getResourcePath() + " is reused");
                 }
 
-            } else if (item.toLowerCase().endsWith(".snapshot")) {
+            } else if (item.toLowerCase(Locale.ROOT).endsWith(".snapshot")) {
                 SnapshotManager dstSnapMgr = SnapshotManager.getInstance(dstConfig);
                 SnapshotManager srcSnapMgr = SnapshotManager.getInstance(srcConfig);
                 SnapshotTable snapSrc = srcSnapMgr.getSnapshotTable(item);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 23ec77f..6cd29d2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -26,6 +26,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
@@ -115,7 +116,7 @@ public class DeployCoprocessorCLI {
             List<String> tableNames = getHTableNames(kylinConfig);
             logger.info("Identify tables " + tableNames);
 
-            String filterType = args[curIdx++].toLowerCase();
+            String filterType = args[curIdx++].toLowerCase(Locale.ROOT);
             if (filterType.equals("-table")) {
                 tableNames = filterByTables(tableNames, Arrays.asList(args).subList(curIdx, args.length));
             } else if (filterType.equals("-cube")) {
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index a317110..b7e97a1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Random;
 
@@ -46,8 +47,8 @@ import com.google.common.collect.Lists;
 public class GridTableHBaseBenchmark {
 
     private static final String TEST_TABLE = "GridTableTest";
-    private static final byte[] CF = "F".getBytes();
-    private static final byte[] QN = "C".getBytes();
+    private static final byte[] CF = "F".getBytes(StandardCharsets.UTF_8);
+    private static final byte[] QN = "C".getBytes(StandardCharsets.UTF_8);
     private static final int N_ROWS = 10000;
     private static final int CELL_SIZE = 128 * 1024; // 128 KB
     private static final double DFT_HIT_RATIO = 0.3;
@@ -83,7 +84,8 @@ public class GridTableHBaseBenchmark {
         Hits hits = new Hits(N_ROWS, hitRatio, indexRatio);
 
         for (int i = 0; i < ROUND; i++) {
-            System.out.println("==================================== ROUND " + (i + 1) + " ========================================");
+            System.out.println("==================================== ROUND " + (i + 1)
+                    + " ========================================");
             testRowScanWithIndex(conn, hits.getHitsForRowScanWithIndex());
             testRowScanNoIndexFullScan(conn, hits.getHitsForRowScanNoIndex());
             testRowScanNoIndexSkipScan(conn, hits.getHitsForRowScanNoIndex());
@@ -386,7 +388,8 @@ public class GridTableHBaseBenchmark {
         public void markEnd() {
             endTime = System.currentTimeMillis();
             System.out.println();
-            System.out.println(name + " ends, " + (endTime - startTime) + " ms, " + rowsRead + " rows read, " + bytesRead + " bytes read");
+            System.out.println(name + " ends, " + (endTime - startTime) + " ms, " + rowsRead + " rows read, "
+                    + bytesRead + " bytes read");
         }
     }
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 8dd2164..47f4c58 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -19,12 +19,15 @@
 package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Calendar;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
+import java.util.TimeZone;
 import java.util.concurrent.Semaphore;
 
 import org.apache.commons.io.IOUtils;
@@ -54,8 +57,8 @@ public class HbaseStreamingInput {
     private static final Logger logger = LoggerFactory.getLogger(HbaseStreamingInput.class);
 
     private static final int CELL_SIZE = 128 * 1024; // 128 KB
-    private static final byte[] CF = "F".getBytes();
-    private static final byte[] QN = "C".getBytes();
+    private static final byte[] CF = "F".getBytes(StandardCharsets.UTF_8);
+    private static final byte[] QN = "C".getBytes(StandardCharsets.UTF_8);
 
     public static void createTable(String tableName) throws IOException {
         Connection conn = getConnection();
@@ -197,7 +200,8 @@ public class HbaseStreamingInput {
                         logger.error("value size invalid!!!!!");
                     }
 
-                    hash += Arrays.hashCode(Arrays.copyOfRange(value, cell.getValueOffset(), cell.getValueLength() + cell.getValueOffset()));
+                    hash += Arrays.hashCode(Arrays.copyOfRange(value, cell.getValueOffset(),
+                            cell.getValueLength() + cell.getValueOffset()));
                     rowCount++;
                 }
                 scanner.close();
@@ -231,8 +235,8 @@ public class HbaseStreamingInput {
     }
 
     private static String formatTime(long time) {
-        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
-        Calendar cal = Calendar.getInstance();
+        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss", Locale.ROOT);
+        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         cal.setTimeInMillis(time);
         return dateFormat.format(cal.getTime());
     }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 42a54c8..3f290ac 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import java.util.Locale;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -69,11 +70,11 @@ public class UpdateHTableHostCLI {
         }
 
         List<String> tableNames = getHTableNames(KylinConfig.getInstanceFromEnv());
-        if (!args[0].toLowerCase().equals("-from")) {
+        if (!args[0].toLowerCase(Locale.ROOT).equals("-from")) {
             printUsageAndExit();
         }
-        String oldHostValue = args[1].toLowerCase();
-        String filterType = args[2].toLowerCase();
+        String oldHostValue = args[1].toLowerCase(Locale.ROOT);
+        String filterType = args[2].toLowerCase(Locale.ROOT);
         if (filterType.equals("-table")) {
             tableNames = filterByTables(tableNames, Arrays.asList(args).subList(3, args.length));
         } else if (filterType.equals("-cube")) {
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
index d2b3488..0b83af4 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
@@ -19,8 +19,11 @@
 package org.apache.kylin.storage.hbase.common;
 
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
@@ -53,9 +56,9 @@ public class HiveJDBCClientTest {
 
         if (!testFile.exists()) {
 
-            FileWriter writer;
+            Writer writer;
             try {
-                writer = new FileWriter(testFile);
+                writer = new OutputStreamWriter(new FileOutputStream(testFile), StandardCharsets.UTF_8);
                 writer.write("1 a\n");
                 writer.write("2 b\n");
 
@@ -86,7 +89,8 @@ public class HiveJDBCClientTest {
         Statement stmt = con.createStatement();
         String tableName = "testHiveDriverTable";
         stmt.execute("drop table if exists " + tableName);
-        stmt.execute("create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
+        stmt.execute(
+                "create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
         // show tables
         String sql = "show tables '" + tableName + "'";
         System.out.println("Running: " + sql);
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
index 291072f..c08faeb 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
@@ -25,8 +25,10 @@ import java.util.Calendar;
 import java.util.Collection;
 import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 
+import java.util.TimeZone;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.metadata.filter.CaseTupleFilter;
 import org.apache.kylin.metadata.filter.ColumnTupleFilter;
@@ -101,12 +103,12 @@ public class FilterBaseTest extends LocalFileMetadataTestCase {
         compareFilter.addChild(columnFilter);
 
         List<String> inValues = Lists.newArrayList();
-        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
+        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.ROOT);
         Date startDate = simpleDateFormat.parse("1970-01-01");
         Date endDate = simpleDateFormat.parse("2100-01-01");
-        Calendar start = Calendar.getInstance();
+        Calendar start = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         start.setTime(startDate);
-        Calendar end = Calendar.getInstance();
+        Calendar end = Calendar.getInstance(TimeZone.getTimeZone("GMT"), Locale.ROOT);
         end.setTime(endDate);
         for (Date date = start.getTime(); start.before(end); start.add(Calendar.DATE, 1), date = start.getTime()) {
             inValues.add(simpleDateFormat.format(date));
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java
index eba4a37..8aeeca4 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapperTest.java
@@ -21,6 +21,7 @@ package org.apache.kylin.storage.hbase.steps;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.apache.hadoop.hbase.KeyValue;
@@ -68,13 +69,13 @@ public class CubeHFileMapperTest {
         Pair<RowKeyWritable, KeyValue> p2 = result.get(1);
 
         assertEquals(key, p1.getFirst());
-        assertEquals("cf1", new String(p1.getSecond().getFamily()));
-        assertEquals("usd_amt", new String(p1.getSecond().getQualifier()));
-        assertEquals("35.43", new String(p1.getSecond().getValue()));
+        assertEquals("cf1", new String(p1.getSecond().getFamily(), StandardCharsets.UTF_8));
+        assertEquals("usd_amt", new String(p1.getSecond().getQualifier(), StandardCharsets.UTF_8));
+        assertEquals("35.43", new String(p1.getSecond().getValue(), StandardCharsets.UTF_8));
 
         assertEquals(key, p2.getFirst());
-        assertEquals("cf1", new String(p2.getSecond().getFamily()));
-        assertEquals("item_count", new String(p2.getSecond().getQualifier()));
-        assertEquals("2", new String(p2.getSecond().getValue()));
+        assertEquals("cf1", new String(p2.getSecond().getFamily(), StandardCharsets.UTF_8));
+        assertEquals("item_count", new String(p2.getSecond().getQualifier(), StandardCharsets.UTF_8));
+        assertEquals("2", new String(p2.getSecond().getValue(), StandardCharsets.UTF_8));
     }
 }
diff --git a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
index 94df3bc..eb59d20 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.nio.charset.Charset;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
@@ -84,11 +85,9 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
... 244 lines suppressed ...