Posted to commits@hugegraph.apache.org by ji...@apache.org on 2022/11/21 08:35:23 UTC

[incubator-hugegraph-toolchain] branch clean-loader created (now 9fcd41d5)

This is an automated email from the ASF dual-hosted git repository.

jin pushed a change to branch clean-loader
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-toolchain.git


      at 9fcd41d5 refact(loader): clean code & import style

This branch includes the following new commits:

     new 9fcd41d5 refact(loader): clean code & import style

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[incubator-hugegraph-toolchain] 01/01: refact(loader): clean code & import style

Posted by ji...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jin pushed a commit to branch clean-loader
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-toolchain.git

commit 9fcd41d5e2f01df7c92bb2b42628258d885d315b
Author: imbajin <ji...@apache.org>
AuthorDate: Mon Nov 21 16:34:44 2022 +0800

    refact(loader): clean code & import style
    
    Note: ensure hubble builds correctly because it uses loader's methods
---
 .../org/apache/hugegraph/entity/load/LoadTask.java |   2 +-
 .../hugegraph/service/load/LoadTaskService.java    |   6 +-
 .../apache/hugegraph/loader/HugeGraphLoader.java   |  33 ++--
 .../apache/hugegraph/loader/MappingConverter.java  |   6 +-
 .../hugegraph/loader/builder/EdgeBuilder.java      |  14 +-
 .../hugegraph/loader/builder/ElementBuilder.java   |  15 +-
 .../hugegraph/loader/builder/SchemaCache.java      |   1 +
 .../hugegraph/loader/builder/VertexBuilder.java    |   4 +-
 .../apache/hugegraph/loader/constant/ElemType.java |   2 +-
 .../hugegraph/loader/executor/LoadContext.java     |   2 +-
 .../hugegraph/loader/executor/LoadOptions.java     |  22 ++-
 .../hugegraph/loader/failure/FailLogger.java       |  29 ++--
 .../loader/flink/HugeGraphDeserialization.java     |   5 +-
 .../loader/flink/HugeGraphFlinkCDCLoader.java      |   4 +-
 .../loader/flink/HugeGraphSinkFunction.java        |   2 +-
 .../hugegraph/loader/mapping/EdgeMapping.java      |   1 +
 .../hugegraph/loader/mapping/ElementMapping.java   |  11 +-
 .../hugegraph/loader/mapping/InputStruct.java      |   1 +
 .../hugegraph/loader/mapping/LoadMapping.java      |   7 +-
 .../hugegraph/loader/mapping/VertexMapping.java    |   1 +
 .../loader/metrics/LoadDistributeMetrics.java      |  20 ++-
 .../hugegraph/loader/metrics/LoadSummary.java      |   7 +-
 .../hugegraph/loader/metrics/RangesTimer.java      |   8 +-
 .../hugegraph/loader/parser/TextLineParser.java    |   6 +-
 .../hugegraph/loader/progress/InputProgress.java   |   2 +-
 .../hugegraph/loader/progress/LoadProgress.java    |  11 +-
 .../hugegraph/loader/reader/InputReader.java       |   4 +-
 .../loader/reader/file/FileLineFetcher.java        |  18 +--
 .../hugegraph/loader/reader/file/FileReader.java   |   9 +-
 .../loader/reader/file/LocalFileReader.java        |   8 +-
 .../loader/reader/file/OrcFileLineFetcher.java     |   6 +-
 .../loader/reader/hdfs/HDFSFileReader.java         |  10 +-
 .../hugegraph/loader/reader/jdbc/JDBCReader.java   |   3 +-
 .../hugegraph/loader/reader/jdbc/JDBCUtil.java     |  14 +-
 .../hugegraph/loader/reader/jdbc/RowFetcher.java   |   2 +-
 .../loader/serializer/DeserializeException.java    |   3 +-
 .../loader/serializer/InputProgressDeser.java      |  13 +-
 .../loader/serializer/InputSourceDeser.java        |   7 +-
 .../hugegraph/loader/source/AbstractSource.java    |   4 +-
 .../hugegraph/loader/source/InputSource.java       |   1 +
 .../hugegraph/loader/source/file/Compression.java  |   4 +-
 .../hugegraph/loader/source/file/FileFormat.java   |   2 +-
 .../hugegraph/loader/source/file/FileSource.java   |   1 +
 .../hugegraph/loader/source/file/ListFormat.java   |   1 +
 .../hugegraph/loader/source/file/SkippedLine.java  |   1 +
 .../hugegraph/loader/source/hdfs/HDFSSource.java   |   1 +
 .../loader/source/hdfs/KerberosConfig.java         |   1 +
 .../hugegraph/loader/source/jdbc/JDBCSource.java   |   1 +
 .../hugegraph/loader/source/jdbc/JDBCVendor.java   |  12 +-
 .../loader/spark/HugeGraphSparkLoader.java         |   2 +-
 .../hugegraph/loader/struct/EdgeStructV1.java      |   1 +
 .../hugegraph/loader/struct/ElementStructV1.java   |   1 +
 .../hugegraph/loader/struct/GraphStructV1.java     |   9 +-
 .../hugegraph/loader/struct/VertexStructV1.java    |   1 +
 .../hugegraph/loader/task/BatchInsertTask.java     |   2 +-
 .../apache/hugegraph/loader/task/InsertTask.java   |   3 +-
 .../hugegraph/loader/task/ParseTaskBuilder.java    |   2 +-
 .../hugegraph/loader/task/SingleInsertTask.java    |   2 +-
 .../apache/hugegraph/loader/util/DataTypeUtil.java |  53 +++---
 .../org/apache/hugegraph/loader/util/DateUtil.java |   3 +-
 .../org/apache/hugegraph/loader/util/JsonUtil.java |   9 +-
 .../org/apache/hugegraph/loader/util/LoadUtil.java |   1 +
 .../apache/hugegraph/loader/util/MappingUtil.java  |   6 +-
 .../org/apache/hugegraph/loader/util/Printer.java  |   6 +-
 .../hugegraph/loader/test/functional/DBUtil.java   |   3 +-
 .../loader/test/functional/FileLoadTest.java       |   3 +-
 .../hugegraph/loader/test/functional/HDFSUtil.java |  41 +++--
 .../hugegraph/loader/test/functional/IOUtil.java   |  30 ++--
 .../loader/test/functional/JDBCLoadTest.java       |   2 +-
 .../hugegraph/loader/test/functional/LoadTest.java |  16 +-
 .../hugegraph/loader/test/unit/DateUtilTest.java   |   2 +-
 .../loader/test/unit/LoadProgressTest.java         | 104 ++++++------
 .../loader/test/unit/MappingConverterTest.java     | 177 +++++++++++----------
 .../hugegraph/loader/test/unit/UnitTestSuite.java  |  10 +-
 74 files changed, 407 insertions(+), 430 deletions(-)

diff --git a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/entity/load/LoadTask.java b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/entity/load/LoadTask.java
index 3d18892f..9a2d6d9c 100644
--- a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/entity/load/LoadTask.java
+++ b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/entity/load/LoadTask.java
@@ -182,7 +182,7 @@ public class LoadTask implements Runnable {
                     this.status = LoadStatus.FAILED;
                 }
             }
-            this.fileReadLines = this.context().newProgress().totalInputReaded();
+            this.fileReadLines = this.context().newProgress().totalInputRead();
             this.lastDuration += this.context().summary().totalTime();
             this.currDuration = 0L;
         } finally {
diff --git a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
index 9c87fda1..47809961 100644
--- a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
+++ b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
@@ -304,16 +304,16 @@ public class LoadTaskService {
             try {
                 if (task.getStatus().inRunning()) {
                     LoadContext context = task.context();
-                    long readLines = context.newProgress().totalInputReaded();
+                    long readLines = context.newProgress().totalInputRead();
                     if (readLines == 0L) {
                         /*
                          * When the Context is just constructed, newProgress
                          * is empty. Only after parsing is started will use
                          * oldProgress and incrementally update newProgress,
-                         * if get totalInputReaded value during this process,
+                         * if get totalInputRead value during this process,
                          * it will return 0, so need read it from oldProgress
                          */
-                        readLines = context.oldProgress().totalInputReaded();
+                        readLines = context.oldProgress().totalInputRead();
                     }
                     task.setFileReadLines(readLines);
                     task.setCurrDuration(context.summary().totalTime());
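
The comment rewritten in this hunk records a read-ordering subtlety: newProgress
stays empty until parsing actually starts, so a monitoring thread that polls it
early sees 0 and must fall back to oldProgress. A minimal sketch of that
fallback, with plain long counters standing in for the real LoadProgress objects:

    final class ProgressPoll {
        // Condensed form of the polling above; the real code reads
        // context.newProgress() / context.oldProgress() instead of longs
        static long readLines(long newProgressRead, long oldProgressRead) {
            // 0 from newProgress usually means "not initialized yet",
            // not "no input read", so fall back to the old progress
            return newProgressRead != 0L ? newProgressRead : oldProgressRead;
        }
    }
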
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/HugeGraphLoader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/HugeGraphLoader.java
index fbfb3f5e..baba2a01 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/HugeGraphLoader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/HugeGraphLoader.java
@@ -66,8 +66,8 @@ public final class HugeGraphLoader {
         try {
             loader = new HugeGraphLoader(args);
         } catch (Throwable e) {
-            Printer.printError("Failed to start loading", e);
-            return;
+            Printer.printError("Failed to start loading", LoadUtil.targetRuntimeException(e));
+            throw e;
         }
         loader.load();
     }
@@ -188,13 +188,13 @@ public final class HugeGraphLoader {
         if (this.context.options().checkVertex) {
             LOG.info("Forced to load vertices before edges since set " +
                      "option check-vertex=true");
-            SplittedInputStructs splitted = this.splitStructs(structs);
+            SplitInputStructs split = this.splitStructs(structs);
             // Load all vertex structs
-            this.loadStructs(splitted.vertexInputStructs);
+            this.loadStructs(split.vertexInputStructs);
             // Wait all vertex load tasks finished
             this.manager.waitFinished("vertex insert tasks");
             // Load all edge structs
-            this.loadStructs(splitted.edgeInputStructs);
+            this.loadStructs(split.edgeInputStructs);
         } else {
             // Load vertex and edge structs concurrent in the same input
             this.loadStructs(structs);
@@ -223,7 +223,7 @@ public final class HugeGraphLoader {
     }
 
     /**
-     * TODO: Seperate classes: ReadHandler -> ParseHandler -> InsertHandler
+     * TODO: Separate classes: ReadHandler -> ParseHandler -> InsertHandler
      * Let load task worked in pipeline mode
      */
     private void loadStruct(InputStruct struct, InputReader reader) {
@@ -231,8 +231,7 @@ public final class HugeGraphLoader {
         LoadMetrics metrics = this.context.summary().metrics(struct);
         metrics.startInFlight();
 
-        ParseTaskBuilder taskBuilder = new ParseTaskBuilder(this.context,
-                                                            struct);
+        ParseTaskBuilder taskBuilder = new ParseTaskBuilder(this.context, struct);
         final int batchSize = this.context.options().batchSize;
         List<Line> lines = new ArrayList<>(batchSize);
         for (boolean finished = false; !finished;) {
@@ -251,7 +250,7 @@ public final class HugeGraphLoader {
                 metrics.increaseReadFailure();
                 this.handleReadFailure(struct, e);
             }
-            // If readed max allowed lines, stop loading
+            // If read max allowed lines, stop loading
             boolean reachedMaxReadLines = this.reachedMaxReadLines();
             if (reachedMaxReadLines) {
                 finished = true;
@@ -300,7 +299,7 @@ public final class HugeGraphLoader {
 
     private void handleReadFailure(InputStruct struct, ReadException e) {
         LOG.error("Read {} error", struct, e);
-        this.context.occuredError();
+        this.context.occurredError();
         LoadOptions options = this.context.options();
         if (options.testMode) {
             throw e;
@@ -337,21 +336,21 @@ public final class HugeGraphLoader {
         }
     }
 
-    private SplittedInputStructs splitStructs(List<InputStruct> structs) {
-        SplittedInputStructs splitted = new SplittedInputStructs();
+    private SplitInputStructs splitStructs(List<InputStruct> structs) {
+        SplitInputStructs split = new SplitInputStructs();
         for (InputStruct struct : structs) {
             InputStruct result = struct.extractVertexStruct();
             if (result != InputStruct.EMPTY) {
-                splitted.vertexInputStructs.add(result);
+                split.vertexInputStructs.add(result);
             }
         }
         for (InputStruct struct : structs) {
             InputStruct result = struct.extractEdgeStruct();
             if (result != InputStruct.EMPTY) {
-                splitted.edgeInputStructs.add(result);
+                split.edgeInputStructs.add(result);
             }
         }
-        return splitted;
+        return split;
     }
 
     private boolean reachedMaxReadLines() {
@@ -386,12 +385,12 @@ public final class HugeGraphLoader {
         }
     }
 
-    private static class SplittedInputStructs {
+    private static class SplitInputStructs {
 
         private final List<InputStruct> vertexInputStructs;
         private final List<InputStruct> edgeInputStructs;
 
-        public SplittedInputStructs() {
+        public SplitInputStructs() {
             this.vertexInputStructs = new ArrayList<>();
             this.edgeInputStructs = new ArrayList<>();
         }
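
When check-vertex=true the loader serializes the two phases shown above: every
vertex struct is loaded and awaited before any edge struct starts, so edge
insertion can verify that both endpoints already exist. A rough standalone
sketch of the same two-phase ordering (the executors and Runnable tasks are
illustrative, not the loader's TaskManager API):

    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    final class TwoPhaseLoad {
        static void load(List<Runnable> vertexTasks,
                         List<Runnable> edgeTasks) throws InterruptedException {
            ExecutorService vertexPool = Executors.newFixedThreadPool(4);
            vertexTasks.forEach(vertexPool::submit);        // phase 1: vertices
            vertexPool.shutdown();
            vertexPool.awaitTermination(1, TimeUnit.HOURS); // like waitFinished()
            ExecutorService edgePool = Executors.newFixedThreadPool(4);
            edgeTasks.forEach(edgePool::submit);            // phase 2: edges can
            edgePool.shutdown();                            // now check endpoints
            edgePool.awaitTermination(1, TimeUnit.HOURS);
        }
    }
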
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/MappingConverter.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/MappingConverter.java
index 43f2b4cc..f79ba339 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/MappingConverter.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/MappingConverter.java
@@ -42,14 +42,14 @@ public final class MappingConverter {
         File file = FileUtils.getFile(input);
         if (!file.exists() || !file.isFile()) {
             LOG.error("The file '{}' doesn't exists or not a file", input);
-            throw new IllegalArgumentException(String.format(
-                      "The file '%s' doesn't exists or not a file", input));
+            throw new IllegalArgumentException(String.format("The file '%s' doesn't exists or " +
+                                                             "not a file", input));
         }
 
         LoadMapping mapping = LoadMapping.of(input);
         String outputPath = getOutputPath(file);
         MappingUtil.write(mapping, outputPath);
-        LOG.info("Convert mapping file successfuly, stored at {}", outputPath);
+        LOG.info("Convert mapping file successfully, stored at {}", outputPath);
     }
 
     public static String getOutputPath(File file) {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/EdgeBuilder.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/EdgeBuilder.java
index 61707905..2706a3a6 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/EdgeBuilder.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/EdgeBuilder.java
@@ -36,7 +36,9 @@ import org.apache.hugegraph.structure.schema.EdgeLabel;
 import org.apache.hugegraph.structure.schema.SchemaLabel;
 import org.apache.hugegraph.structure.schema.VertexLabel;
 import org.apache.hugegraph.util.E;
+
 import com.google.common.collect.ImmutableList;
+
 import org.apache.spark.sql.Row;
 
 public class EdgeBuilder extends ElementBuilder<Edge> {
@@ -110,7 +112,7 @@ public class EdgeBuilder extends ElementBuilder<Edge> {
         }
         return edges;
     }
-    
+
     @Override
     public List<Edge> build(Row row) {
         String[] names = row.schema().fieldNames();
@@ -125,10 +127,8 @@ public class EdgeBuilder extends ElementBuilder<Edge> {
 
         this.lastNames = names;
         EdgeKVPairs kvPairs = this.newEdgeKVPairs();
-        kvPairs.source.extractFromEdge(names, values,
-                this.vertexIdsIndex.sourceIndexes);
-        kvPairs.target.extractFromEdge(names, values,
-                this.vertexIdsIndex.targetIndexes);
+        kvPairs.source.extractFromEdge(names, values, this.vertexIdsIndex.sourceIndexes);
+        kvPairs.target.extractFromEdge(names, values, this.vertexIdsIndex.targetIndexes);
         kvPairs.extractProperties(names, values);
 
         List<Vertex> sources = kvPairs.source.buildVertices(false);
@@ -193,8 +193,8 @@ public class EdgeBuilder extends ElementBuilder<Edge> {
                             "The source/target field must contains some " +
                             "columns when id strategy is CUSTOMIZE");
         } else {
-            throw new IllegalArgumentException(
-                      "Unsupported AUTOMATIC id strategy for hugegraph-loader");
+            throw new IllegalArgumentException("Unsupported AUTOMATIC id strategy " +
+                                               "for hugegraph-loader");
         }
     }
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/ElementBuilder.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/ElementBuilder.java
index 3090b92a..627fe8a8 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/ElementBuilder.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/ElementBuilder.java
@@ -53,7 +53,9 @@ import org.apache.hugegraph.structure.schema.SchemaLabel;
 import org.apache.hugegraph.structure.schema.VertexLabel;
 import org.apache.hugegraph.util.E;
 import org.apache.hugegraph.util.LongEncoding;
+
 import com.google.common.collect.ImmutableList;
+
 import org.apache.spark.sql.Row;
 
 public abstract class ElementBuilder<GE extends GraphElement> {
@@ -166,7 +168,7 @@ public abstract class ElementBuilder<GE extends GraphElement> {
             Collection<String> missed = CollectionUtils.subtract(requiredKeys,
                                                                  keys);
             E.checkArgument(false, "All non-null property keys %s of '%s' " +
-                            "must be setted, but missed keys %s",
+                            "must be set, but missed keys %s",
                             requiredKeys, this.schemaLabel().name(), missed);
         }
     }
@@ -258,8 +260,7 @@ public abstract class ElementBuilder<GE extends GraphElement> {
                                   Object... primaryValues) {
         StringBuilder vertexId = new StringBuilder();
         StringBuilder vertexKeysId = new StringBuilder();
-        for (int i = 0; i < primaryValues.length; i++) {
-            Object value = primaryValues[i];
+        for (Object value : primaryValues) {
             String pkValue;
             if (value instanceof Number || value instanceof Date) {
                 pkValue = LongEncoding.encodeNumber(value);
@@ -391,7 +392,7 @@ public abstract class ElementBuilder<GE extends GraphElement> {
         // The idField(raw field), like: id
         private String idField;
         /*
-         * The multiple idValues(spilted and mapped)
+         * The multiple idValues(split and mapped)
          * like: A|B|C -> [1,2,3]
          */
         private List<Object> idValues;
@@ -494,7 +495,7 @@ public abstract class ElementBuilder<GE extends GraphElement> {
                 }
                 String key = mapping().mappingField(fieldName);
                 if (primaryKeys.contains(key)) {
-                    // Don't put priamry key/values into general properties
+                    // Don't put primary key/values into general properties
                     int index = primaryKeys.indexOf(key);
                     Object pkValue = mappingValue(fieldName, fieldValue);
                     this.pkValues[index] = pkValue;
@@ -566,7 +567,7 @@ public abstract class ElementBuilder<GE extends GraphElement> {
          */
         private String pkName;
         /*
-         * The primary values(splited and mapped)
+         * The primary values(split and mapped)
          * like: m|v -> [marko,vadas]
          */
         private List<Object> pkValues;
@@ -593,7 +594,7 @@ public abstract class ElementBuilder<GE extends GraphElement> {
                 }
                 String key = mapping().mappingField(fieldName);
                 if (!handledPk && primaryKeys.contains(key)) {
-                    // Don't put priamry key/values into general properties
+                    // Don't put primary key/values into general properties
                     List<Object> rawPkValues = splitField(fieldName,
                                                           fieldValue);
                     this.pkValues = rawPkValues.stream().map(rawPkValue -> {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/SchemaCache.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/SchemaCache.java
index 64cd2878..50a0fa5f 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/SchemaCache.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/SchemaCache.java
@@ -29,6 +29,7 @@ import org.apache.hugegraph.loader.exception.LoadException;
 import org.apache.hugegraph.structure.schema.EdgeLabel;
 import org.apache.hugegraph.structure.schema.PropertyKey;
 import org.apache.hugegraph.structure.schema.VertexLabel;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/VertexBuilder.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/VertexBuilder.java
index fde60464..2ba493fc 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/VertexBuilder.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/builder/VertexBuilder.java
@@ -103,8 +103,8 @@ public class VertexBuilder extends ElementBuilder<Vertex> {
                          this.vertexLabel.idStrategy(), name);
         } else {
             // The id strategy is automatic
-            throw new IllegalArgumentException(
-                      "Unsupported AUTOMATIC id strategy for hugegraph-loader");
+            throw new IllegalArgumentException("Unsupported AUTOMATIC id strategy for " +
+                                               "hugegraph-loader");
         }
     }
 }
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/constant/ElemType.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/constant/ElemType.java
index d928baf2..c5fcfc6f 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/constant/ElemType.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/constant/ElemType.java
@@ -25,7 +25,7 @@ public enum ElemType {
 
     EDGE("edges");
 
-    private String name;
+    private final String name;
 
     ElemType(String name) {
         this.name = name;
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadContext.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadContext.java
index f67d9907..6f14a4b9 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadContext.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadContext.java
@@ -108,7 +108,7 @@ public final class LoadContext implements Serializable {
         return this.noError;
     }
 
-    public void occuredError() {
+    public void occurredError() {
         this.noError = false;
     }
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadOptions.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadOptions.java
index 162fd157..5d92f096 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadOptions.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/executor/LoadOptions.java
@@ -325,9 +325,8 @@ public class LoadOptions implements Serializable {
         @Override
         public void validate(String name, String value) {
             if (!SSL_PROTOCOL.contains(value.toLowerCase())) {
-                throw new ParameterException(String.format(
-                          "Invalid --protocol '%s', valid value is %s",
-                          value, SSL_PROTOCOL));
+                throw new ParameterException(String.format("Invalid --protocol '%s', valid " +
+                                                           "value is %s", value, SSL_PROTOCOL));
             }
         }
     }
@@ -338,9 +337,9 @@ public class LoadOptions implements Serializable {
         public void validate(String name, String value) {
             File file = new File(value);
             if (!file.exists() || !file.isDirectory()) {
-                throw new ParameterException(String.format(
-                          "Ensure the directory exists and is indeed a " +
-                          "directory instead of a file: '%s'", value));
+                throw new ParameterException(String.format("Ensure the directory exists and is " +
+                                                           "indeed a directory instead of a " +
+                                                           "file: '%s'", value));
             }
         }
     }
@@ -351,9 +350,9 @@ public class LoadOptions implements Serializable {
         public void validate(String name, String value) {
             File file = new File(value);
             if (!file.exists() || !file.isFile()) {
-                throw new ParameterException(String.format(
-                          "Ensure the file exists and is indeed a file " +
-                          "instead of a directory: '%s'", value));
+                throw new ParameterException(String.format("Ensure the file exists and is " +
+                                                           "indeed a file instead of a " +
+                                                           "directory: '%s'", value));
             }
         }
     }
@@ -364,9 +363,8 @@ public class LoadOptions implements Serializable {
         public void validate(String name, String value) {
             int retry = Integer.parseInt(value);
             if (retry <= 0) {
-                throw new ParameterException(String.format(
-                          "Parameter '%s' should be positive, but got '%s'",
-                          name, value));
+                throw new ParameterException(String.format("Parameter '%s' should be positive, " +
+                                                           "but got '%s'", name, value));
             }
         }
     }
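
Each of these validators implements JCommander's IParameterValidator, so the
checks run while arguments are parsed rather than after loading starts. A
minimal example in the same style as the positive-retry check above (the class
name is illustrative):

    import com.beust.jcommander.IParameterValidator;
    import com.beust.jcommander.ParameterException;

    public class PositiveValidator implements IParameterValidator {

        @Override
        public void validate(String name, String value) {
            // Reject zero or negative values at parse time
            if (Integer.parseInt(value) <= 0) {
                throw new ParameterException(String.format(
                        "Parameter '%s' should be positive, but got '%s'",
                        name, value));
            }
        }
    }
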
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/failure/FailLogger.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/failure/FailLogger.java
index 9c82907f..1fc89b3f 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/failure/FailLogger.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/failure/FailLogger.java
@@ -22,8 +22,6 @@ package org.apache.hugegraph.loader.failure;
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -32,6 +30,7 @@ import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.Writer;
 import java.nio.charset.Charset;
+import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.HashSet;
 import java.util.Set;
@@ -44,12 +43,12 @@ import org.apache.hugegraph.loader.exception.ParseException;
 import org.apache.hugegraph.loader.exception.ReadException;
 import org.apache.hugegraph.loader.executor.LoadContext;
 import org.apache.hugegraph.loader.executor.LoadOptions;
-import org.apache.hugegraph.loader.util.LoadUtil;
-import org.slf4j.Logger;
-
 import org.apache.hugegraph.loader.mapping.InputStruct;
+import org.apache.hugegraph.loader.util.LoadUtil;
 import org.apache.hugegraph.util.JsonUtil;
 import org.apache.hugegraph.util.Log;
+import org.slf4j.Logger;
+
 import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hashing;
 
@@ -141,32 +140,30 @@ public final class FailLogger {
 
     private void removeDupLines() {
         Charset charset = Charset.forName(this.struct.input().charset());
-        File dedupFile = new File(this.file.getAbsolutePath() +
-                                   Constants.DEDUP_SUFFIX);
-        try (InputStream is = new FileInputStream(this.file);
+        File dedupFile = new File(this.file.getAbsolutePath() + Constants.DEDUP_SUFFIX);
+        try (InputStream is = Files.newInputStream(this.file.toPath());
              Reader ir = new InputStreamReader(is, charset);
              BufferedReader reader = new BufferedReader(ir);
              // upper is input, below is output
-             OutputStream os = new FileOutputStream(dedupFile);
+             OutputStream os = Files.newOutputStream(dedupFile.toPath());
              Writer ow = new OutputStreamWriter(os, charset);
              BufferedWriter writer = new BufferedWriter(ow)) {
-            Set<Integer> writedLines = new HashSet<>();
+            Set<Integer> wroteLines = new HashSet<>();
             HashFunction hashFunc = Hashing.murmur3_32();
-            for (String tipsLine, dataLine;
-                 (tipsLine = reader.readLine()) != null &&
-                 (dataLine = reader.readLine()) != null;) {
+            for (String tipsLine, dataLine; (tipsLine = reader.readLine()) != null &&
+                                            (dataLine = reader.readLine()) != null; ) {
                 /*
                  * Hash data line to remove duplicate lines
                  * Misjudgment may occur, but the probability is extremely low
                  */
                 int hash = hashFunc.hashString(dataLine, charset).asInt();
-                if (!writedLines.contains(hash)) {
+                if (!wroteLines.contains(hash)) {
                     writer.write(tipsLine);
                     writer.newLine();
                     writer.write(dataLine);
                     writer.newLine();
-                    // Save the hash value of writed line
-                    writedLines.add(hash);
+                    // Save the hash value of wrote line
+                    wroteLines.add(hash);
                 }
             }
         } catch (IOException e) {
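
The dedup loop above stores only a 32-bit murmur3 hash per data line instead of
the line itself, trading a tiny collision risk (the "misjudgment" the comment
mentions) for constant per-line memory. A condensed sketch of the same idea
over an in-memory list, using the same Guava Hashing API:

    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    import com.google.common.hash.HashFunction;
    import com.google.common.hash.Hashing;

    final class LineDedup {
        static List<String> dedup(List<String> lines) {
            HashFunction hashFunc = Hashing.murmur3_32();
            Set<Integer> seen = new HashSet<>();
            List<String> unique = new ArrayList<>();
            for (String line : lines) {
                // 4 bytes per distinct line; a collision silently drops a
                // non-duplicate line, but the probability is extremely low
                int hash = hashFunc.hashString(line, StandardCharsets.UTF_8).asInt();
                if (seen.add(hash)) {
                    unique.add(line);
                }
            }
            return unique;
        }
    }
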
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphDeserialization.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphDeserialization.java
index 31dafcb9..431e45cc 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphDeserialization.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphDeserialization.java
@@ -43,8 +43,7 @@ public class HugeGraphDeserialization implements DebeziumDeserializationSchema<S
     private static final Logger LOG = Log.logger(HugeGraphDeserialization.class);
 
     @Override
-    public void deserialize(SourceRecord sourceRecord,
-                            Collector<String> collector) throws Exception {
+    public void deserialize(SourceRecord sourceRecord, Collector<String> collector) {
         ObjectMapper mapper = new ObjectMapper();
         ObjectNode result = mapper.createObjectNode();
 
@@ -77,7 +76,7 @@ public class HugeGraphDeserialization implements DebeziumDeserializationSchema<S
 
         result.set(Constants.CDC_DATA, rootNode);
         result.put(Constants.CDC_OP, op);
-        LOG.debug("Loaded data: {}", result.toString());
+        LOG.debug("Loaded data: {}", result);
         collector.collect(result.toString());
     }
 
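
The deserializer assembles a small JSON envelope (the row data plus a CDC op
flag) with Jackson's tree model before handing a string to Flink. A minimal
sketch of that envelope construction; the "data"/"op" field names are assumed
stand-ins for Constants.CDC_DATA and Constants.CDC_OP:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    final class CdcEnvelope {
        static String wrap(ObjectNode rowData, String op) {
            ObjectMapper mapper = new ObjectMapper();
            ObjectNode result = mapper.createObjectNode();
            result.set("data", rowData); // assumed value of Constants.CDC_DATA
            result.put("op", op);        // assumed value of Constants.CDC_OP
            return result.toString();
        }
    }
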
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphFlinkCDCLoader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphFlinkCDCLoader.java
index 71de57e5..2aa4b0c3 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphFlinkCDCLoader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphFlinkCDCLoader.java
@@ -92,8 +92,8 @@ public class HugeGraphFlinkCDCLoader {
             host = uriBuilder.getHost();
             port = uriBuilder.getPort();
         } catch (URISyntaxException e) {
-            throw new IllegalArgumentException(
-                    String.format("Failed to parse url(%s) to get hostName and port", url), e);
+            throw new IllegalArgumentException(String.format("Failed to parse url(%s) to get " +
+                                                             "hostName and port", url), e);
         }
         return MySqlSource.<String>builder()
                           .hostname(host)
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphSinkFunction.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphSinkFunction.java
index 7e14e310..b5d4740c 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphSinkFunction.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphSinkFunction.java
@@ -48,7 +48,7 @@ public class HugeGraphSinkFunction<T> extends RichSinkFunction<T>
     }
 
     @Override
-    public void invoke(T value, Context context) throws Exception {
+    public void invoke(T value, Context context) {
         this.outputFormat.writeRecord(value);
     }
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/EdgeMapping.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/EdgeMapping.java
index c9340b9c..3277ecdc 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/EdgeMapping.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/EdgeMapping.java
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.hugegraph.loader.constant.ElemType;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/ElementMapping.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/ElementMapping.java
index 66108703..f41758b9 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/ElementMapping.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/ElementMapping.java
@@ -33,6 +33,7 @@ import org.apache.hugegraph.loader.constant.ElemType;
 import org.apache.hugegraph.loader.source.InputSource;
 import org.apache.hugegraph.structure.graph.UpdateStrategy;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyOrder;
 import com.google.common.collect.ImmutableSet;
@@ -99,23 +100,23 @@ public abstract class ElementMapping implements Checkable, Serializable {
         }
         List<String> header = Arrays.asList(source.header());
         if (!this.selectedFields.isEmpty()) {
-            E.checkArgument(header.containsAll(this.selectedFields),
+            E.checkArgument(new HashSet<>(header).containsAll(this.selectedFields),
                             "The all keys %s of selected must be existed " +
                             "in header %s", this.selectedFields, header);
         }
         if (!this.ignoredFields.isEmpty()) {
-            E.checkArgument(header.containsAll(this.ignoredFields),
+            E.checkArgument(new HashSet<>(header).containsAll(this.ignoredFields),
                             "The all keys %s of ignored must be existed " +
                             "in header %s", this.ignoredFields, header);
         }
         if (!this.mappingFields.isEmpty()) {
-            E.checkArgument(header.containsAll(this.mappingFields.keySet()),
+            E.checkArgument(new HashSet<>(header).containsAll(this.mappingFields.keySet()),
                             "The all keys %s of field_mapping must be " +
                             "existed in header",
                             this.mappingFields.keySet(), header);
         }
         if (!this.mappingValues.isEmpty()) {
-            E.checkArgument(header.containsAll(this.mappingValues.keySet()),
+            E.checkArgument(new HashSet<>(header).containsAll(this.mappingValues.keySet()),
                             "The all keys %s of value_mapping must be " +
                             "existed in header",
                             this.mappingValues.keySet(), header);
@@ -178,7 +179,7 @@ public abstract class ElementMapping implements Checkable, Serializable {
     }
 
     public long batchSize() {
-       return this.batchSize;
+        return this.batchSize;
     }
 
     public Set<String> selectedFields() {
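
Wrapping header in a HashSet before containsAll is the point of this hunk:
List.contains is a linear scan, so List.containsAll over m keys costs O(n*m),
while the HashSet copy makes it O(n + m) expected time. A self-contained
illustration (the sizes are arbitrary):

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;

    final class ContainsAllDemo {
        public static void main(String[] args) {
            List<String> header = new ArrayList<>();
            for (int i = 0; i < 100_000; i++) {
                header.add("col" + i);
            }
            List<String> keys = header.subList(50_000, 60_000);

            long t0 = System.nanoTime();
            boolean inList = header.containsAll(keys);  // linear scan per key
            long t1 = System.nanoTime();
            boolean inSet = new HashSet<>(header).containsAll(keys); // hash lookups
            long t2 = System.nanoTime();
            System.out.printf("list: %d ms, set: %d ms (%b/%b)%n",
                              (t1 - t0) / 1_000_000, (t2 - t1) / 1_000_000,
                              inList, inSet);
        }
    }
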
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/InputStruct.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/InputStruct.java
index b545fc01..e25bd292 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/InputStruct.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/InputStruct.java
@@ -28,6 +28,7 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.hugegraph.loader.constant.Checkable;
 import org.apache.hugegraph.loader.source.InputSource;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyOrder;
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/LoadMapping.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/LoadMapping.java
index 7a403d25..c21e7b77 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/LoadMapping.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/LoadMapping.java
@@ -42,6 +42,7 @@ import org.apache.hugegraph.loader.util.LoadUtil;
 import org.apache.hugegraph.loader.util.MappingUtil;
 import org.apache.hugegraph.loader.source.file.FileSource;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyOrder;
@@ -142,7 +143,7 @@ public class LoadMapping implements Checkable {
                                             failureFile.headerFile);
                 }
                 List<String> header = JsonUtil.convertList(json, String.class);
-                source.header(header.toArray(new String[] {}));
+                source.header(header.toArray(new String[]{}));
             }
             // Set failure data path
             source.path(failureFile.dataFile.getAbsolutePath());
@@ -187,8 +188,8 @@ public class LoadMapping implements Checkable {
                 return struct;
             }
         }
-        throw new IllegalArgumentException(String.format(
-                  "There is no input struct with id '%s'", id));
+        throw new IllegalArgumentException(String.format("There is no input struct with id '%s'",
+                                                         id));
     }
 
     private static class FailureFile {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/VertexMapping.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/VertexMapping.java
index 5b12d3f2..ef0f44cd 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/VertexMapping.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/mapping/VertexMapping.java
@@ -20,6 +20,7 @@
 package org.apache.hugegraph.loader.mapping;
 
 import org.apache.hugegraph.loader.constant.ElemType;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadDistributeMetrics.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadDistributeMetrics.java
index 232fdeac..4981b705 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadDistributeMetrics.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadDistributeMetrics.java
@@ -35,8 +35,8 @@ import java.util.Map;
 public final class LoadDistributeMetrics implements Serializable {
 
     private final InputStruct struct;
-    private   Map<String, Metrics> vertexDisMetrics;
-    private   Map<String, Metrics> edgeDisMetrics;
+    private final Map<String, Metrics> vertexDisMetrics;
+    private final Map<String, Metrics> edgeDisMetrics;
 
     public LoadDistributeMetrics(InputStruct struct) {
         this.struct = struct;
@@ -54,16 +54,20 @@ public final class LoadDistributeMetrics implements Serializable {
         for (VertexMapping mapping : this.struct.vertices()) {
             Metrics metrics = this.vertexDisMetrics.get(mapping.label());
             metrics.insertSuccess = sc.longAccumulator(mapping.label() +
-                    Constants.UNDERLINE_STR + Constants.LOAD_DATA_INSERT_SUFFIX);
+                                                       Constants.UNDERLINE_STR +
+                                                       Constants.LOAD_DATA_INSERT_SUFFIX);
             metrics.parseSuccess = sc.longAccumulator(mapping.label() +
-                    Constants.UNDERLINE_STR + Constants.LOAD_DATA_PARSE_SUFFIX);
+                                                      Constants.UNDERLINE_STR +
+                                                      Constants.LOAD_DATA_PARSE_SUFFIX);
         }
         for (EdgeMapping mapping : this.struct.edges()) {
             Metrics metrics = this.edgeDisMetrics.get(mapping.label());
             metrics.insertSuccess = sc.longAccumulator(mapping.label() +
-                    Constants.UNDERLINE_STR + Constants.LOAD_DATA_INSERT_SUFFIX);
+                                                       Constants.UNDERLINE_STR +
+                                                       Constants.LOAD_DATA_INSERT_SUFFIX);
             metrics.parseSuccess = sc.longAccumulator(mapping.label() +
-                    Constants.UNDERLINE_STR + Constants.LOAD_DATA_PARSE_SUFFIX);
+                                                      Constants.UNDERLINE_STR +
+                                                      Constants.LOAD_DATA_PARSE_SUFFIX);
         }
     }
 
@@ -100,7 +104,7 @@ public final class LoadDistributeMetrics implements Serializable {
     }
 
     public Long readVertexInsertSuccess() {
-        Long totalCnt = 0L;
+        long totalCnt = 0L;
         Collection<Metrics> values = vertexDisMetrics.values();
         for (Metrics metrics : values) {
             totalCnt += metrics.insertSuccess();
@@ -109,7 +113,7 @@ public final class LoadDistributeMetrics implements Serializable {
     }
 
     public Long readEdgeInsertSuccess() {
-        Long totalCnt = 0L;
+        long totalCnt = 0L;
         Collection<Metrics> values = edgeDisMetrics.values();
         for (Metrics metrics : values) {
             totalCnt += metrics.insertSuccess();
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadSummary.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadSummary.java
index b2349d99..78d322de 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadSummary.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/LoadSummary.java
@@ -31,7 +31,6 @@ import org.apache.hugegraph.loader.constant.ElemType;
 import org.apache.hugegraph.loader.mapping.InputStruct;
 import org.apache.hugegraph.loader.mapping.LoadMapping;
 import org.apache.hugegraph.util.InsertionOrderUtil;
-;
 
 public final class LoadSummary {
 
@@ -126,14 +125,14 @@ public final class LoadSummary {
 
     public void addTimeRange(ElemType type, long start, long end) {
         RangesTimer timer = type.isVertex() ? this.vertexRangesTimer :
-                                              this.edgeRangesTimer;
+                            this.edgeRangesTimer;
         timer.addTimeRange(start, end);
         this.loadRangesTimer.addTimeRange(start, end);
     }
 
     public void calculateTotalTime(ElemType type) {
         RangesTimer timer = type.isVertex() ? this.vertexRangesTimer :
-                                              this.edgeRangesTimer;
+                            this.edgeRangesTimer;
         AtomicLong elemTime = type.isVertex() ? this.vertexTime : this.edgeTime;
         elemTime.set(timer.totalTime());
         loadTime.set(this.loadRangesTimer.totalTime());
@@ -168,7 +167,7 @@ public final class LoadSummary {
     }
 
     public long loadRate(ElemType type) {
-        // Ensure vetex time and edge time has been set
+        // Ensure vertex time and edge time has been set
         this.calculateTotalTime(type);
 
         boolean isVertex = type.isVertex();
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/RangesTimer.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/RangesTimer.java
index d1855ff9..62345811 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/RangesTimer.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/metrics/RangesTimer.java
@@ -39,7 +39,7 @@ import java.util.List;
  * occupancy   |========|  |=====|
  *                 3     +    2
  * </pre>
- *
+ * <p>
  * It's thread safe
  */
 public class RangesTimer {
@@ -58,7 +58,7 @@ public class RangesTimer {
 
     public synchronized long totalTime() {
         if (!this.ranges.isEmpty()) {
-            long incrTime = this.caculate();
+            long incrTime = this.calculate();
             this.totalTime += incrTime;
             this.ranges.clear();
         }
@@ -67,14 +67,14 @@ public class RangesTimer {
 
     public synchronized void addTimeRange(long start, long end) {
         if (this.ranges.size() >= this.capacity) {
-            long incrTime = this.caculate();
+            long incrTime = this.calculate();
             this.totalTime += incrTime;
             this.ranges.clear();
         }
         this.ranges.add(new TimeRange(start, end));
     }
 
-    private long caculate() {
+    private long calculate() {
         assert !this.ranges.isEmpty();
         this.ranges.sort((o1, o2) -> (int) (o1.start() - o2.start()));
         long time = 0L;
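
calculate() (renamed here from caculate) sorts the buffered ranges by start
time and counts each overlapped span only once, which is the interval-union
computation pictured in the class comment. A standalone version of that step,
assuming half-open [start, end) ranges (the record type is illustrative):

    import java.util.Arrays;

    final class IntervalUnion {
        record Range(long start, long end) {}

        // Total time covered by the union of the ranges (overlaps counted once)
        static long totalTime(Range[] ranges) {
            Arrays.sort(ranges, (a, b) -> Long.compare(a.start(), b.start()));
            long total = 0L;
            long windowEnd = Long.MIN_VALUE;
            for (Range r : ranges) {
                if (r.start() >= windowEnd) {
                    total += r.end() - r.start();  // disjoint: count whole range
                } else if (r.end() > windowEnd) {
                    total += r.end() - windowEnd;  // overlap: count only the tail
                }
                windowEnd = Math.max(windowEnd, r.end());
            }
            return total;
        }
    }
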
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/parser/TextLineParser.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/parser/TextLineParser.java
index 108ea01e..546283e3 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/parser/TextLineParser.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/parser/TextLineParser.java
@@ -47,9 +47,9 @@ public class TextLineParser implements LineParser {
             int extra = columns.length - header.length;
             if (!this.tailColumnEmpty(columns, extra)) {
                 throw new ReadException(rawLine,
-                          "The column length '%s' doesn't match with " +
-                          "header length '%s' on: %s",
-                          columns.length, header.length, rawLine);
+                                        "The column length '%s' doesn't match with " +
+                                        "header length '%s' on: %s",
+                                        columns.length, header.length, rawLine);
             }
             String[] subColumns = new String[header.length];
             System.arraycopy(columns, 0, subColumns, 0, header.length);
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/InputProgress.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/InputProgress.java
index 37829b5f..8d97f2ea 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/InputProgress.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/InputProgress.java
@@ -23,7 +23,7 @@ import java.util.Set;
 
 import org.apache.hugegraph.loader.mapping.InputStruct;
 import org.apache.hugegraph.loader.source.SourceType;
-import org.apache.hugegraph.util.InsertionOrderUtil;;
+import org.apache.hugegraph.util.InsertionOrderUtil;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/LoadProgress.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/LoadProgress.java
index 0de17411..9ad611a1 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/LoadProgress.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/progress/LoadProgress.java
@@ -39,6 +39,7 @@ import org.apache.hugegraph.loader.executor.LoadContext;
 import org.apache.hugegraph.loader.executor.LoadOptions;
 import org.apache.hugegraph.loader.mapping.InputStruct;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 /**
@@ -46,13 +47,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
  * continue loading when the last work was dropped out halfway.
  * The LoadProgress will only be operated by a single thread.
  */
-public final class LoadProgress  {
+public final class LoadProgress {
 
-    @JsonProperty("vertex_propgress")
+    @JsonProperty("vertex_progress")
     private long vertexLoaded;
-    @JsonProperty("edge_propgress")
+    @JsonProperty("edge_progress")
     private long edgeLoaded;
-    @JsonProperty("input_propgress")
+    @JsonProperty("input_progress")
     private final Map<String, InputProgress> inputProgress;
 
     public LoadProgress() {
@@ -81,7 +82,7 @@ public final class LoadProgress  {
         return this.inputProgress;
     }
 
-    public long totalInputReaded() {
+    public long totalInputRead() {
         long count = 0L;
         for (InputProgress inputProgress : this.inputProgress.values()) {
             Set<InputItemProgress> itemProgresses = inputProgress.loadedItems();
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/InputReader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/InputReader.java
index 56b22f88..367d7cae 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/InputReader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/InputReader.java
@@ -54,8 +54,8 @@ public interface InputReader extends AutoCloseableIterator<Line> {
             case JDBC:
                 return new JDBCReader((JDBCSource) source);
             default:
-                throw new AssertionError(String.format(
-                          "Unsupported input source '%s'", source.type()));
+                throw new AssertionError(String.format("Unsupported input source '%s'",
+                                                       source.type()));
         }
     }
 }
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileLineFetcher.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileLineFetcher.java
index 7ed83cee..d6815e3d 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileLineFetcher.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileLineFetcher.java
@@ -234,7 +234,7 @@ public class FileLineFetcher extends LineFetcher {
 
     private static BufferedReader createBufferedReader(InputStream stream,
                                                        FileSource source)
-                                                       throws Exception {
+            throws Exception {
         E.checkNotNull(stream, "stream");
         try {
             Reader csr = createCompressReader(stream, source);
@@ -251,7 +251,7 @@ public class FileLineFetcher extends LineFetcher {
 
     private static Reader createCompressReader(InputStream stream,
                                                FileSource source)
-                                               throws Exception {
+            throws Exception {
         Compression compression = source.compression();
         String charset = source.charset();
         switch (compression) {
@@ -259,10 +259,9 @@ public class FileLineFetcher extends LineFetcher {
                 return new InputStreamReader(stream, charset);
             case SNAPPY_RAW:
                 Configuration config = new Configuration();
-                CompressionCodec codec = ReflectionUtils.newInstance(
-                                         SnappyCodec.class, config);
+                CompressionCodec codec = ReflectionUtils.newInstance(SnappyCodec.class, config);
                 CompressionInputStream sis = codec.createInputStream(stream,
-                                             codec.createDecompressor());
+                                                                     codec.createDecompressor());
                 return new InputStreamReader(sis, charset);
             case GZIP:
             case BZ2:
@@ -274,8 +273,8 @@ public class FileLineFetcher extends LineFetcher {
             case LZ4_BLOCK:
             case LZ4_FRAMED:
                 CompressorStreamFactory factory = new CompressorStreamFactory();
-                CompressorInputStream cis = factory.createCompressorInputStream(
-                                            compression.string(), stream);
+                CompressorInputStream cis =
+                        factory.createCompressorInputStream(compression.string(), stream);
                 return new InputStreamReader(cis, charset);
             default:
                 throw new LoadException("Unsupported compression format '%s'",
@@ -293,9 +292,8 @@ public class FileLineFetcher extends LineFetcher {
             case JSON:
                 return new JsonLineParser();
             default:
-                throw new AssertionError(String.format(
-                          "Unsupported file format '%s' of source '%s'",
-                          format, source));
+                throw new AssertionError(String.format("Unsupported file format '%s' of " +
+                                                       "source '%s'", format, source));
         }
     }
 }
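
For the Commons Compress formats in the lower switch branches, one
CompressorStreamFactory call keyed by the format name replaces per-format
wiring. A minimal hedged usage sketch (format fixed to GZIP and charset to
UTF-8 for brevity):

    import java.io.BufferedReader;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.compress.compressors.CompressorException;
    import org.apache.commons.compress.compressors.CompressorInputStream;
    import org.apache.commons.compress.compressors.CompressorStreamFactory;

    final class CompressedLines {
        static BufferedReader open(InputStream raw) throws CompressorException {
            CompressorInputStream cis = new CompressorStreamFactory()
                    .createCompressorInputStream(CompressorStreamFactory.GZIP, raw);
            return new BufferedReader(
                   new InputStreamReader(cis, StandardCharsets.UTF_8));
        }
    }
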
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileReader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileReader.java
index 6d71b276..cf5b0bc5 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileReader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/FileReader.java
@@ -66,8 +66,7 @@ public abstract class FileReader extends AbstractReader {
     protected abstract FileLineFetcher createLineFetcher();
 
     @Override
-    public void init(LoadContext context, InputStruct struct)
-                     throws InitException {
+    public void init(LoadContext context, InputStruct struct) throws InitException {
         this.progress(context, struct);
 
         List<Readable> readableList;
@@ -181,10 +180,10 @@ public abstract class FileReader extends AbstractReader {
     }
 
     private LoadStatus checkLastLoadStatus(Readable readable) {
-        // NOTE: calculate check sum is a bit time consuming
+        // NOTE: calculate check sum is a bit time-consuming
         InputItemProgress input = readable.inputItemProgress();
         InputItemProgress loaded = this.oldProgress.matchLoadedItem(input);
-        // The file has been loaded before and it is not changed
+        // The file has been loaded before, and it is not changed
         if (loaded != null) {
             this.newProgress.addLoadedItem(loaded);
             return LoadStatus.LOADED;
@@ -192,7 +191,7 @@ public abstract class FileReader extends AbstractReader {
 
         InputItemProgress loading = this.oldProgress.matchLoadingItem(input);
         if (loading != null) {
-            // The file has been loaded half before and it is not changed
+            // The file has been loaded half before, and it is not changed
             this.newProgress.addLoadingItem(loading);
             return LoadStatus.LOADED_HALF;
         } else {
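
The resume logic whose comments are touched above is a three-way decision: skip files that were fully loaded and unchanged, resume files that were half loaded, and read anything else from scratch. A simplified, self-contained sketch of that decision (the Status enum and the checksum-to-offset map are hypothetical stand-ins for the loader's InputItemProgress bookkeeping):

    import java.util.Map;

    public class ResumeDecisionSketch {
        enum Status { LOADED, LOADED_HALF, NOT_LOADED }

        // oldProgress maps a file checksum to the byte offset already consumed
        static Status check(Map<String, Long> oldProgress, String checksum, long fileSize) {
            Long offset = oldProgress.get(checksum);
            if (offset == null) {
                return Status.NOT_LOADED;   // never seen: read from the beginning
            }
            if (offset == fileSize) {
                return Status.LOADED;       // unchanged and fully consumed: skip
            }
            return Status.LOADED_HALF;      // partially consumed: resume at offset
        }
    }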
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
index e773ce74..e53190be 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
@@ -23,6 +23,7 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -52,9 +53,8 @@ public class LocalFileReader extends FileReader {
         List<Readable> files = new ArrayList<>();
         if (file.isFile()) {
             if (!filter.reserved(file.getName())) {
-                throw new LoadException(
-                          "Please check file name and extensions, ensure " +
-                          "that at least one file is available for reading");
+                throw new LoadException("Please check file name and extensions, ensure that " +
+                                        "at least one file is available for reading");
             }
             files.add(new LocalFile(file));
         } else {
@@ -119,7 +119,7 @@ public class LocalFileReader extends FileReader {
 
         @Override
         public InputStream open() throws IOException {
-            return new FileInputStream(this.file);
+            return Files.newInputStream(this.file.toPath());
         }
 
         @Override
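
The Files.newInputStream change above is a common modernization: unlike new FileInputStream(file), the NIO factory carries no historical finalizer overhead and reports missing files with the more specific NoSuchFileException. Equivalent usage, with an assumed file name:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class OpenStreamSketch {
        public static void main(String[] args) throws Exception {
            // Same contract as new FileInputStream("vertex_person.csv")
            try (InputStream in = Files.newInputStream(Paths.get("vertex_person.csv"))) {
                System.out.println(in.read()); // first byte, just to exercise the stream
            }
        }
    }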
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/OrcFileLineFetcher.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/OrcFileLineFetcher.java
index 038991cb..10101fb4 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/OrcFileLineFetcher.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/OrcFileLineFetcher.java
@@ -88,8 +88,7 @@ public class OrcFileLineFetcher extends FileLineFetcher {
         this.openReader(readable);
         StructObjectInspector inspector;
         try {
-            inspector = (StructObjectInspector)
-                        this.reader.getObjectInspector();
+            inspector = (StructObjectInspector) this.reader.getObjectInspector();
             return this.parseHeader(inspector);
         } finally {
             try {
@@ -107,8 +106,7 @@ public class OrcFileLineFetcher extends FileLineFetcher {
             OrcFile.ReaderOptions options = OrcFile.readerOptions(this.conf);
             this.reader = OrcFile.createReader(path, options);
             this.recordReader = this.reader.rows();
-            this.inspector = (StructObjectInspector) this.reader
-                                                         .getObjectInspector();
+            this.inspector = (StructObjectInspector) this.reader.getObjectInspector();
             this.row = null;
         } catch (IOException e) {
             throw new LoadException("Failed to open orc reader for '%s'",
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/hdfs/HDFSFileReader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/hdfs/HDFSFileReader.java
index b161328c..a952f785 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/hdfs/HDFSFileReader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/hdfs/HDFSFileReader.java
@@ -70,9 +70,8 @@ public class HDFSFileReader extends FileReader {
 
     private void enableKerberos(HDFSSource source) throws IOException {
         KerberosConfig kerberosConfig = source.kerberosConfig();
-        if (kerberosConfig != null && kerberosConfig.enable() ) {
-            System.setProperty("java.security.krb5.conf",
-                               kerberosConfig.krb5Conf());
+        if (kerberosConfig != null && kerberosConfig.enable()) {
+            System.setProperty("java.security.krb5.conf", kerberosConfig.krb5Conf());
             UserGroupInformation.setConfiguration(this.conf);
             UserGroupInformation.loginUserFromKeytab(kerberosConfig.principal(),
                                                      kerberosConfig.keyTab());
@@ -106,9 +105,8 @@ public class HDFSFileReader extends FileReader {
         List<Readable> paths = new ArrayList<>();
         if (this.hdfs.isFile(path)) {
             if (!filter.reserved(path.getName())) {
-                throw new LoadException(
-                          "Please check path name and extensions, ensure " +
-                          "that at least one path is available for reading");
+                throw new LoadException("Please check path name and extensions, ensure that " +
+                                        "at least one path is available for reading");
             }
             paths.add(new HDFSFile(this.hdfs, path));
         } else {
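
The Kerberos branch tidied above reduces to three steps: point the JVM at krb5.conf, hand Hadoop its security configuration, and log in from a keytab. A standalone sketch (the principal, keytab, and config paths are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosLoginSketch {
        public static void main(String[] args) throws Exception {
            // 1. Tell the JVM where the Kerberos realm/KDC settings live
            System.setProperty("java.security.krb5.conf", "/etc/krb5.conf");
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "kerberos");
            // 2. Hand the Hadoop security layer its configuration
            UserGroupInformation.setConfiguration(conf);
            // 3. Non-interactive login from a keytab
            UserGroupInformation.loginUserFromKeytab("loader@EXAMPLE.COM",
                                                     "/etc/security/loader.keytab");
            System.out.println(UserGroupInformation.getLoginUser());
        }
    }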
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCReader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCReader.java
index aabe69b1..8303be14 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCReader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCReader.java
@@ -56,8 +56,7 @@ public class JDBCReader extends AbstractReader {
     }
 
     @Override
-    public void init(LoadContext context, InputStruct struct)
-                     throws InitException {
+    public void init(LoadContext context, InputStruct struct) throws InitException {
         this.progress(context, struct);
         if (!this.source.existsCustomSQL()) {
             try {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCUtil.java
index 2a87e52e..2ddb5a63 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/JDBCUtil.java
@@ -106,22 +106,12 @@ public final class JDBCUtil {
         for (int i = 0; i < length; ++i) {
             char c = sql.charAt(i);
             switch (c) {
-                case '\u0000':
-                    needsEscape = true;
-                    break;
                 case '\n':
-                    needsEscape = true;
-                    break;
                 case '\r':
-                    needsEscape = true;
-                    break;
-                case '\u001a':
-                    needsEscape = true;
-                    break;
                 case '\'':
-                    needsEscape = true;
-                    break;
                 case '\\':
+                case '\u001a':
+                case '\u0000':
                     needsEscape = true;
                     break;
                 default:
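
The JDBCUtil hunk above is a pure fall-through consolidation: six characters that previously each set needsEscape separately now share one case group. A self-contained sketch of the same scan (the method name is hypothetical):

    public class EscapeScanSketch {
        // True if the value contains a character that MySQL-style quoting must escape
        static boolean needsEscape(String sql) {
            for (int i = 0; i < sql.length(); i++) {
                switch (sql.charAt(i)) {
                    case '\n':
                    case '\r':
                    case '\'':
                    case '\\':
                    case '\u001a':
                    case '\u0000':
                        return true; // grouped labels fall through to one action
                    default:
                        break;
                }
            }
            return false;
        }

        public static void main(String[] args) {
            System.out.println(needsEscape("O'Brien")); // true
            System.out.println(needsEscape("plain"));   // false
        }
    }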
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/RowFetcher.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/RowFetcher.java
index d4bab3df..24ac52b6 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/RowFetcher.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/jdbc/RowFetcher.java
@@ -91,7 +91,7 @@ public class RowFetcher {
             throw e;
         }
         E.checkArgument(ArrayUtils.isNotEmpty(this.columns),
-                        "The colmuns of the table '%s' shouldn't be empty",
+                        "The columns of the table '%s' shouldn't be empty",
                         this.source.table());
         return this.columns;
     }
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/DeserializeException.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/DeserializeException.java
index 1d649c2b..dc008e3f 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/DeserializeException.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/DeserializeException.java
@@ -22,9 +22,10 @@ package org.apache.hugegraph.loader.serializer;
 import java.util.Arrays;
 
 import org.apache.hugegraph.loader.exception.LoadException;
+
 import com.fasterxml.jackson.databind.node.JsonNodeType;
 
-public class DeserializeException extends LoadException  {
+public class DeserializeException extends LoadException {
 
     private static final long serialVersionUID = -7837901607110262081L;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputProgressDeser.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputProgressDeser.java
index eb11e24b..33cd1f68 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputProgressDeser.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputProgressDeser.java
@@ -27,6 +27,7 @@ import org.apache.hugegraph.loader.progress.InputItemProgress;
 import org.apache.hugegraph.loader.progress.InputProgress;
 import org.apache.hugegraph.loader.progress.FileItemProgress;
 import org.apache.hugegraph.loader.source.SourceType;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
@@ -42,8 +43,7 @@ public class InputProgressDeser extends JsonDeserializer<InputProgress> {
 
     @Override
     public InputProgress deserialize(JsonParser parser,
-                                     DeserializationContext context)
-                                     throws IOException {
+                                     DeserializationContext context) throws IOException {
         JsonNode node = parser.getCodec().readTree(parser);
         return readInputProgress(node);
     }
@@ -64,15 +64,12 @@ public class InputProgressDeser extends JsonDeserializer<InputProgress> {
             case FILE:
             case HDFS:
                 loadedItems = (Set<InputItemProgress>) (Object)
-                              JsonUtil.convertSet(loadedItemsNode,
-                                                  FileItemProgress.class);
-                loadingItem = JsonUtil.convert(loadingItemNode,
-                                               FileItemProgress.class);
+                              JsonUtil.convertSet(loadedItemsNode, FileItemProgress.class);
+                loadingItem = JsonUtil.convert(loadingItemNode, FileItemProgress.class);
                 break;
             case JDBC:
             default:
-                throw new AssertionError(String.format(
-                          "Unsupported input source '%s'", type));
+                throw new AssertionError(String.format("Unsupported input source '%s'", type));
         }
         return new InputProgress(sourceType, loadedItems, loadingItem);
     }
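
Both custom deserializers in this commit share one Jackson pattern: read the whole subtree with readTree, branch on a discriminator field, then convert the node to a concrete class. A minimal sketch of the pattern (the "type" field and FileLike target are hypothetical):

    import java.io.IOException;

    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.DeserializationContext;
    import com.fasterxml.jackson.databind.JsonDeserializer;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class TypeDispatchDeser extends JsonDeserializer<Object> {
        @Override
        public Object deserialize(JsonParser parser, DeserializationContext context)
                throws IOException {
            JsonNode node = parser.getCodec().readTree(parser);
            ObjectMapper mapper = (ObjectMapper) parser.getCodec();
            String type = node.get("type").asText();
            switch (type) {
                case "FILE":
                    return mapper.convertValue(node, FileLike.class); // concrete target
                default:
                    throw new AssertionError(String.format("Unsupported type '%s'", type));
            }
        }

        public static class FileLike { // hypothetical concrete type
            public String type;
            public String name;
        }
    }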
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputSourceDeser.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputSourceDeser.java
index 6e8ab253..c3b43fb6 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputSourceDeser.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/serializer/InputSourceDeser.java
@@ -27,6 +27,7 @@ import org.apache.hugegraph.loader.source.SourceType;
 import org.apache.hugegraph.loader.source.file.FileSource;
 import org.apache.hugegraph.loader.source.hdfs.HDFSSource;
 import org.apache.hugegraph.loader.source.jdbc.JDBCSource;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
@@ -42,8 +43,7 @@ public class InputSourceDeser extends JsonDeserializer<InputSource> {
 
     @Override
     public InputSource deserialize(JsonParser parser,
-                                   DeserializationContext context)
-                                   throws IOException {
+                                   DeserializationContext context) throws IOException {
         JsonNode node = parser.getCodec().readTree(parser);
         return readInputSource(node);
     }
@@ -69,8 +69,7 @@ public class InputSourceDeser extends JsonDeserializer<InputSource> {
                 objectNode.replace(FIELD_VENDOR, vendorNode);
                 return JsonUtil.convert(node, JDBCSource.class);
             default:
-                throw new AssertionError(String.format(
-                          "Unsupported input source '%s'", type));
+                throw new AssertionError(String.format("Unsupported input source '%s'", type));
         }
     }
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/AbstractSource.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/AbstractSource.java
index c71935b2..34243498 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/AbstractSource.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/AbstractSource.java
@@ -28,6 +28,7 @@ import org.apache.hugegraph.loader.constant.Constants;
 import org.apache.hugegraph.loader.source.file.ListFormat;
 import org.apache.hugegraph.util.CollectionUtil;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 public abstract class AbstractSource implements InputSource, Serializable {
@@ -51,8 +52,7 @@ public abstract class AbstractSource implements InputSource, Serializable {
             E.checkArgument(this.header.length > 0,
                             "The header can't be empty if " +
                             "it has been customized");
-            E.checkArgument(CollectionUtil.allUnique(
-                            Arrays.asList(this.header)),
+            E.checkArgument(CollectionUtil.allUnique(Arrays.asList(this.header)),
                             "The header can't contain duplicate columns, " +
                             "but got %s", Arrays.toString(this.header));
         }
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/InputSource.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/InputSource.java
index 29334157..631537a9 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/InputSource.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/InputSource.java
@@ -21,6 +21,7 @@ package org.apache.hugegraph.loader.source;
 
 import org.apache.hugegraph.loader.constant.Checkable;
 import org.apache.hugegraph.loader.source.file.FileSource;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 public interface InputSource extends Checkable {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/Compression.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/Compression.java
index 39ae693f..ed3e1b30 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/Compression.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/Compression.java
@@ -47,9 +47,9 @@ public enum Compression {
 
     PARQUET("parquet");
 
-    private String name;
+    private final String name;
 
-    private Compression(String name) {
+    Compression(String name) {
         this.name = name;
     }
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileFormat.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileFormat.java
index 23aad249..901212b8 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileFormat.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileFormat.java
@@ -29,7 +29,7 @@ public enum FileFormat {
 
     JSON(null);
 
-    private String delimiter;
+    private final String delimiter;
 
     FileFormat(String delimiter) {
         this.delimiter = delimiter;
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileSource.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileSource.java
index e102bda4..732aab45 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileSource.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/FileSource.java
@@ -24,6 +24,7 @@ import org.apache.hugegraph.loader.util.DateUtil;
 import org.apache.hugegraph.loader.source.AbstractSource;
 import org.apache.hugegraph.loader.source.SourceType;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyOrder;
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/ListFormat.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/ListFormat.java
index 7fae01be..2e598ad2 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/ListFormat.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/ListFormat.java
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.Set;
 
 import org.apache.hugegraph.loader.constant.Constants;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.Sets;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/SkippedLine.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/SkippedLine.java
index d5b1627d..a1b25e30 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/SkippedLine.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/file/SkippedLine.java
@@ -24,6 +24,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hugegraph.loader.constant.Constants;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 public class SkippedLine implements Serializable {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/HDFSSource.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/HDFSSource.java
index b7354000..5a556dd6 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/HDFSSource.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/HDFSSource.java
@@ -28,6 +28,7 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.hugegraph.loader.source.SourceType;
 import org.apache.hugegraph.loader.source.file.FileSource;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 public class HDFSSource extends FileSource {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/KerberosConfig.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/KerberosConfig.java
index ea00016c..d94c3183 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/KerberosConfig.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/hdfs/KerberosConfig.java
@@ -27,6 +27,7 @@ import org.apache.commons.lang3.StringUtils;
 
 import org.apache.hugegraph.loader.constant.Checkable;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 public class KerberosConfig implements Checkable {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCSource.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCSource.java
index 7f323d8f..3ffff0a7 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCSource.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCSource.java
@@ -23,6 +23,7 @@ import org.apache.hugegraph.loader.source.AbstractSource;
 import org.apache.hugegraph.loader.source.SourceType;
 import org.apache.hugegraph.loader.source.file.FileSource;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyOrder;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCVendor.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCVendor.java
index 50d607be..d694c93b 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCVendor.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/source/jdbc/JDBCVendor.java
@@ -32,7 +32,6 @@ import org.apache.hugegraph.util.E;
 public enum JDBCVendor {
 
     MYSQL {
-
         @Override
         public String defaultDriver() {
             return "com.mysql.cj.jdbc.Driver";
@@ -85,7 +84,6 @@ public enum JDBCVendor {
     },
 
     POSTGRESQL {
-
         @Override
         public String defaultDriver() {
             return "org.postgresql.Driver";
@@ -129,7 +127,6 @@ public enum JDBCVendor {
     },
 
     ORACLE {
-
         @Override
         public String defaultDriver() {
             return "oracle.jdbc.driver.OracleDriver";
@@ -152,7 +149,7 @@ public enum JDBCVendor {
         }
 
         /**
-         * NOTE: don't add an semicolon(;) at the end of oracle sql
+         * NOTE: don't add a semicolon(;) at the end of Oracle SQL
          */
         @Override
         public String buildGetHeaderSql(JDBCSource source) {
@@ -200,7 +197,6 @@ public enum JDBCVendor {
     },
 
     SQLSERVER {
-
         @Override
         public String defaultDriver() {
             return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
@@ -312,7 +308,7 @@ public enum JDBCVendor {
                   .setParameter("rewriteBatchedStatements", "true")
                   .setParameter("useServerPrepStmts", "false")
                   .setParameter("autoReconnect", "true");
-        return JDBC_PREFIX + uriBuilder.toString();
+        return JDBC_PREFIX + uriBuilder;
     }
 
     public abstract String buildGetHeaderSql(JDBCSource source);
@@ -360,8 +356,8 @@ public enum JDBCVendor {
     }
 
     /**
-     * For database which unsupport to select by where (a, b, c) >= (va, vb, vc)
-     * (a, b, c) >= (va, vb, vc) will be convert as follow:
+     * For databases that don't support selecting by where (a, b, c) >= (va, vb, vc),
+     * (a, b, c) >= (va, vb, vc) will be converted as follows:
      * ("a" = va AND "b" = vb AND "c" >= vc)
      * OR
      * ("a" = va AND "b" > vb)
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
index e1dfee5c..e4293f5d 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
@@ -156,7 +156,7 @@ public class HugeGraphSparkLoader implements Serializable {
         }
         Long totalInsertSuccessCnt = totalInsertSuccess.value();
         LOG.info("\n ------------The data load task is complete-------------------\n" +
-                 "\n  insertSuccesscnt:\t {} \n ---------------------------------------------\n",
+                 "\n insertSuccessCnt:\t {} \n ---------------------------------------------\n",
                  totalInsertSuccessCnt);
 
         sc.stop();
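
The insertSuccessCnt logged above is aggregated across executors with a Spark accumulator. A minimal standalone sketch of that mechanism (names and the local master are illustrative):

    import java.util.Arrays;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.util.LongAccumulator;

    public class AccumulatorSketch {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("acc-sketch").setMaster("local[2]");
            try (JavaSparkContext sc = new JavaSparkContext(conf)) {
                LongAccumulator insertSuccess = sc.sc().longAccumulator("insertSuccess");
                sc.parallelize(Arrays.asList(1, 2, 3, 4))
                  .foreach(x -> insertSuccess.add(1)); // executors add, driver reads
                System.out.println("insertSuccessCnt: " + insertSuccess.value());
            }
        }
    }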
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/EdgeStructV1.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/EdgeStructV1.java
index 98e5d18a..c0e1aaff 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/EdgeStructV1.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/EdgeStructV1.java
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.hugegraph.loader.constant.ElemType;
 import org.apache.hugegraph.util.E;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/ElementStructV1.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/ElementStructV1.java
index 017e9141..671c484c 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/ElementStructV1.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/ElementStructV1.java
@@ -33,6 +33,7 @@ import org.apache.hugegraph.loader.source.InputSource;
 import org.apache.hugegraph.structure.graph.UpdateStrategy;
 import org.apache.hugegraph.util.E;
 import org.apache.hugegraph.util.HashUtil;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.ImmutableSet;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/GraphStructV1.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/GraphStructV1.java
index a12063cf..b71fc124 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/GraphStructV1.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/GraphStructV1.java
@@ -40,6 +40,7 @@ import org.slf4j.Logger;
 
 import org.apache.hugegraph.util.E;
 import org.apache.hugegraph.util.Log;
+
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 /**
@@ -74,9 +75,8 @@ public class GraphStructV1 implements Checkable {
             struct.check();
             return struct;
         } catch (IOException | IllegalArgumentException e) {
-            throw new LoadException(
-                      "Failed to parse graph mapping description file '%s'",
-                      e, options.file);
+            throw new LoadException("Failed to parse graph mapping description file '%s'",
+                                    e, options.file);
         }
     }
 
@@ -110,8 +110,7 @@ public class GraphStructV1 implements Checkable {
         return (List<ES>) ListUtils.union(this.vertexStructs, this.edgeStructs);
     }
 
-    private <ES extends ElementStructV1> void checkNoSameStruct(
-                                              List<ES> structs) {
+    private <ES extends ElementStructV1> void checkNoSameStruct(List<ES> structs) {
         Set<String> uniqueKeys = structs.stream()
                                         .map(ElementStructV1::uniqueKey)
                                         .collect(Collectors.toSet());
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/VertexStructV1.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/VertexStructV1.java
index d154642b..1f853a9e 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/VertexStructV1.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/struct/VertexStructV1.java
@@ -20,6 +20,7 @@
 package org.apache.hugegraph.loader.struct;
 
 import org.apache.hugegraph.loader.constant.ElemType;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/BatchInsertTask.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/BatchInsertTask.java
index 77b95796..b744be51 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/BatchInsertTask.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/BatchInsertTask.java
@@ -79,7 +79,7 @@ public class BatchInsertTask extends InsertTask {
             }
         } while (retryCount > 0 && retryCount <= this.options().retryTimes);
 
-        // TODO:need to write to error log when when insertBatch fails
+        // TODO: need to write to the error log when insertBatch fails
         int count = this.batch.size();
         // This metrics just for current element mapping
         this.plusLoadSuccess(count);
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
index cc2d0783..2a4e82fd 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
@@ -37,6 +37,7 @@ import org.apache.hugegraph.structure.graph.BatchEdgeRequest;
 import org.apache.hugegraph.structure.graph.BatchVertexRequest;
 import org.apache.hugegraph.structure.graph.Edge;
 import org.apache.hugegraph.structure.graph.Vertex;
+
 import com.google.common.collect.ImmutableSet;
 
 public abstract class InsertTask implements Runnable {
@@ -124,7 +125,7 @@ public abstract class InsertTask implements Runnable {
         HugeClient client = this.context.client();
         List<GraphElement> elements = new ArrayList<>(batch.size());
         batch.forEach(r -> elements.add(r.element()));
-        // CreateIfNotExist dose not support false now
+        // CreateIfNotExist does not support false now
         if (this.type().isVertex()) {
             BatchVertexRequest.Builder req = new BatchVertexRequest.Builder();
             req.vertices((List<Vertex>) (Object) elements)
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/ParseTaskBuilder.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/ParseTaskBuilder.java
index 2dfa7107..9daf498f 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/ParseTaskBuilder.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/ParseTaskBuilder.java
@@ -132,7 +132,7 @@ public final class ParseTaskBuilder {
 
     private void handleParseFailure(ElementMapping mapping, ParseException e) {
         LOG.error("Parse {} error", mapping.type(), e);
-        this.context.occuredError();
+        this.context.occurredError();
         if (this.context.options().testMode) {
             throw e;
         }
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/SingleInsertTask.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/SingleInsertTask.java
index 24fcde2f..b4038bcb 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/SingleInsertTask.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/SingleInsertTask.java
@@ -67,7 +67,7 @@ public class SingleInsertTask extends InsertTask {
 
     private void handleInsertFailure(InsertException e) {
         LOG.error("Single insert {} error", this.type(), e);
-        this.context.occuredError();
+        this.context.occurredError();
         if (this.options().testMode) {
             throw e;
         }
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java
index 002cb05c..fa73f3d7 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DataTypeUtil.java
@@ -35,8 +35,9 @@ import org.apache.hugegraph.structure.constant.Cardinality;
 import org.apache.hugegraph.structure.constant.DataType;
 import org.apache.hugegraph.structure.schema.PropertyKey;
 import org.apache.hugegraph.util.E;
-import org.apache.hugegraph.util.InsertionOrderUtil;;
+import org.apache.hugegraph.util.InsertionOrderUtil;
 import org.apache.hugegraph.util.ReflectionUtil;
+
 import com.google.common.base.Splitter;
 import com.google.common.collect.ImmutableSet;
 
@@ -72,8 +73,8 @@ public final class DataTypeUtil {
                 return parseMultiValues(key, value, dataType,
                                         cardinality, source);
             default:
-                throw new AssertionError(String.format(
-                          "Unsupported cardinality: '%s'", cardinality));
+                throw new AssertionError(String.format("Unsupported cardinality: '%s'",
+                                                       cardinality));
         }
     }
 
@@ -81,7 +82,7 @@ public final class DataTypeUtil {
     public static List<Object> splitField(String key, Object rawColumnValue,
                                           InputSource source) {
         E.checkArgument(rawColumnValue != null,
-                        "The value to be splitted can't be null");
+                        "The value to be split can't be null");
         if (rawColumnValue instanceof Collection) {
             return (List<Object>) rawColumnValue;
         }
@@ -97,10 +98,9 @@ public final class DataTypeUtil {
             // trim() is a little time-consuming
             return parseLong(((String) rawValue).trim());
         }
-        throw new IllegalArgumentException(String.format(
-                  "The value(key='%s') must can be casted to Long, " +
-                  "but got '%s'(%s)",
-                  key, rawValue, rawValue.getClass().getName()));
+        throw new IllegalArgumentException(String.format("The value(key='%s') must can be casted" +
+                                                         " to Long, but got '%s'(%s)", key,
+                                                         rawValue, rawValue.getClass().getName()));
     }
 
     public static UUID parseUUID(String key, Object rawValue) {
@@ -119,9 +119,9 @@ public final class DataTypeUtil {
             return new UUID(Long.parseUnsignedLong(high, 16),
                             Long.parseUnsignedLong(low, 16));
         }
-        throw new IllegalArgumentException(String.format(
-                  "Failed to convert value(key='%s') '%s'(%s) to UUID",
-                  key, rawValue, rawValue.getClass()));
+        throw new IllegalArgumentException(String.format("Failed to convert value(key='%s') " +
+                                                         "'%s'(%s) to UUID", key, rawValue,
+                                                         rawValue.getClass()));
     }
 
     private static Object parseSingleValue(String key, Object rawValue,
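
The parseUUID hunk above rebuilds a UUID from a 32-character hex string by splitting it into the high and low 64-bit halves. A small sketch of the same reconstruction (the input literal is an example):

    import java.util.UUID;

    public class UuidFromHexSketch {
        public static void main(String[] args) {
            String hex = "00112233445566778899aabbccddeeff"; // 32 hex chars, no dashes
            // parseUnsignedLong handles halves whose top bit is set
            UUID uuid = new UUID(Long.parseUnsignedLong(hex.substring(0, 16), 16),
                                 Long.parseUnsignedLong(hex.substring(16), 16));
            System.out.println(uuid); // 00112233-4455-6677-8899-aabbccddeeff
        }
    }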
@@ -202,9 +202,9 @@ public final class DataTypeUtil {
                           key, rawValue, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE));
             }
         }
-        throw new IllegalArgumentException(String.format(
-                  "Failed to convert value(key='%s') '%s'(%s) to Boolean",
-                  key, rawValue, rawValue.getClass()));
+        throw new IllegalArgumentException(String.format("Failed to convert value(key='%s') " +
+                                                         "'%s'(%s) to Boolean", key, rawValue,
+                                                         rawValue.getClass()));
     }
 
     private static Number parseNumber(String key, Object value,
@@ -228,15 +228,14 @@ public final class DataTypeUtil {
                 case DOUBLE:
                     return Double.valueOf(value.toString());
                 default:
-                    throw new AssertionError(String.format(
-                              "Number type only contains Byte, Integer, " +
-                              "Long, Float, Double, but got %s",
-                              dataType.clazz()));
+                    throw new AssertionError(String.format("Number type only contains Byte, " +
+                                                           "Integer, Long, Float, Double, " +
+                                                           "but got %s", dataType.clazz()));
             }
         } catch (NumberFormatException e) {
-            throw new IllegalArgumentException(String.format(
-                      "Failed to convert value(key=%s) '%s'(%s) to Number",
-                      key, value, value.getClass()), e);
+            throw new IllegalArgumentException(String.format("Failed to convert value(key=%s) " +
+                                                             "'%s'(%s) to Number", key, value,
+                                                             value.getClass()), e);
         }
     }
 
@@ -261,16 +260,16 @@ public final class DataTypeUtil {
                     long timestamp = Long.parseLong((String) value);
                     return new Date(timestamp);
                 } catch (NumberFormatException e) {
-                    throw new IllegalArgumentException(String.format(
-                              "Invalid timestamp value '%s'", value));
+                    throw new IllegalArgumentException(String.format("Invalid timestamp value " +
+                                                                     "'%s'", value));
                 }
             } else {
                 return DateUtil.parse((String) value, dateFormat, timeZone);
             }
         }
-        throw new IllegalArgumentException(String.format(
-                  "Failed to convert value(key='%s') '%s'(%s) to Date",
-                  key, value, value.getClass()));
+        throw new IllegalArgumentException(String.format("Failed to convert value(key='%s') " +
+                                                         "'%s'(%s) to Date", key, value,
+                                                         value.getClass()));
     }
 
     private static List<Object> split(String key, String rawValue,
@@ -322,7 +321,7 @@ public final class DataTypeUtil {
     }
 
     /**
-     * Check type of all the values(may be some of list properties) valid
+     * Check whether the type of all the values (possibly elements of a list property) is valid
      */
     private static boolean checkCollectionDataType(String key,
                                                    Collection<?> values,
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DateUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DateUtil.java
index 609360b3..d4e661c3 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DateUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/DateUtil.java
@@ -29,8 +29,7 @@ import org.apache.hugegraph.loader.constant.Constants;
 
 public final class DateUtil {
 
-    private static final Map<String, SafeDateFormat> DATE_FORMATS =
-                                                     new ConcurrentHashMap<>();
+    private static final Map<String, SafeDateFormat> DATE_FORMATS = new ConcurrentHashMap<>();
 
     public static Date parse(String source, String df) {
         return parse(source, df, Constants.TIME_ZONE);
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/JsonUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/JsonUtil.java
index ac9eb718..835457f9 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/JsonUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/JsonUtil.java
@@ -36,6 +36,7 @@ import org.apache.hugegraph.loader.source.InputSource;
 import org.apache.hugegraph.rest.SerializeException;
 import org.apache.hugegraph.util.E;
 import org.apache.hugegraph.util.Log;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JavaType;
@@ -94,8 +95,8 @@ public final class JsonUtil {
     }
 
     public static <T> Set<T> convertSet(String json, Class<T> clazz) {
-        JavaType type = MAPPER.getTypeFactory().constructCollectionType(
-                                                LinkedHashSet.class, clazz);
+        JavaType type = MAPPER.getTypeFactory()
+                              .constructCollectionType(LinkedHashSet.class, clazz);
         try {
             return MAPPER.readValue(json, type);
         } catch (JsonProcessingException e) {
@@ -105,8 +106,8 @@ public final class JsonUtil {
     }
 
     public static <T> Set<T> convertSet(JsonNode node, Class<T> clazz) {
-        JavaType type = MAPPER.getTypeFactory().constructCollectionType(
-                                                LinkedHashSet.class, clazz);
+        JavaType type = MAPPER.getTypeFactory()
+                              .constructCollectionType(LinkedHashSet.class, clazz);
         return MAPPER.convertValue(node, type);
     }
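
constructCollectionType is how Jackson is told the full generic shape (Set<T>) that erasure hides at runtime. A minimal usage sketch:

    import java.util.LinkedHashSet;
    import java.util.Set;

    import com.fasterxml.jackson.databind.JavaType;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ConvertSetSketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            JavaType type = mapper.getTypeFactory()
                                  .constructCollectionType(LinkedHashSet.class, String.class);
            // LinkedHashSet keeps insertion order and drops the duplicate "a"
            Set<String> set = mapper.readValue("[\"a\",\"b\",\"a\"]", type);
            System.out.println(set); // [a, b]
        }
    }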
 
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/LoadUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/LoadUtil.java
index c94807a8..d22e9f56 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/LoadUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/LoadUtil.java
@@ -25,6 +25,7 @@ import java.lang.reflect.UndeclaredThrowableException;
 import org.apache.hugegraph.loader.constant.Constants;
 import org.apache.hugegraph.loader.executor.LoadOptions;
 import org.apache.hugegraph.util.E;
+
 import com.beust.jcommander.JCommander;
 
 public final class LoadUtil {
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/MappingUtil.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/MappingUtil.java
index 47e7bf06..aa0d5c49 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/MappingUtil.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/MappingUtil.java
@@ -43,7 +43,8 @@ import org.apache.hugegraph.loader.struct.ElementStructV1;
 import org.apache.hugegraph.loader.struct.GraphStructV1;
 import org.apache.hugegraph.loader.struct.VertexStructV1;
 import org.apache.hugegraph.util.E;
-import org.apache.hugegraph.util.InsertionOrderUtil;;
+import org.apache.hugegraph.util.InsertionOrderUtil;
+
 import com.google.common.collect.ImmutableSet;
 
 @SuppressWarnings("deprecation")
@@ -91,8 +92,7 @@ public final class MappingUtil {
     private static LoadMapping parseV1(String json) {
         GraphStructV1 graphStruct = JsonUtil.fromJson(json,
                                                       GraphStructV1.class);
-        Map<FileSourceKey, InputStruct> fileSourceInputStructs =
-                                        InsertionOrderUtil.newMap();
+        Map<FileSourceKey, InputStruct> fileSourceInputStructs = InsertionOrderUtil.newMap();
         List<InputStruct> jdbcSourceInputStructs = new ArrayList<>();
         for (ElementStructV1 originStruct : graphStruct.structs()) {
             InputSource inputSource = originStruct.input();
diff --git a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/Printer.java b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/Printer.java
index bbe920a9..8d08658e 100644
--- a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/Printer.java
+++ b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/util/Printer.java
@@ -168,10 +168,10 @@ public final class Printer {
     }
 
     private static void printInBackward(long vertexLoaded, long edgeLoaded) {
-        int vlength = String.valueOf(vertexLoaded).length();
-        int elength = String.valueOf(edgeLoaded).length();
+        int vLength = String.valueOf(vertexLoaded).length();
+        int eLength = String.valueOf(edgeLoaded).length();
         System.out.print(vertexLoaded + SLASH + edgeLoaded +
-                         backward(vlength + 1 + elength));
+                         backward(vLength + 1 + eLength));
     }
 
     private static void log(String message) {
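
printInBackward keeps the vertex/edge counters updating in place: print the numbers, then emit one backspace per printed character so the next refresh overwrites them. A self-contained sketch of the trick (backward is reimplemented here to keep the example standalone):

    public class BackwardPrintSketch {
        static String backward(int length) {
            // One '\b' per printed character walks the cursor back over them
            return new String(new char[length]).replace('\0', '\b');
        }

        public static void main(String[] args) throws Exception {
            for (long v = 0; v <= 30; v += 10) {
                long e = v * 2;
                String text = v + "/" + e;
                System.out.print(text + backward(text.length()));
                Thread.sleep(300); // counters only grow, so overwriting is safe
            }
            System.out.println();
        }
    }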
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/DBUtil.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/DBUtil.java
index 9f63c9ab..05ee1d11 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/DBUtil.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/DBUtil.java
@@ -93,8 +93,7 @@ public class DBUtil {
         try (Statement stmt = this.conn.createStatement()) {
             stmt.execute(sql);
         } catch (SQLException e) {
-            throw new RuntimeException(String.format(
-                      "Failed to execute sql '%s'", sql), e);
+            throw new RuntimeException(String.format("Failed to execute sql '%s'", sql), e);
         }
     }
 }
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/FileLoadTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/FileLoadTest.java
index e46310be..a1e4bbd6 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/FileLoadTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/FileLoadTest.java
@@ -61,6 +61,7 @@ import org.apache.hugegraph.structure.schema.PropertyKey;
 import org.apache.hugegraph.testutil.Assert;
 import org.apache.hugegraph.testutil.Whitebox;
 import org.apache.hugegraph.util.LongEncoding;
+
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -91,7 +92,7 @@ public class FileLoadTest extends LoadTest {
     }
 
     /**
-     * NOTE: Unsupport auto create schema
+     * NOTE: auto-creating the schema is unsupported
      */
     //@Test
     public void testAutoCreateSchema() {
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/HDFSUtil.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/HDFSUtil.java
index 460760ef..2de9ece1 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/HDFSUtil.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/HDFSUtil.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.net.URI;
 import java.nio.charset.Charset;
 import java.util.Arrays;
+import java.util.Objects;
 
 import org.apache.commons.compress.compressors.CompressorException;
 import org.apache.hadoop.conf.Configuration;
@@ -71,8 +72,8 @@ public class HDFSUtil implements IOUtil {
     private static Configuration loadConfiguration() {
         // Just use local hadoop with default config in test
         String fileName = "hdfs_with_core_site_path/core-site.xml";
-        String confPath = HDFSUtil.class.getClassLoader().getResource(fileName)
-                                                         .getPath();
+        String confPath = Objects.requireNonNull(HDFSUtil.class.getClassLoader()
+                                                               .getResource(fileName)).getPath();
         Configuration conf = new Configuration();
         conf.addResource(new Path(confPath));
         return conf;
@@ -84,18 +85,17 @@ public class HDFSUtil implements IOUtil {
         try {
             this.hdfs.mkdirs(path);
         } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                      "Failed to create directory '%s'", path), e);
+            throw new RuntimeException(String.format("Failed to create directory '%s'", path), e);
         }
     }
 
     @Override
     public void write(String fileName, Charset charset,
-                      Compression compression, String... lines) {
+                      Compression compress, String... lines) {
         Path path = new Path(this.storePath, fileName);
         checkPath(path);
 
-        if (compression == Compression.NONE) {
+        if (compress == Compression.NONE) {
             try (FSDataOutputStream fos = this.hdfs.append(path)) {
                 for (String line : lines) {
                     fos.write(line.getBytes(charset));
@@ -103,18 +103,16 @@ public class HDFSUtil implements IOUtil {
                 }
                 fos.flush();
             } catch (IOException e) {
-                throw new RuntimeException(String.format(
-                          "Failed to write lines '%s' to path '%s'",
-                          Arrays.asList(lines), path), e);
+                throw new RuntimeException(String.format("Failed to write lines '%s' to path '%s'",
+                                                         Arrays.asList(lines), path), e);
             }
         } else {
             try (FSDataOutputStream fos = this.hdfs.append(path)) {
-                IOUtil.compress(fos, charset, compression, lines);
+                IOUtil.compress(fos, charset, compress, lines);
             } catch (IOException | CompressorException e) {
-                throw new RuntimeException(String.format(
-                          "Failed to write lines '%s' to file '%s' in '%s' " +
-                          "compression format",
-                          Arrays.asList(lines), path, compression), e);
+                throw new RuntimeException(String.format("Failed to write lines '%s' to file " +
+                                                         "'%s' in '%s' compression format",
+                                                         Arrays.asList(lines), path, compress), e);
             }
         }
     }
@@ -125,8 +123,8 @@ public class HDFSUtil implements IOUtil {
             FileUtil.copy(new File(srcPath), this.hdfs, new Path(destPath),
                           false, this.conf);
         } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                      "Failed to copy file '%s' to '%s'", srcPath, destPath));
+            throw new RuntimeException(String.format("Failed to copy file '%s' to '%s'",
+                                                     srcPath, destPath));
         }
     }
 
@@ -136,8 +134,7 @@ public class HDFSUtil implements IOUtil {
         try {
             this.hdfs.delete(path, true);
         } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                      "Failed to delete file '%s'", path), e);
+            throw new RuntimeException(String.format("Failed to delete file '%s'", path), e);
         }
     }
 
@@ -157,14 +154,12 @@ public class HDFSUtil implements IOUtil {
                 this.hdfs.createNewFile(path);
             } else {
                 if (!this.hdfs.isFile(path)) {
-                    throw new RuntimeException(String.format(
-                              "Please ensure the path '%s' is file",
-                              path.getName()));
+                    throw new RuntimeException(String.format("Please ensure the path '%s' is file",
+                                                             path.getName()));
                 }
             }
         } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                      "Failed to check HDFS path '%s'", path), e);
+            throw new RuntimeException(String.format("Failed to check HDFS path '%s'", path), e);
         }
     }
 }
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/IOUtil.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/IOUtil.java
index 205ba371..a43ff7e8 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/IOUtil.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/IOUtil.java
@@ -55,25 +55,21 @@ public interface IOUtil {
         this.write(fileName, DEFAULT_CHARSET, Compression.NONE, lines);
     }
 
-    default void write(String fileName, Charset charset,
-                              String... lines) {
+    default void write(String fileName, Charset charset, String... lines) {
         this.write(fileName, charset, Compression.NONE, lines);
     }
 
-    default void write(String fileName, Compression compression,
-                              String... lines) {
+    default void write(String fileName, Compression compression, String... lines) {
         this.write(fileName, DEFAULT_CHARSET, compression, lines);
     }
 
     void write(String fileName, Charset charset,
-                      Compression compression, String... lines);
+               Compression compression, String... lines);
 
-    default void writeOrc(String fileName, TypeInfo typeInfo,
-                                 Object... values) {
+    default void writeOrc(String fileName, TypeInfo typeInfo, Object... values) {
         Path path = new Path(this.storePath(), fileName);
-        ObjectInspector inspector = TypeInfoUtils
-                                    .getStandardJavaObjectInspectorFromTypeInfo(
-                                    typeInfo);
+        ObjectInspector inspector =
+                TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
         OrcFile.WriterOptions options = OrcFile.writerOptions(this.config())
                                                .inspector(inspector);
 
@@ -81,9 +77,8 @@ public interface IOUtil {
         try (Writer writer = OrcFile.createWriter(path, options)) {
             writer.addRow(row);
         } catch (IOException e) {
-            throw new RuntimeException(String.format(
-                      "Failed to write values '%s' to file '%s' in ORC " +
-                      "compression format", row, path), e);
+            throw new RuntimeException(String.format("Failed to write values '%s' to file '%s' " +
+                                                     "in ORC compression format", row, path), e);
         }
     }
 
@@ -93,12 +88,11 @@ public interface IOUtil {
 
     void close();
 
-    static void compress(OutputStream stream, Charset charset,
-                                Compression compression, String... lines)
-                                throws IOException, CompressorException {
+    static void compress(OutputStream stream, Charset charset, Compression compression,
+                         String... lines) throws IOException, CompressorException {
         BufferedOutputStream bos = new BufferedOutputStream(stream);
-        CompressorOutputStream cos = FACTORY.createCompressorOutputStream(
-                                             compression.string(), bos);
+        CompressorOutputStream cos = FACTORY.createCompressorOutputStream(compression.string(),
+                                                                          bos);
         for (String line : lines) {
             cos.write(line.getBytes(charset));
             cos.write("\n".getBytes(charset));
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java
index dbf46545..1d28d040 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/JDBCLoadTest.java
@@ -46,7 +46,7 @@ public class JDBCLoadTest extends LoadTest {
     private static final String USER = "root";
     private static final String PASS = "root";
 
-    private static DBUtil dbUtil = new DBUtil(DRIVER, DB_URL, USER, PASS);
+    private static final DBUtil dbUtil = new DBUtil(DRIVER, DB_URL, USER, PASS);
 
     @BeforeClass
     public static void setUp() {
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/LoadTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/LoadTest.java
index a990f2ab..524b93b2 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/LoadTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/functional/LoadTest.java
@@ -96,8 +96,8 @@ public class LoadTest {
     }
 
     private static Map<String, Object> toMap(Object... properties) {
-        Assert.assertTrue("The number of properties must be even",
-                          (properties.length & 0x01) == 0);
+        Assert.assertEquals("The number of properties must be even",
+                            0, (properties.length & 0x01));
         Map<String, Object> map = new LinkedHashMap<>();
         for (int i = 0; i < properties.length; i = i + 2) {
             if (!properties[i].equals(T.ID) && !properties[i].equals(T.LABEL)) {
@@ -107,8 +107,8 @@ public class LoadTest {
         return map;
     }
 
-    public static void assertDateEquals(String expectDate, Object actualDate)
-                                        throws java.text.ParseException {
+    public static void assertDateEquals(String expectDate,
+                                        Object actualDate) throws java.text.ParseException {
         Assert.assertEquals("Date value must be String class",
                             String.class, actualDate.getClass());
         assertDateEquals(expectDate, TimeZone.getTimeZone("GMT+8"),
@@ -116,8 +116,7 @@ public class LoadTest {
     }
 
     public static void assertDateEquals(List<String> expectDates,
-                                        Object actualDates)
-                                        throws java.text.ParseException {
+                                        Object actualDates) throws java.text.ParseException {
         Assert.assertTrue("Date value must be List<String> class",
                           List.class.isAssignableFrom(actualDates.getClass()));
         List<String> actualDateList = (List<String>) actualDates;
@@ -130,9 +129,8 @@ public class LoadTest {
         }
     }
 
-    public static void assertDateEquals(String expectDate, TimeZone expectZone,
-                                        String actualDate, TimeZone actualZone)
-                                        throws java.text.ParseException {
+    public static void assertDateEquals(String expectDate, TimeZone expectZone, String actualDate,
+                                        TimeZone actualZone) throws java.text.ParseException {
         DateFormat expectDF = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
         expectDF.setTimeZone(expectZone);
         long expectTimeStamp = expectDF.parse(expectDate).getTime();
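
The (properties.length & 0x01) expression above is the standard bitwise parity
check: the low bit of a non-negative int is 0 exactly when the value is even.
Using assertEquals instead of assertTrue also means a failing run reports the
actual masked value instead of a bare "expected true". A tiny illustration
(class and variable names are ours):

    public class ParityCheckSketch {
        public static void main(String[] args) {
            // Masking with 0x01 keeps only the lowest bit, which flags oddness
            int[] lengths = {0, 1, 4, 7};
            for (int n : lengths) {
                boolean even = (n & 0x01) == 0;
                System.out.println(n + " -> " + (even ? "even" : "odd"));
            }
        }
    }
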
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/DateUtilTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/DateUtilTest.java
index 129a3e0a..f6514f06 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/DateUtilTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/DateUtilTest.java
@@ -67,7 +67,7 @@ public class DateUtilTest {
         Assert.assertFalse(DateUtil.checkTimeZone("-08:00"));
         Assert.assertFalse(DateUtil.checkTimeZone("-1"));
         Assert.assertFalse(DateUtil.checkTimeZone("GMT+10:-30"));
-        // hours is 0-23 only
+        // hours 0-23 only
         Assert.assertFalse(DateUtil.checkTimeZone("GMT+24:00"));
         // minutes 00-59 only
         Assert.assertFalse(DateUtil.checkTimeZone("GMT+13:60"));
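
A common way to validate zone ids like the ones exercised above is to
round-trip them through java.util.TimeZone, which silently falls back to "GMT"
for anything it cannot parse. The sketch below shows that approach; it is an
illustration only, not necessarily how DateUtil.checkTimeZone is implemented:

    import java.util.TimeZone;

    public class TimeZoneCheckSketch {

        // getTimeZone() returns the GMT zone for unparseable ids, so an id
        // is plausible only if it survives the round trip unchanged
        // (with "GMT" itself allowed explicitly)
        static boolean isValidZone(String id) {
            return "GMT".equals(id) || !"GMT".equals(TimeZone.getTimeZone(id).getID());
        }

        public static void main(String[] args) {
            System.out.println(isValidZone("GMT+10:30")); // true
            System.out.println(isValidZone("GMT+24:00")); // false, hours 0-23 only
        }
    }
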
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/LoadProgressTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/LoadProgressTest.java
index a9ae92a2..c7de3b7d 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/LoadProgressTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/LoadProgressTest.java
@@ -31,58 +31,58 @@ public class LoadProgressTest extends LoadTest {
     @Test
     public void testTotalLoaded() {
         String json = "{" +
-                "\"vertex_propgress\": 16," +
-                "\"edge_propgress\": 12," +
-                "\"input_propgress\": {" +
-                "    \"1\":{" +
-                "        \"type\":\"FILE\"," +
-                "        \"loaded_items\":[" +
-                "            {" +
-                "                \"name\":\"vertex_person.csv\"," +
-                "                \"last_modified\":1574346235000," +
-                "                \"checksum\":\"4250397517\"," +
-                "                \"offset\":6" +
-                "            }" +
-                "        ]," +
-                "        \"loading_item\":null" +
-                "    }," +
-                "    \"2\":{" +
-                "        \"type\":\"FILE\"," +
-                "        \"loaded_items\":[" +
-                "            {" +
-                "                \"name\":\"vertex_software.txt\"," +
-                "                \"last_modified\":1575427304000," +
-                "                \"checksum\":\"2992253526\"," +
-                "                \"offset\":2" +
-                "            }" +
-                "        ]," +
-                "        \"loading_item\":null" +
-                "    }," +
-                "    \"3\":{" +
-                "        \"type\":\"FILE\"," +
-                "        \"loaded_items\":[" +
-                "            {" +
-                "                \"name\":\"edge_knows.json\"," +
-                "                \"last_modified\":1576658150000," +
-                "                \"checksum\":\"3108779382\"," +
-                "                \"offset\":2" +
-                "            }" +
-                "        ]," +
-                "        \"loading_item\":null" +
-                "    }," +
-                "    \"4\":{" +
-                "        \"type\":\"FILE\"," +
-                "        \"loaded_items\":[" +
-                "            {" +
-                "                \"name\":\"edge_created.json\"," +
-                "                \"last_modified\":1576659393000," +
-                "                \"checksum\":\"1026646359\"," +
-                "                \"offset\":4" +
-                "            }" +
-                "        ]," +
-                "        \"loading_item\":null" +
-                "    }" +
-                "}}";
+                      "\"vertex_progress\": 16," +
+                      "\"edge_progress\": 12," +
+                      "\"input_progress\": {" +
+                      "    \"1\":{" +
+                      "        \"type\":\"FILE\"," +
+                      "        \"loaded_items\":[" +
+                      "            {" +
+                      "                \"name\":\"vertex_person.csv\"," +
+                      "                \"last_modified\":1574346235000," +
+                      "                \"checksum\":\"4250397517\"," +
+                      "                \"offset\":6" +
+                      "            }" +
+                      "        ]," +
+                      "        \"loading_item\":null" +
+                      "    }," +
+                      "    \"2\":{" +
+                      "        \"type\":\"FILE\"," +
+                      "        \"loaded_items\":[" +
+                      "            {" +
+                      "                \"name\":\"vertex_software.txt\"," +
+                      "                \"last_modified\":1575427304000," +
+                      "                \"checksum\":\"2992253526\"," +
+                      "                \"offset\":2" +
+                      "            }" +
+                      "        ]," +
+                      "        \"loading_item\":null" +
+                      "    }," +
+                      "    \"3\":{" +
+                      "        \"type\":\"FILE\"," +
+                      "        \"loaded_items\":[" +
+                      "            {" +
+                      "                \"name\":\"edge_knows.json\"," +
+                      "                \"last_modified\":1576658150000," +
+                      "                \"checksum\":\"3108779382\"," +
+                      "                \"offset\":2" +
+                      "            }" +
+                      "        ]," +
+                      "        \"loading_item\":null" +
+                      "    }," +
+                      "    \"4\":{" +
+                      "        \"type\":\"FILE\"," +
+                      "        \"loaded_items\":[" +
+                      "            {" +
+                      "                \"name\":\"edge_created.json\"," +
+                      "                \"last_modified\":1576659393000," +
+                      "                \"checksum\":\"1026646359\"," +
+                      "                \"offset\":4" +
+                      "            }" +
+                      "        ]," +
+                      "        \"loading_item\":null" +
+                      "    }" +
+                      "}}";
         LoadProgress progress = JsonUtil.fromJson(json, LoadProgress.class);
         Assert.assertEquals(16, progress.vertexLoaded());
         Assert.assertEquals(12, progress.edgeLoaded());
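
Deserialization here is name-driven: the keys in the fixture have to line up
with whatever the LoadProgress bean declares, so the spelling of
vertex_progress/edge_progress/input_progress must stay in step on both sides.
As a quick illustration of name-driven parsing, independent of the project's
JsonUtil, plain Jackson can read the same totals:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ProgressJsonSketch {
        public static void main(String[] args) throws Exception {
            String json = "{\"vertex_progress\": 16, \"edge_progress\": 12, " +
                          "\"input_progress\": {}}";
            JsonNode root = new ObjectMapper().readTree(json);
            // A lookup under a misspelled key would return null here,
            // which is how a silent name mismatch surfaces
            System.out.println(root.get("vertex_progress").asLong()); // 16
            System.out.println(root.get("edge_progress").asLong());   // 12
        }
    }
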
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/MappingConverterTest.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/MappingConverterTest.java
index fc71c74c..ace2ee3f 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/MappingConverterTest.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/MappingConverterTest.java
@@ -34,54 +34,54 @@ public class MappingConverterTest {
     @Test
     public void testConvertV1ToV2() throws IOException {
         String v1Json = "{" +
-                "  \"vertices\": [" +
-                "    {" +
-                "      \"label\": \"user\"," +
-                "      \"input\": {" +
-                "        \"type\": \"file\"," +
-                "        \"path\": \"users.dat\"," +
-                "        \"format\": \"TEXT\"," +
-                "        \"delimiter\": \"::\"," +
-                "        \"header\": [\"UserID\", \"Gender\", \"Age\", " +
-                "\"Occupation\", \"Zip-code\"]" +
-                "      }," +
-                "      \"ignored\": [\"Gender\", \"Age\", \"Occupation\", " +
-                "\"Zip-code\"]," +
-                "      \"field_mapping\": {" +
-                "        \"UserID\": \"id\"" +
-                "      }" +
-                "    }" +
-                "  ]," +
-                "  \"edges\": [" +
-                "    {" +
-                "      \"label\": \"rating\"," +
-                "      \"source\": [\"UserID\"]," +
-                "      \"target\": [\"MovieID\"]," +
-                "      \"input\": {" +
-                "        \"type\": \"file\"," +
-                "        \"path\": \"ratings.dat\"," +
-                "        \"format\": \"TEXT\"," +
-                "        \"delimiter\": \"::\"," +
-                "        \"header\": [\"UserID\", \"MovieID\", \"Rating\", " +
-                "\"Timestamp\"]" +
-                "      }," +
-                "      \"ignored\": [\"Timestamp\"]," +
-                "      \"field_mapping\": {" +
-                "        \"UserID\": \"id\"," +
-                "        \"MovieID\": \"id\"," +
-                "        \"Rating\": \"rate\"" +
-                "      }" +
-                "    }" +
-                "  ]," +
-                "  \"backendStoreInfo\":" +
-                "  {" +
-                "    \"edge_tablename\": \"hugegraph:g_oe\"," +
-                "    \"vertex_tablename\": \"hugegraph:g_v\"," +
-                "    \"hbase_zookeeper_quorum\": \"127.0.0.1\"," +
-                "    \"hbase_zookeeper_property_clientPort\": \"2181\"," +
-                "    \"zookeeper_znode_parent\": \"/hbase\"" +
-                "  }" +
-                "}";
+                        "  \"vertices\": [" +
+                        "    {" +
+                        "      \"label\": \"user\"," +
+                        "      \"input\": {" +
+                        "        \"type\": \"file\"," +
+                        "        \"path\": \"users.dat\"," +
+                        "        \"format\": \"TEXT\"," +
+                        "        \"delimiter\": \"::\"," +
+                        "        \"header\": [\"UserID\", \"Gender\", \"Age\", " +
+                        "\"Occupation\", \"Zip-code\"]" +
+                        "      }," +
+                        "      \"ignored\": [\"Gender\", \"Age\", \"Occupation\", " +
+                        "\"Zip-code\"]," +
+                        "      \"field_mapping\": {" +
+                        "        \"UserID\": \"id\"" +
+                        "      }" +
+                        "    }" +
+                        "  ]," +
+                        "  \"edges\": [" +
+                        "    {" +
+                        "      \"label\": \"rating\"," +
+                        "      \"source\": [\"UserID\"]," +
+                        "      \"target\": [\"MovieID\"]," +
+                        "      \"input\": {" +
+                        "        \"type\": \"file\"," +
+                        "        \"path\": \"ratings.dat\"," +
+                        "        \"format\": \"TEXT\"," +
+                        "        \"delimiter\": \"::\"," +
+                        "        \"header\": [\"UserID\", \"MovieID\", \"Rating\", " +
+                        "\"Timestamp\"]" +
+                        "      }," +
+                        "      \"ignored\": [\"Timestamp\"]," +
+                        "      \"field_mapping\": {" +
+                        "        \"UserID\": \"id\"," +
+                        "        \"MovieID\": \"id\"," +
+                        "        \"Rating\": \"rate\"" +
+                        "      }" +
+                        "    }" +
+                        "  ]," +
+                        "  \"backendStoreInfo\":" +
+                        "  {" +
+                        "    \"edge_tablename\": \"hugegraph:g_oe\"," +
+                        "    \"vertex_tablename\": \"hugegraph:g_v\"," +
+                        "    \"hbase_zookeeper_quorum\": \"127.0.0.1\"," +
+                        "    \"hbase_zookeeper_property_clientPort\": \"2181\"," +
+                        "    \"zookeeper_znode_parent\": \"/hbase\"" +
+                        "  }" +
+                        "}";
         String input = "struct.json";
         File inputFile = new File(input);
         Charset charset = StandardCharsets.UTF_8;
@@ -91,46 +91,47 @@ public class MappingConverterTest {
         File outputFile = FileUtils.getFile("struct-v2.json");
         String actualV2Json = FileUtils.readFileToString(outputFile, charset);
         String expectV2Json = "{\"version\":\"2.0\"," +
-                "\"structs\":[{\"id\":\"1\",\"skip\":false," +
-                "\"input\":{\"type\":\"FILE\",\"path\":\"users.dat\"," +
-                "\"file_filter\":{\"extensions\":[\"*\"]}," +
-                "\"format\":\"TEXT\",\"delimiter\":\"::\"," +
-                "\"date_format\":\"yyyy-MM-dd HH:mm:ss\"," +
-                "\"time_zone\":\"GMT+8\",\"skipped_line\":{\"regex\":\"" +
-                "(^#|^//).*|\"},\"compression\":\"NONE\"," +
-                "\"batch_size\":500,\"header\":[\"UserID\",\"Gender\"," +
-                "\"Age\",\"Occupation\",\"Zip-code\"]," +
-                "\"charset\":\"UTF-8\",\"list_format\":null}," +
-                "\"vertices\":[{\"label\":\"user\",\"skip\":false," +
-                "\"id\":null,\"unfold\":false," +
-                "\"field_mapping\":{\"UserID\":\"id\"}," +
-                "\"value_mapping\":{},\"selected\":[]," +
-                "\"ignored\":[\"Occupation\",\"Zip-code\",\"Gender\"," +
-                "\"Age\"],\"null_values\":[\"\"]," +
-                "\"update_strategies\":{},\"batch_size\":500}],\"edges\":[]},{\"id\":\"2\"," +
-                "\"skip\":false,\"input\":{\"type\":\"FILE\"," +
-                "\"path\":\"ratings.dat\"," +
-                "\"file_filter\":{\"extensions\":[\"*\"]}," +
-                "\"format\":\"TEXT\",\"delimiter\":\"::\"," +
-                "\"date_format\":\"yyyy-MM-dd HH:mm:ss\"," +
-                "\"time_zone\":\"GMT+8\",\"skipped_line\":{\"regex\":\"" +
-                "(^#|^//).*|\"},\"compression\":\"NONE\"," +
-                "\"batch_size\":500,\"header\":[\"UserID\",\"MovieID\"," +
-                "\"Rating\",\"Timestamp\"],\"charset\":\"UTF-8\"," +
-                "\"list_format\":null},\"vertices\":[]," +
-                "\"edges\":[{\"label\":\"rating\",\"skip\":false," +
-                "\"source\":[\"UserID\"],\"unfold_source\":false," +
-                "\"target\":[\"MovieID\"],\"unfold_target\":false," +
-                "\"field_mapping\":{\"UserID\":\"id\",\"MovieID\":\"id\"," +
-                "\"Rating\":\"rate\"},\"value_mapping\":{},\"selected\":[]," +
-                "\"ignored\":[\"Timestamp\"],\"null_values\":[\"\"]," +
-                "\"update_strategies\":{},\"batch_size\":500}]}]," +
-                "\"backendStoreInfo\":{" +
-                "\"edge_tablename\":\"hugegraph:g_oe\"," +
-                "\"vertex_tablename\":\"hugegraph:g_v\"," +
-                "\"hbase_zookeeper_quorum\":\"127.0.0.1\"," +
-                "\"hbase_zookeeper_property_clientPort\":\"2181\"," +
-                "\"zookeeper_znode_parent\":\"/hbase\"}}";
+                              "\"structs\":[{\"id\":\"1\",\"skip\":false," +
+                              "\"input\":{\"type\":\"FILE\",\"path\":\"users.dat\"," +
+                              "\"file_filter\":{\"extensions\":[\"*\"]}," +
+                              "\"format\":\"TEXT\",\"delimiter\":\"::\"," +
+                              "\"date_format\":\"yyyy-MM-dd HH:mm:ss\"," +
+                              "\"time_zone\":\"GMT+8\",\"skipped_line\":{\"regex\":\"" +
+                              "(^#|^//).*|\"},\"compression\":\"NONE\"," +
+                              "\"batch_size\":500,\"header\":[\"UserID\",\"Gender\"," +
+                              "\"Age\",\"Occupation\",\"Zip-code\"]," +
+                              "\"charset\":\"UTF-8\",\"list_format\":null}," +
+                              "\"vertices\":[{\"label\":\"user\",\"skip\":false," +
+                              "\"id\":null,\"unfold\":false," +
+                              "\"field_mapping\":{\"UserID\":\"id\"}," +
+                              "\"value_mapping\":{},\"selected\":[]," +
+                              "\"ignored\":[\"Occupation\",\"Zip-code\",\"Gender\"," +
+                              "\"Age\"],\"null_values\":[\"\"]," +
+                              "\"update_strategies\":{},\"batch_size\":500}],\"edges\":[]}," +
+                              "{\"id\":\"2\"," +
+                              "\"skip\":false,\"input\":{\"type\":\"FILE\"," +
+                              "\"path\":\"ratings.dat\"," +
+                              "\"file_filter\":{\"extensions\":[\"*\"]}," +
+                              "\"format\":\"TEXT\",\"delimiter\":\"::\"," +
+                              "\"date_format\":\"yyyy-MM-dd HH:mm:ss\"," +
+                              "\"time_zone\":\"GMT+8\",\"skipped_line\":{\"regex\":\"" +
+                              "(^#|^//).*|\"},\"compression\":\"NONE\"," +
+                              "\"batch_size\":500,\"header\":[\"UserID\",\"MovieID\"," +
+                              "\"Rating\",\"Timestamp\"],\"charset\":\"UTF-8\"," +
+                              "\"list_format\":null},\"vertices\":[]," +
+                              "\"edges\":[{\"label\":\"rating\",\"skip\":false," +
+                              "\"source\":[\"UserID\"],\"unfold_source\":false," +
+                              "\"target\":[\"MovieID\"],\"unfold_target\":false," +
+                              "\"field_mapping\":{\"UserID\":\"id\",\"MovieID\":\"id\"," +
+                              "\"Rating\":\"rate\"},\"value_mapping\":{},\"selected\":[]," +
+                              "\"ignored\":[\"Timestamp\"],\"null_values\":[\"\"]," +
+                              "\"update_strategies\":{},\"batch_size\":500}]}]," +
+                              "\"backendStoreInfo\":{" +
+                              "\"edge_tablename\":\"hugegraph:g_oe\"," +
+                              "\"vertex_tablename\":\"hugegraph:g_v\"," +
+                              "\"hbase_zookeeper_quorum\":\"127.0.0.1\"," +
+                              "\"hbase_zookeeper_property_clientPort\":\"2181\"," +
+                              "\"zookeeper_znode_parent\":\"/hbase\"}}";
         Assert.assertEquals(expectV2Json, actualV2Json);
 
         FileUtils.forceDelete(inputFile);
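
As an aside, on a Java 15+ language level the long concatenated JSON literals
in tests like this one can be written as text blocks, dropping the per-line
quotes and "+" noise. A small sketch, assuming a toolchain that permits it
(which may not hold for this project):

    public class TextBlockSketch {
        public static void main(String[] args) {
            // A text block keeps embedded JSON readable without escapes
            // on every quote and without per-line concatenation
            String json = """
                    {
                      "label": "user",
                      "field_mapping": {"UserID": "id"}
                    }
                    """;
            System.out.println(json);
        }
    }
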
diff --git a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/UnitTestSuite.java b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/UnitTestSuite.java
index 2ea692ea..dc8df6b3 100644
--- a/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/UnitTestSuite.java
+++ b/hugegraph-loader/src/test/java/org/apache/hugegraph/loader/test/unit/UnitTestSuite.java
@@ -24,11 +24,11 @@ import org.junit.runners.Suite;
 
 @RunWith(Suite.class)
 @Suite.SuiteClasses({
-    LineTest.class,
-    DateUtilTest.class,
-    MappingConverterTest.class,
-    LoadProgressTest.class,
-    RangesTimerTest.class
+        LineTest.class,
+        DateUtilTest.class,
+        MappingConverterTest.class,
+        LoadProgressTest.class,
+        RangesTimerTest.class
 })
 public class UnitTestSuite {
 }