Posted to commits@hudi.apache.org by le...@apache.org on 2019/11/19 10:44:50 UTC

[incubator-hudi] branch master updated: [HUDI-346] Set allowMultipleEmptyLines to false for EmptyLineSeparator rule (#1025)

This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 804e348  [HUDI-346] Set allowMultipleEmptyLines to false for EmptyLineSeparator rule (#1025)
804e348 is described below

commit 804e348d0e8176ceced046fa9e87963907aecc38
Author: 谢磊 <la...@163.com>
AuthorDate: Tue Nov 19 18:44:42 2019 +0800

    [HUDI-346] Set allowMultipleEmptyLines to false for EmptyLineSeparator rule (#1025)
---
 hudi-cli/src/main/java/org/apache/hudi/cli/HoodieCLI.java             | 1 -
 .../src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java    | 1 -
 .../src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java | 3 ---
 .../src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java  | 1 -
 .../src/main/java/org/apache/hudi/config/HoodieCompactionConfig.java  | 1 -
 .../src/main/java/org/apache/hudi/config/HoodieMetricsConfig.java     | 1 -
 .../src/main/java/org/apache/hudi/config/HoodieWriteConfig.java       | 1 -
 .../hudi/exception/HoodieDependentSystemUnavailableException.java     | 1 -
 .../src/main/java/org/apache/hudi/func/BulkInsertMapFunction.java     | 1 -
 .../src/main/java/org/apache/hudi/func/LazyIterableIterator.java      | 1 -
 hudi-client/src/main/java/org/apache/hudi/index/HoodieIndex.java      | 2 --
 .../src/main/java/org/apache/hudi/index/InMemoryHashIndex.java        | 1 -
 .../src/main/java/org/apache/hudi/index/bloom/HoodieBloomIndex.java   | 1 -
 hudi-client/src/main/java/org/apache/hudi/io/HoodieCleanHelper.java   | 1 -
 hudi-client/src/main/java/org/apache/hudi/io/HoodieIOHandle.java      | 1 -
 hudi-client/src/main/java/org/apache/hudi/io/HoodieMergeHandle.java   | 1 -
 .../apache/hudi/io/compact/strategy/DayBasedCompactionStrategy.java   | 1 -
 .../src/main/java/org/apache/hudi/io/storage/HoodieParquetWriter.java | 1 -
 .../src/main/java/org/apache/hudi/table/HoodieCopyOnWriteTable.java   | 1 -
 hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java | 1 -
 hudi-client/src/main/java/org/apache/hudi/table/WorkloadProfile.java  | 1 -
 hudi-client/src/test/java/HoodieClientExample.java                    | 1 -
 hudi-client/src/test/java/org/apache/hudi/TestConsistencyGuard.java   | 1 -
 .../src/test/java/org/apache/hudi/common/HoodieClientTestUtils.java   | 1 -
 .../src/test/java/org/apache/hudi/common/TestRawTripPayload.java      | 2 --
 .../test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java   | 2 --
 .../java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java  | 1 -
 .../src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java     | 4 ----
 .../src/main/java/org/apache/hudi/avro/HoodieAvroWriteSupport.java    | 1 -
 .../src/main/java/org/apache/hudi/common/HoodieJsonPayload.java       | 1 -
 .../java/org/apache/hudi/common/model/HoodiePartitionMetadata.java    | 1 -
 .../src/main/java/org/apache/hudi/common/model/HoodieRecord.java      | 1 -
 .../src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java | 1 -
 .../main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java | 2 --
 .../java/org/apache/hudi/common/table/log/HoodieLogFileReader.java    | 1 -
 .../main/java/org/apache/hudi/common/table/log/HoodieLogFormat.java   | 1 -
 .../org/apache/hudi/common/table/log/block/HoodieDeleteBlock.java     | 1 -
 .../org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java   | 2 --
 .../org/apache/hudi/common/table/timeline/HoodieArchivedTimeline.java | 1 -
 .../java/org/apache/hudi/common/table/timeline/dto/InstantDTO.java    | 1 -
 .../apache/hudi/common/table/view/AbstractTableFileSystemView.java    | 1 -
 .../java/org/apache/hudi/common/table/view/FileSystemViewManager.java | 1 -
 hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java  | 1 -
 .../src/main/java/org/apache/hudi/common/util/ConsistencyGuard.java   | 1 -
 hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java    | 2 --
 .../src/main/java/org/apache/hudi/common/util/HoodieAvroUtils.java    | 1 -
 .../src/main/java/org/apache/hudi/common/util/SerializationUtils.java | 1 -
 .../src/main/java/org/apache/hudi/common/util/TimelineDiffHelper.java | 1 -
 .../org/apache/hudi/common/util/queue/BoundedInMemoryExecutor.java    | 2 --
 .../apache/hudi/common/util/queue/BoundedInMemoryQueueConsumer.java   | 2 --
 .../java/org/apache/hudi/common/table/TestHoodieTableMetaClient.java  | 1 -
 .../java/org/apache/hudi/common/table/log/TestHoodieLogFormat.java    | 2 --
 .../org/apache/hudi/common/table/view/TestIncrementalFSViewSync.java  | 2 --
 .../org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java   | 1 -
 .../test/java/org/apache/hudi/common/util/TestHoodieAvroUtils.java    | 1 -
 .../src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java | 1 -
 .../java/org/apache/hudi/hadoop/SafeParquetRecordReaderWrapper.java   | 1 -
 .../apache/hudi/hadoop/realtime/HoodieParquetRealtimeInputFormat.java | 1 -
 .../java/org/apache/hudi/hadoop/realtime/HoodieRealtimeFileSplit.java | 1 -
 .../src/test/java/org/apache/hudi/hadoop/InputFormatTestUtil.java     | 1 -
 .../test/java/org/apache/hudi/hadoop/TestHoodieROTablePathFilter.java | 2 +-
 .../java/org/apache/hudi/hadoop/TestRecordReaderValueIterator.java    | 1 -
 hudi-hive/src/main/java/org/apache/hudi/hive/HiveSyncTool.java        | 1 -
 hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java    | 3 ---
 hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java     | 2 --
 hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java    | 1 -
 .../src/test/java/org/apache/hudi/hive/util/HiveTestService.java      | 3 ---
 hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java         | 1 -
 .../src/main/java/org/apache/hudi/utilities/HoodieCompactor.java      | 1 -
 .../main/java/org/apache/hudi/utilities/deltastreamer/DeltaSync.java  | 2 --
 .../org/apache/hudi/utilities/deltastreamer/HoodieDeltaStreamer.java  | 3 ---
 .../apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java   | 1 -
 .../org/apache/hudi/utilities/deltastreamer/SourceFormatAdapter.java  | 1 -
 .../org/apache/hudi/utilities/keygen/TimestampBasedKeyGenerator.java  | 1 -
 .../main/java/org/apache/hudi/utilities/perf/TimelineServerPerf.java  | 1 -
 .../java/org/apache/hudi/utilities/sources/HiveIncrPullSource.java    | 1 -
 .../java/org/apache/hudi/utilities/sources/helpers/AvroConvertor.java | 3 ---
 .../org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java     | 2 --
 .../test/java/org/apache/hudi/utilities/TestHDFSParquetImporter.java  | 1 -
 .../test/java/org/apache/hudi/utilities/sources/TestKafkaSource.java  | 2 --
 pom.xml                                                               | 2 +-
 style/checkstyle.xml                                                  | 3 +++
 82 files changed, 5 insertions(+), 108 deletions(-)
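
For context, the change that drives all of the blank-line deletions below is the checkstyle configuration itself (style/checkstyle.xml, 3 lines added per the diffstat above); that hunk is not included in this excerpt. A minimal sketch of what such a rule typically looks like, assuming the standard Checkstyle EmptyLineSeparator module registered under TreeWalker, is:

    <module name="TreeWalker">
      <!-- Sketch only: the actual style/checkstyle.xml hunk is not shown in this excerpt. -->
      <module name="EmptyLineSeparator">
        <!-- Disallow runs of two or more consecutive blank lines between members,
             which is exactly what the deletions in this commit clean up. -->
        <property name="allowMultipleEmptyLines" value="false"/>
      </module>
    </module>

With a setting along these lines, a Checkstyle run flags any source file that still contains back-to-back empty lines, so the Java files below are cleaned up in the same commit to keep the check passing.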

diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieCLI.java b/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieCLI.java
index 71c61a7..d2e6f99 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieCLI.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieCLI.java
@@ -35,7 +35,6 @@ public class HoodieCLI {
   public static HoodieTableMetaClient tableMetadata;
   public static HoodieTableMetaClient syncTableMetadata;
 
-
   public enum CLIState {
     INIT, DATASET, SYNC
   }
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java
index 84757d9..d0365b5 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java
@@ -70,7 +70,6 @@ public class RepairsCommand implements CommandMarker {
     return "Deduplication failed ";
   }
 
-
   @CliCommand(value = "repair addpartitionmeta", help = "Add partition metadata to a dataset, if not present")
   public String addPartitionMeta(
       @CliOption(key = {"dryrun"}, help = "Should we actually add or just print what would be done",
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java
index bcbaad8..c243035 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java
@@ -53,7 +53,6 @@ public class SavepointsCommand implements CommandMarker {
     return HoodieCLI.tableMetadata != null;
   }
 
-
   @CliAvailabilityIndicator({"savepoint create"})
   public boolean isCreateSavepointAvailable() {
     return HoodieCLI.tableMetadata != null;
@@ -127,7 +126,6 @@ public class SavepointsCommand implements CommandMarker {
     return "Savepoint " + commitTime + " rolled back";
   }
 
-
   @CliCommand(value = "savepoints refresh", help = "Refresh the savepoints")
   public String refreshMetaClient() throws IOException {
     HoodieCLI.refreshTableMetadata();
@@ -140,5 +138,4 @@ public class SavepointsCommand implements CommandMarker {
     return new HoodieWriteClient(jsc, config, false);
   }
 
-
 }
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
index 9d53c56..ec00154 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
@@ -57,5 +57,4 @@ public class InputStreamConsumer extends Thread {
     stdout.start();
   }
 
-
 }
diff --git a/hudi-client/src/main/java/org/apache/hudi/config/HoodieCompactionConfig.java b/hudi-client/src/main/java/org/apache/hudi/config/HoodieCompactionConfig.java
index cd3cfa8..8ae7df6 100644
--- a/hudi-client/src/main/java/org/apache/hudi/config/HoodieCompactionConfig.java
+++ b/hudi-client/src/main/java/org/apache/hudi/config/HoodieCompactionConfig.java
@@ -132,7 +132,6 @@ public class HoodieCompactionConfig extends DefaultHoodieConfig {
       return this;
     }
 
-
     public Builder withAutoClean(Boolean autoClean) {
       props.setProperty(AUTO_CLEAN_PROP, String.valueOf(autoClean));
       return this;
diff --git a/hudi-client/src/main/java/org/apache/hudi/config/HoodieMetricsConfig.java b/hudi-client/src/main/java/org/apache/hudi/config/HoodieMetricsConfig.java
index 0074c72..903b923 100644
--- a/hudi-client/src/main/java/org/apache/hudi/config/HoodieMetricsConfig.java
+++ b/hudi-client/src/main/java/org/apache/hudi/config/HoodieMetricsConfig.java
@@ -74,7 +74,6 @@ public class HoodieMetricsConfig extends DefaultHoodieConfig {
       return this;
     }
 
-
     public Builder on(boolean metricsOn) {
       props.setProperty(METRICS_ON, String.valueOf(metricsOn));
       return this;
diff --git a/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java b/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
index 5e7be5c..913baa1 100644
--- a/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
+++ b/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
@@ -147,7 +147,6 @@ public class HoodieWriteConfig extends DefaultHoodieConfig {
     return Integer.parseInt(props.getProperty(ROLLBACK_PARALLELISM));
   }
 
-
   public int getWriteBufferLimitBytes() {
     return Integer.parseInt(props.getProperty(WRITE_BUFFER_LIMIT_BYTES, DEFAULT_WRITE_BUFFER_LIMIT_BYTES));
   }
diff --git a/hudi-client/src/main/java/org/apache/hudi/exception/HoodieDependentSystemUnavailableException.java b/hudi-client/src/main/java/org/apache/hudi/exception/HoodieDependentSystemUnavailableException.java
index 4530817..76d679f 100644
--- a/hudi-client/src/main/java/org/apache/hudi/exception/HoodieDependentSystemUnavailableException.java
+++ b/hudi-client/src/main/java/org/apache/hudi/exception/HoodieDependentSystemUnavailableException.java
@@ -18,7 +18,6 @@
 
 package org.apache.hudi.exception;
 
-
 /**
  * <p>
  * Exception thrown when dependent system is not available
diff --git a/hudi-client/src/main/java/org/apache/hudi/func/BulkInsertMapFunction.java b/hudi-client/src/main/java/org/apache/hudi/func/BulkInsertMapFunction.java
index 417574e..b1230f7 100644
--- a/hudi-client/src/main/java/org/apache/hudi/func/BulkInsertMapFunction.java
+++ b/hudi-client/src/main/java/org/apache/hudi/func/BulkInsertMapFunction.java
@@ -27,7 +27,6 @@ import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 import org.apache.spark.api.java.function.Function2;
 
-
 /**
  * Map function that handles a sorted stream of HoodieRecords
  */
diff --git a/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java b/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java
index 3f0f4a1..ec05b85 100644
--- a/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java
+++ b/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java
@@ -52,7 +52,6 @@ public abstract class LazyIterableIterator<I, O> implements Iterable<O>, Iterato
    */
   protected abstract O computeNext();
 
-
   /**
    * Called once, after all elements are processed.
    */
diff --git a/hudi-client/src/main/java/org/apache/hudi/index/HoodieIndex.java b/hudi-client/src/main/java/org/apache/hudi/index/HoodieIndex.java
index 9eb721a..78a7510 100644
--- a/hudi-client/src/main/java/org/apache/hudi/index/HoodieIndex.java
+++ b/hudi-client/src/main/java/org/apache/hudi/index/HoodieIndex.java
@@ -47,7 +47,6 @@ public abstract class HoodieIndex<T extends HoodieRecordPayload> implements Seri
     this.config = config;
   }
 
-
   public static <T extends HoodieRecordPayload> HoodieIndex<T> createIndex(HoodieWriteConfig config,
       JavaSparkContext jsc) throws HoodieIndexException {
     switch (config.getIndexType()) {
@@ -108,7 +107,6 @@ public abstract class HoodieIndex<T extends HoodieRecordPayload> implements Seri
    */
   public abstract boolean canIndexLogFiles();
 
-
   /**
    * An index is "implicit" with respect to storage, if just writing new data to a file slice, updates the index as
    * well. This is used by storage, to save memory footprint in certain cases.
diff --git a/hudi-client/src/main/java/org/apache/hudi/index/InMemoryHashIndex.java b/hudi-client/src/main/java/org/apache/hudi/index/InMemoryHashIndex.java
index 506b8a9..91cd8a8 100644
--- a/hudi-client/src/main/java/org/apache/hudi/index/InMemoryHashIndex.java
+++ b/hudi-client/src/main/java/org/apache/hudi/index/InMemoryHashIndex.java
@@ -38,7 +38,6 @@ import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
 import org.apache.spark.api.java.function.Function2;
 
-
 /**
  * Hoodie Index implementation backed by an in-memory Hash map.
  * <p>
diff --git a/hudi-client/src/main/java/org/apache/hudi/index/bloom/HoodieBloomIndex.java b/hudi-client/src/main/java/org/apache/hudi/index/bloom/HoodieBloomIndex.java
index d43bf1e..ee3007d 100644
--- a/hudi-client/src/main/java/org/apache/hudi/index/bloom/HoodieBloomIndex.java
+++ b/hudi-client/src/main/java/org/apache/hudi/index/bloom/HoodieBloomIndex.java
@@ -268,7 +268,6 @@ public class HoodieBloomIndex<T extends HoodieRecordPayload> extends HoodieIndex
     }
   }
 
-
   @Override
   public boolean rollbackCommit(String commitTime) {
     // Nope, don't need to do anything.
diff --git a/hudi-client/src/main/java/org/apache/hudi/io/HoodieCleanHelper.java b/hudi-client/src/main/java/org/apache/hudi/io/HoodieCleanHelper.java
index 10b8040..fbc87b9 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/HoodieCleanHelper.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/HoodieCleanHelper.java
@@ -170,7 +170,6 @@ public class HoodieCleanHelper<T extends HoodieRecordPayload<T>> implements Seri
     return deletePaths;
   }
 
-
   /**
    * Selects the versions for file for cleaning, such that it
    * <p>
diff --git a/hudi-client/src/main/java/org/apache/hudi/io/HoodieIOHandle.java b/hudi-client/src/main/java/org/apache/hudi/io/HoodieIOHandle.java
index 47491b3..68a9245 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/HoodieIOHandle.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/HoodieIOHandle.java
@@ -23,7 +23,6 @@ import org.apache.hudi.common.model.HoodieRecordPayload;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 
-
 public abstract class HoodieIOHandle<T extends HoodieRecordPayload> {
 
   protected final String instantTime;
diff --git a/hudi-client/src/main/java/org/apache/hudi/io/HoodieMergeHandle.java b/hudi-client/src/main/java/org/apache/hudi/io/HoodieMergeHandle.java
index 81178ec..8821c4e 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/HoodieMergeHandle.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/HoodieMergeHandle.java
@@ -86,7 +86,6 @@ public class HoodieMergeHandle<T extends HoodieRecordPayload> extends HoodieWrit
         dataFileToBeMerged);
   }
 
-
   public static Schema createHoodieWriteSchema(Schema originalSchema) {
     return HoodieAvroUtils.addMetadataFields(originalSchema);
   }
diff --git a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/DayBasedCompactionStrategy.java b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/DayBasedCompactionStrategy.java
index 79fc347..a72bafe 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/DayBasedCompactionStrategy.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/DayBasedCompactionStrategy.java
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.hudi.io.compact.strategy;
 
 import com.google.common.annotations.VisibleForTesting;
diff --git a/hudi-client/src/main/java/org/apache/hudi/io/storage/HoodieParquetWriter.java b/hudi-client/src/main/java/org/apache/hudi/io/storage/HoodieParquetWriter.java
index eba4d97..9e9ccbc 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/storage/HoodieParquetWriter.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/storage/HoodieParquetWriter.java
@@ -51,7 +51,6 @@ public class HoodieParquetWriter<T extends HoodieRecordPayload, R extends Indexe
   private final String commitTime;
   private final Schema schema;
 
-
   public HoodieParquetWriter(String commitTime, Path file, HoodieParquetConfig parquetConfig, Schema schema)
       throws IOException {
     super(HoodieWrapperFileSystem.convertToHoodiePath(file, parquetConfig.getHadoopConf()),
diff --git a/hudi-client/src/main/java/org/apache/hudi/table/HoodieCopyOnWriteTable.java b/hudi-client/src/main/java/org/apache/hudi/table/HoodieCopyOnWriteTable.java
index 1ff7c52..e8c0f1d 100644
--- a/hudi-client/src/main/java/org/apache/hudi/table/HoodieCopyOnWriteTable.java
+++ b/hudi-client/src/main/java/org/apache/hudi/table/HoodieCopyOnWriteTable.java
@@ -80,7 +80,6 @@ import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.PairFlatMapFunction;
 import scala.Tuple2;
 
-
 /**
  * Implementation of a very heavily read-optimized Hoodie Table where
  * <p>
diff --git a/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java b/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java
index 1ba9b43..43ca9a4 100644
--- a/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java
+++ b/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java
@@ -212,7 +212,6 @@ public class RollbackExecutor implements Serializable {
     return results;
   }
 
-
   private Map<HeaderMetadataType, String> generateHeader(String commit) {
     // generate metadata
     Map<HeaderMetadataType, String> header = Maps.newHashMap();
diff --git a/hudi-client/src/main/java/org/apache/hudi/table/WorkloadProfile.java b/hudi-client/src/main/java/org/apache/hudi/table/WorkloadProfile.java
index 650b717..9590e7e 100644
--- a/hudi-client/src/main/java/org/apache/hudi/table/WorkloadProfile.java
+++ b/hudi-client/src/main/java/org/apache/hudi/table/WorkloadProfile.java
@@ -49,7 +49,6 @@ public class WorkloadProfile<T extends HoodieRecordPayload> implements Serializa
 
   private final WorkloadStat globalStat;
 
-
   public WorkloadProfile(JavaRDD<HoodieRecord<T>> taggedRecords) {
     this.taggedRecords = taggedRecords;
     this.partitionPathStatMap = new HashMap<>();
diff --git a/hudi-client/src/test/java/HoodieClientExample.java b/hudi-client/src/test/java/HoodieClientExample.java
index 6406b6b..a697402 100644
--- a/hudi-client/src/test/java/HoodieClientExample.java
+++ b/hudi-client/src/test/java/HoodieClientExample.java
@@ -67,7 +67,6 @@ public class HoodieClientExample {
     cli.run();
   }
 
-
   public void run() throws Exception {
 
     SparkConf sparkConf = new SparkConf().setAppName("hoodie-client-example");
diff --git a/hudi-client/src/test/java/org/apache/hudi/TestConsistencyGuard.java b/hudi-client/src/test/java/org/apache/hudi/TestConsistencyGuard.java
index 53b08c2..e5520df 100644
--- a/hudi-client/src/test/java/org/apache/hudi/TestConsistencyGuard.java
+++ b/hudi-client/src/test/java/org/apache/hudi/TestConsistencyGuard.java
@@ -70,7 +70,6 @@ public class TestConsistencyGuard extends HoodieClientTestHarness {
         .asList(basePath + "/partition/path/f1_1-0-2_000.parquet", basePath + "/partition/path/f2_1-0-2_000.parquet"));
   }
 
-
   @Test(expected = TimeoutException.class)
   public void testCheckFailingAppears() throws Exception {
     HoodieClientTestUtils.fakeDataFile(basePath, "partition/path", "000", "f1");
diff --git a/hudi-client/src/test/java/org/apache/hudi/common/HoodieClientTestUtils.java b/hudi-client/src/test/java/org/apache/hudi/common/HoodieClientTestUtils.java
index 8dfb824..7890f3a 100644
--- a/hudi-client/src/test/java/org/apache/hudi/common/HoodieClientTestUtils.java
+++ b/hudi-client/src/test/java/org/apache/hudi/common/HoodieClientTestUtils.java
@@ -92,7 +92,6 @@ public class HoodieClientTestUtils {
     new File(parentPath + "/" + commitTime + suffix).createNewFile();
   }
 
-
   public static void fakeCommitFile(String basePath, String commitTime) throws IOException {
     fakeMetaFile(basePath, commitTime, HoodieTimeline.COMMIT_EXTENSION);
   }
diff --git a/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java b/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java
index 130fb54..d30b991 100644
--- a/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java
+++ b/hudi-client/src/test/java/org/apache/hudi/common/TestRawTripPayload.java
@@ -79,7 +79,6 @@ public class TestRawTripPayload implements HoodieRecordPayload<TestRawTripPayloa
     return partitionPath;
   }
 
-
   @Override
   public TestRawTripPayload preCombine(TestRawTripPayload another) {
     return another;
@@ -129,7 +128,6 @@ public class TestRawTripPayload implements HoodieRecordPayload<TestRawTripPayloa
     return baos.toByteArray();
   }
 
-
   private String unCompressData(byte[] data) throws IOException {
     try (InflaterInputStream iis = new InflaterInputStream(new ByteArrayInputStream(data))) {
       return FileIOUtils.readAsUTFString(iis, dataSize);
diff --git a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
index 68b820a..d8be596 100644
--- a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
+++ b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
@@ -291,7 +291,6 @@ public class TestHoodieBloomIndex extends HoodieClientTestHarness {
     }
   }
 
-
   @Test
   public void testTagLocation() throws Exception {
     // We have some records to be tagged (two different partitions)
@@ -433,7 +432,6 @@ public class TestHoodieBloomIndex extends HoodieClientTestHarness {
     }
   }
 
-
   @Test
   public void testBloomFilterFalseError() throws IOException, InterruptedException {
     // We have two hoodie records
diff --git a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
index a6cdc40..5da5332 100644
--- a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
+++ b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
@@ -196,7 +196,6 @@ public class TestHoodieGlobalBloomIndex extends HoodieClientTestHarness {
     assertEquals(new HashSet<>(Arrays.asList("f4", "f1")), new HashSet<>(recordKeyToFileComps.get("005")));
   }
 
-
   @Test
   public void testTagLocation() throws Exception {
     HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).build();
diff --git a/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java b/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java
index 52c6fb4..f9310d3 100644
--- a/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java
+++ b/hudi-client/src/test/java/org/apache/hudi/table/TestCopyOnWriteTable.java
@@ -250,7 +250,6 @@ public class TestCopyOnWriteTable extends HoodieClientTestHarness {
     assertEquals(4, writeStatus.getStat().getNumWrites());// 3 rewritten records + 1 new record
   }
 
-
   private List<HoodieRecord> newHoodieRecords(int n, String time) throws Exception {
     List<HoodieRecord> records = new ArrayList<>();
     for (int i = 0; i < n; i++) {
@@ -387,7 +386,6 @@ public class TestCopyOnWriteTable extends HoodieClientTestHarness {
     assertEquals("If the number of records are more than 1150, then there should be a new file", 3, counts);
   }
 
-
   private UpsertPartitioner getUpsertPartitioner(int smallFileSize, int numInserts, int numUpdates, int fileSize,
       String testPartitionPath, boolean autoSplitInserts) throws Exception {
     HoodieWriteConfig config = makeHoodieClientConfigBuilder()
@@ -419,7 +417,6 @@ public class TestCopyOnWriteTable extends HoodieClientTestHarness {
     return partitioner;
   }
 
-
   @Test
   public void testUpsertPartitioner() throws Exception {
     final String testPartitionPath = "2016/09/26";
@@ -429,7 +426,6 @@ public class TestCopyOnWriteTable extends HoodieClientTestHarness {
     assertEquals("Total of 2 insert buckets", 2, insertBuckets.size());
   }
 
-
   @Test
   public void testUpsertPartitionerWithSmallInsertHandling() throws Exception {
     final String testPartitionPath = "2016/09/26";
diff --git a/hudi-common/src/main/java/org/apache/hudi/avro/HoodieAvroWriteSupport.java b/hudi-common/src/main/java/org/apache/hudi/avro/HoodieAvroWriteSupport.java
index bd863ac..f5a427f 100644
--- a/hudi-common/src/main/java/org/apache/hudi/avro/HoodieAvroWriteSupport.java
+++ b/hudi-common/src/main/java/org/apache/hudi/avro/HoodieAvroWriteSupport.java
@@ -39,7 +39,6 @@ public class HoodieAvroWriteSupport extends AvroWriteSupport {
   public static final String HOODIE_MIN_RECORD_KEY_FOOTER = "hoodie_min_record_key";
   public static final String HOODIE_MAX_RECORD_KEY_FOOTER = "hoodie_max_record_key";
 
-
   public HoodieAvroWriteSupport(MessageType schema, Schema avroSchema, BloomFilter bloomFilter) {
     super(schema, avroSchema);
     this.bloomFilter = bloomFilter;
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java b/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java
index 54bdc0a..e96222b 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/HoodieJsonPayload.java
@@ -80,7 +80,6 @@ public class HoodieJsonPayload implements HoodieRecordPayload<HoodieJsonPayload>
     return baos.toByteArray();
   }
 
-
   private String unCompressData(byte[] data) throws IOException {
     InflaterInputStream iis = new InflaterInputStream(new ByteArrayInputStream(data));
     try {
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodiePartitionMetadata.java b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodiePartitionMetadata.java
index 1673871..e967a5d 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodiePartitionMetadata.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodiePartitionMetadata.java
@@ -51,7 +51,6 @@ public class HoodiePartitionMetadata {
 
   private static Logger log = LogManager.getLogger(HoodiePartitionMetadata.class);
 
-
   /**
    * Construct metadata from existing partition
    */
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java
index 6e02ecc..eee8eda 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java
@@ -98,7 +98,6 @@ public class HoodieRecord<T extends HoodieRecordPayload> implements Serializable
     this.data = null;
   }
 
-
   /**
    * Sets the current currentLocation of the record. This should happen exactly-once
    */
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java
index 4421365..ab1d8a6 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableConfig.java
@@ -114,7 +114,6 @@ public class HoodieTableConfig implements Serializable {
     }
   }
 
-
   /**
    * Read the table type from the table properties and if not found, return the default
    */
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
index 7a770e6..882ce02 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
@@ -267,7 +267,6 @@ public class HoodieTableMetaClient implements Serializable {
     return archivedTimeline;
   }
 
-
   /**
    * Helper method to initialize a dataset, with given basePath, tableType, name, archiveFolder
    */
@@ -410,7 +409,6 @@ public class HoodieTableMetaClient implements Serializable {
     }
   }
 
-
   /**
    * Helper method to scan all hoodie-instant metafiles and construct HoodieInstant objects
    *
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFileReader.java b/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFileReader.java
index 312d09e..e911830 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFileReader.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFileReader.java
@@ -294,7 +294,6 @@ class HoodieLogFileReader implements HoodieLogFormat.Reader {
     return new HoodieLogFormatVersion(inputStream.readInt());
   }
 
-
   private boolean readMagic() throws IOException {
     try {
       boolean hasMagic = hasNextMagic();
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFormat.java b/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFormat.java
index c0ec90b..7ca1cc0 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFormat.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/log/HoodieLogFormat.java
@@ -97,7 +97,6 @@ public interface HoodieLogFormat {
     public HoodieLogBlock prev() throws IOException;
   }
 
-
   /**
    * Builder class to construct the default log format writer
    */
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieDeleteBlock.java b/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieDeleteBlock.java
index ae0a20d..e4c2ff7 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieDeleteBlock.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/log/block/HoodieDeleteBlock.java
@@ -45,7 +45,6 @@ public class HoodieDeleteBlock extends HoodieLogBlock {
     this.keysToDelete = keysToDelete;
   }
 
-
   private HoodieDeleteBlock(Option<byte[]> content, FSDataInputStream inputStream, boolean readBlockLazily,
       Option<HoodieLogBlockContentLocation> blockContentLocation, Map<HeaderMetadataType, String> header,
       Map<HeaderMetadataType, String> footer) {
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java
index eb82c6b..721d55f 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieActiveTimeline.java
@@ -171,7 +171,6 @@ public class HoodieActiveTimeline extends HoodieDefaultTimeline {
         (Function<HoodieInstant, Option<byte[]>> & Serializable) this::getInstantDetails);
   }
 
-
   /**
    * Get only the cleaner action (inflight and completed) in the active timeline
    */
@@ -364,7 +363,6 @@ public class HoodieActiveTimeline extends HoodieDefaultTimeline {
     return inflight;
   }
 
-
   private void transitionState(HoodieInstant fromInstant, HoodieInstant toInstant, Option<byte[]> data) {
     Preconditions.checkArgument(fromInstant.getTimestamp().equals(toInstant.getTimestamp()));
     Path commitFilePath = new Path(metaClient.getMetaPath(), toInstant.getFileName());
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieArchivedTimeline.java b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieArchivedTimeline.java
index f579093..552f8e7 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieArchivedTimeline.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/HoodieArchivedTimeline.java
@@ -93,7 +93,6 @@ public class HoodieArchivedTimeline extends HoodieDefaultTimeline {
     in.defaultReadObject();
   }
 
-
   public static Path getArchiveLogPath(String archiveFolder) {
     return new Path(archiveFolder, HOODIE_COMMIT_ARCHIVE_LOG_FILE);
   }
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/dto/InstantDTO.java b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/dto/InstantDTO.java
index 4d51fd6..20beec6 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/dto/InstantDTO.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/timeline/dto/InstantDTO.java
@@ -22,7 +22,6 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 
-
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class InstantDTO {
 
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/view/AbstractTableFileSystemView.java b/hudi-common/src/main/java/org/apache/hudi/common/table/view/AbstractTableFileSystemView.java
index 74ddb9e..b477f24 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/view/AbstractTableFileSystemView.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/view/AbstractTableFileSystemView.java
@@ -655,7 +655,6 @@ public abstract class AbstractTableFileSystemView implements SyncableFileSystemV
         .map(Option::get);
   }
 
-
   protected Option<HoodieDataFile> getLatestDataFile(HoodieFileGroup fileGroup) {
     return Option
         .fromJavaOptional(fileGroup.getAllDataFiles().filter(df -> !isDataFileDueToPendingCompaction(df)).findFirst());
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/view/FileSystemViewManager.java b/hudi-common/src/main/java/org/apache/hudi/common/table/view/FileSystemViewManager.java
index c9e556a..6a233a2 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/view/FileSystemViewManager.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/view/FileSystemViewManager.java
@@ -130,7 +130,6 @@ public class FileSystemViewManager {
     return new SpillableMapBasedFileSystemView(metaClient, timeline, viewConf);
   }
 
-
   /**
    * Create an in-memory file System view for a dataset
    * 
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java
index 243a1a3..1b60ffa 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/AvroUtils.java
@@ -111,7 +111,6 @@ public class AvroUtils {
     return serializeAvroMetadata(compactionWorkload, HoodieCompactionPlan.class);
   }
 
-
   public static Option<byte[]> serializeCleanerPlan(HoodieCleanerPlan cleanPlan) throws IOException {
     return serializeAvroMetadata(cleanPlan, HoodieCleanerPlan.class);
   }
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/ConsistencyGuard.java b/hudi-common/src/main/java/org/apache/hudi/common/util/ConsistencyGuard.java
index 0327c62..89190aa 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/ConsistencyGuard.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/ConsistencyGuard.java
@@ -63,7 +63,6 @@ public interface ConsistencyGuard {
    */
   void waitTillAllFilesDisappear(String dirPath, List<String> files) throws IOException, TimeoutException;
 
-
   /**
    * Wait Till target visibility is reached
    * 
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java
index 06a2cfa..86e3e87 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java
@@ -109,7 +109,6 @@ public class FSUtils {
     return String.format("%d-%d-%d", taskPartitionId, stageId, taskAttemptId);
   }
 
-
   public static String makeDataFileName(String commitTime, String writeToken, String fileId) {
     return String.format("%s_%s_%s.parquet", fileId, writeToken, commitTime);
   }
@@ -150,7 +149,6 @@ public class FSUtils {
     return fullFileName.split("_")[0];
   }
 
-
   /**
    * Gets all partition paths assuming date partitioning (year, month, day) three levels down.
    */
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/HoodieAvroUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/HoodieAvroUtils.java
index 4c0f983..875ba05 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/HoodieAvroUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/HoodieAvroUtils.java
@@ -183,7 +183,6 @@ public class HoodieAvroUtils {
     return record;
   }
 
-
   /**
    * Given a avro record with a given schema, rewrites it into the new schema while setting fields only from the old
    * schema
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/SerializationUtils.java b/hudi-common/src/main/java/org/apache/hudi/common/util/SerializationUtils.java
index cc9d2f1..b686bd1 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/SerializationUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/SerializationUtils.java
@@ -32,7 +32,6 @@ import java.lang.reflect.InvocationTargetException;
 import org.apache.hudi.exception.HoodieSerializationException;
 import org.objenesis.instantiator.ObjectInstantiator;
 
-
 /**
  * {@link SerializationUtils} class internally uses {@link Kryo} serializer for serializing / deserializing objects.
  */
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/TimelineDiffHelper.java b/hudi-common/src/main/java/org/apache/hudi/common/util/TimelineDiffHelper.java
index 2253c31..e4ca221 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/TimelineDiffHelper.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/TimelineDiffHelper.java
@@ -29,7 +29,6 @@ import org.apache.hudi.common.util.collection.Pair;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
-
 public class TimelineDiffHelper {
 
   protected static Logger log = LogManager.getLogger(TimelineDiffHelper.class);
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryExecutor.java b/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryExecutor.java
index 835764a..e319994 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryExecutor.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryExecutor.java
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.hudi.common.util.queue;
 
 import java.util.Arrays;
@@ -143,7 +142,6 @@ public class BoundedInMemoryExecutor<I, O, E> {
     }
   }
 
-
   public boolean isRemaining() {
     return queue.iterator().hasNext();
   }
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryQueueConsumer.java b/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryQueueConsumer.java
index fb73697..d0c2d34 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryQueueConsumer.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/queue/BoundedInMemoryQueueConsumer.java
@@ -20,7 +20,6 @@ package org.apache.hudi.common.util.queue;
 
 import java.util.Iterator;
 
-
 /**
  * Consume entries from queue and execute callback function
  */
@@ -59,5 +58,4 @@ public abstract class BoundedInMemoryQueueConsumer<I, O> {
    */
   protected abstract O getResult();
 
-
 }
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/table/TestHoodieTableMetaClient.java b/hudi-common/src/test/java/org/apache/hudi/common/table/TestHoodieTableMetaClient.java
index 9318f7c..68fb1c9 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/table/TestHoodieTableMetaClient.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/table/TestHoodieTableMetaClient.java
@@ -123,5 +123,4 @@ public class TestHoodieTableMetaClient extends HoodieCommonTestHarness {
     assertArrayEquals(new Text("data3").getBytes(), archivedTimeline.getInstantDetails(instant3).get());
   }
 
-
 }
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormat.java b/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormat.java
index ab85233..dffb742 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormat.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormat.java
@@ -471,7 +471,6 @@ public class TestHoodieLogFormat extends HoodieCommonTestHarness {
 
   }
 
-
   @Test
   public void testAppendAndReadOnCorruptedLog() throws IOException, URISyntaxException, InterruptedException {
     Writer writer =
@@ -556,7 +555,6 @@ public class TestHoodieLogFormat extends HoodieCommonTestHarness {
     reader.close();
   }
 
-
   @Test
   public void testAvroLogRecordReaderBasic() throws IOException, URISyntaxException, InterruptedException {
     Schema schema = HoodieAvroUtils.addMetadataFields(getSimpleSchema());
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestIncrementalFSViewSync.java b/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestIncrementalFSViewSync.java
index 0950f2c..acc8cb4 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestIncrementalFSViewSync.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestIncrementalFSViewSync.java
@@ -287,8 +287,6 @@ public class TestIncrementalFSViewSync extends HoodieCommonTestHarness {
    * HELPER METHODS
    *********************************************************************************************************
    */
-
-
   /**
    * Helper to run one or more rounds of cleaning, incrementally syncing the view and then validate
    */
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java b/hudi-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java
index 71499c6..f58603c 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/util/TestDFSPropertiesConfiguration.java
@@ -41,7 +41,6 @@ public class TestDFSPropertiesConfiguration {
   private static MiniDFSCluster dfsCluster;
   private static DistributedFileSystem dfs;
 
-
   @BeforeClass
   public static void initClass() throws Exception {
     hdfsTestService = new HdfsTestService();
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/util/TestHoodieAvroUtils.java b/hudi-common/src/test/java/org/apache/hudi/common/util/TestHoodieAvroUtils.java
index a0dc79b..18a9c8d 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/util/TestHoodieAvroUtils.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/util/TestHoodieAvroUtils.java
@@ -24,7 +24,6 @@ import org.codehaus.jackson.JsonNode;
 import org.junit.Assert;
 import org.junit.Test;
 
-
 public class TestHoodieAvroUtils {
 
   private static String EXAMPLE_SCHEMA = "{\"type\": \"record\"," + "\"name\": \"testrec\"," + "\"fields\": [ "
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
index 7b6e7ee..bbd53ec 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/HoodieROTablePathFilter.java
@@ -64,7 +64,6 @@ public class HoodieROTablePathFilter implements PathFilter, Serializable {
 
   private transient FileSystem fs;
 
-
   public HoodieROTablePathFilter() {
     hoodiePathCache = new HashMap<>();
     nonHoodiePathCache = new HashSet<>();
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/SafeParquetRecordReaderWrapper.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/SafeParquetRecordReaderWrapper.java
index f4db128..8a42b13 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/SafeParquetRecordReaderWrapper.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/SafeParquetRecordReaderWrapper.java
@@ -42,7 +42,6 @@ public class SafeParquetRecordReaderWrapper implements RecordReader<NullWritable
   // Number of fields in Value Schema
   private final int numValueFields;
 
-
   public SafeParquetRecordReaderWrapper(RecordReader<NullWritable, ArrayWritable> parquetReader) {
     this.parquetReader = parquetReader;
     ArrayWritable arrayWritable = parquetReader.createValue();
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieParquetRealtimeInputFormat.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieParquetRealtimeInputFormat.java
index bb21116..7a3492e 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieParquetRealtimeInputFormat.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieParquetRealtimeInputFormat.java
@@ -150,7 +150,6 @@ public class HoodieParquetRealtimeInputFormat extends HoodieParquetInputFormat i
     return rtSplits.toArray(new InputSplit[rtSplits.size()]);
   }
 
-
   @Override
   public FileStatus[] listStatus(JobConf job) throws IOException {
     // Call the HoodieInputFormat::listStatus to obtain all latest parquet files, based on commit
diff --git a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeFileSplit.java b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeFileSplit.java
index 2fd5afa..5ae344e 100644
--- a/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeFileSplit.java
+++ b/hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeFileSplit.java
@@ -73,7 +73,6 @@ public class HoodieRealtimeFileSplit extends FileSplit {
     return new String(bytes, StandardCharsets.UTF_8);
   }
 
-
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);
diff --git a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/InputFormatTestUtil.java b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/InputFormatTestUtil.java
index 2932a15..7a5b7d4 100644
--- a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/InputFormatTestUtil.java
+++ b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/InputFormatTestUtil.java
@@ -113,7 +113,6 @@ public class InputFormatTestUtil {
     return partitionPath;
   }
 
-
   public static File prepareSimpleParquetDataset(TemporaryFolder basePath, Schema schema, int numberOfFiles,
       int numberOfRecords, String commitNumber) throws Exception {
     basePath.create();
diff --git a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieROTablePathFilter.java b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieROTablePathFilter.java
index 32c4ba8..3d17140 100644
--- a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieROTablePathFilter.java
+++ b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestHoodieROTablePathFilter.java
@@ -32,8 +32,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
-
 /**
+ *
  */
 public class TestHoodieROTablePathFilter extends HoodieCommonTestHarness {
 
diff --git a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestRecordReaderValueIterator.java b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestRecordReaderValueIterator.java
index 3f3f05e..9c050cd 100644
--- a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestRecordReaderValueIterator.java
+++ b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/TestRecordReaderValueIterator.java
@@ -58,7 +58,6 @@ public class TestRecordReaderValueIterator {
       this.entries = entries;
     }
 
-
     @Override
     public boolean next(IntWritable key, Text value) throws IOException {
       if (currIndex >= entries.size()) {
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/HiveSyncTool.java b/hudi-hive/src/main/java/org/apache/hudi/hive/HiveSyncTool.java
index 40ac91e..315f575 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/HiveSyncTool.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/HiveSyncTool.java
@@ -157,7 +157,6 @@ public class HiveSyncTool {
     }
   }
 
-
   /**
    * Syncs the list of storage parititions passed in (checks if the partition is in hive, if not adds it or if the
    * partition path does not match, it updates the partition path)
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
index 1dd592d..ed319ce 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
@@ -234,7 +234,6 @@ public class HoodieHiveClient {
     return events;
   }
 
-
   /**
    * Scan table partitions
    */
@@ -531,8 +530,6 @@ public class HoodieHiveClient {
     return responses;
   }
 
-
-
   private void createHiveConnection() {
     if (connection == null) {
       try {
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java b/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
index 72ff69a..2d4c5b5 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
@@ -132,7 +132,6 @@ public class SchemaUtil {
     return false;
   }
 
-
   /**
    * Returns equivalent Hive table schema read from a parquet file
    *
@@ -296,7 +295,6 @@ public class SchemaUtil {
     return finalStr;
   }
 
-
   private static String hiveCompatibleFieldName(String fieldName, boolean isNested) {
     String result = fieldName;
     if (isNested) {
diff --git a/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java b/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java
index b253114..5183d67 100644
--- a/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java
+++ b/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java
@@ -148,7 +148,6 @@ public class TestHiveSyncTool {
     assertEquals("`map_list` ARRAY< MAP< string, int>>", schemaString);
   }
 
-
   @Test
   public void testBasicSync() throws Exception {
     TestUtil.hiveSyncConfig.useJdbc = this.useJdbc;
diff --git a/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java b/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java
index 924d7a1..4ecf280 100644
--- a/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java
+++ b/hudi-hive/src/test/java/org/apache/hudi/hive/util/HiveTestService.java
@@ -218,8 +218,6 @@ public class HiveTestService {
 
   // XXX: From org.apache.hadoop.hive.metastore.HiveMetaStore,
   // with changes to support binding to a specified IP address (not only 0.0.0.0)
-
-
   private static final class ChainedTTransportFactory extends TTransportFactory {
 
     private final TTransportFactory parentTransFactory;
@@ -236,7 +234,6 @@ public class HiveTestService {
     }
   }
 
-
   private static final class TServerSocketKeepAlive extends TServerSocket {
 
     public TServerSocketKeepAlive(int port) throws TTransportException {
diff --git a/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java b/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java
index da90022..9ce79e9 100644
--- a/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java
+++ b/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java
@@ -160,7 +160,6 @@ public class DataSourceUtils {
     return new HoodieWriteClient<>(jssc, writeConfig, true);
   }
 
-
   public static JavaRDD<WriteStatus> doWriteOperation(HoodieWriteClient client, JavaRDD<HoodieRecord> hoodieRecords,
       String commitTime, String operation) {
     if (operation.equals(DataSourceWriteOptions.BULK_INSERT_OPERATION_OPT_VAL())) {
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
index 540653d..4f72b24 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
@@ -35,7 +35,6 @@ import org.apache.log4j.Logger;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 
-
 public class HoodieCompactor {
 
   private static volatile Logger logger = LogManager.getLogger(HoodieCompactor.class);
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/DeltaSync.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/DeltaSync.java
index fb16268..73ef8a3 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/DeltaSync.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/DeltaSync.java
@@ -75,7 +75,6 @@ import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
 import scala.collection.JavaConversions;
 
-
 /**
  * Sync's one batch of data to hoodie dataset
  */
@@ -155,7 +154,6 @@ public class DeltaSync implements Serializable {
    */
   private final HoodieTableType tableType;
 
-
   public DeltaSync(HoodieDeltaStreamer.Config cfg, SparkSession sparkSession, SchemaProvider schemaProvider,
       HoodieTableType tableType, TypedProperties props, JavaSparkContext jssc, FileSystem fs, HiveConf hiveConf,
       Function<HoodieWriteClient, Boolean> onInitializingHoodieWriteClient) throws IOException {
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/HoodieDeltaStreamer.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/HoodieDeltaStreamer.java
index 4b90d90..db8b40a 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/HoodieDeltaStreamer.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/HoodieDeltaStreamer.java
@@ -65,7 +65,6 @@ import org.apache.log4j.Logger;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SparkSession;
 
-
 /**
  * An Utility which can incrementally take the output from {@link HiveIncrementalPuller} and apply it to the target
  * dataset. Does not maintain any state, queries at runtime to see how far behind the target dataset is from the source
@@ -267,11 +266,9 @@ public class HoodieDeltaStreamer implements Serializable {
     @Parameter(names = {"--checkpoint"}, description = "Resume Delta Streamer from this checkpoint.")
     public String checkpoint = null;
 
-
     @Parameter(names = {"--help", "-h"}, help = true)
     public Boolean help = false;
 
-
     public boolean isAsyncCompactionEnabled() {
       return continuousMode && !forceDisableCompaction
           && HoodieTableType.MERGE_ON_READ.equals(HoodieTableType.valueOf(storageType));
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java
index d519085..cb4477a 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SchedulerConfGenerator.java
@@ -57,7 +57,6 @@ public class SchedulerConfGenerator {
         compactionMinShare.toString());
   }
 
-
   /**
    * Helper to set Spark Scheduling Configs dynamically
    *
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SourceFormatAdapter.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SourceFormatAdapter.java
index e44ba53..ab3b070 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SourceFormatAdapter.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/deltastreamer/SourceFormatAdapter.java
@@ -44,7 +44,6 @@ public final class SourceFormatAdapter {
 
   private final Source source;
 
-
   public SourceFormatAdapter(Source source) {
     this.source = source;
   }
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/keygen/TimestampBasedKeyGenerator.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/keygen/TimestampBasedKeyGenerator.java
index 5d0bae2..970ec54 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/keygen/TimestampBasedKeyGenerator.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/keygen/TimestampBasedKeyGenerator.java
@@ -48,7 +48,6 @@ public class TimestampBasedKeyGenerator extends SimpleKeyGenerator {
 
   private final String outputDateFormat;
 
-
   /**
    * Supported configs
    */
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/perf/TimelineServerPerf.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/perf/TimelineServerPerf.java
index 81a418f..ef1e986 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/perf/TimelineServerPerf.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/perf/TimelineServerPerf.java
@@ -209,7 +209,6 @@ public class TimelineServerPerf implements Serializable {
     }
   }
 
-
   private static class PerfStats implements Serializable {
 
     private final String partition;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/HiveIncrPullSource.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/HiveIncrPullSource.java
index 621b6fc..b37a219 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/HiveIncrPullSource.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/HiveIncrPullSource.java
@@ -63,7 +63,6 @@ public class HiveIncrPullSource extends AvroSource {
 
   private final String incrPullRootPath;
 
-
   /**
    * Configs supported
    */
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/AvroConvertor.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/AvroConvertor.java
index 6416cf9..d5c4621 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/AvroConvertor.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/AvroConvertor.java
@@ -50,7 +50,6 @@ public class AvroConvertor implements Serializable {
    */
   private transient Injection<GenericRecord, byte[]> recordInjection;
 
-
   public AvroConvertor(String schemaStr) {
     this.schemaStr = schemaStr;
   }
@@ -79,7 +78,6 @@ public class AvroConvertor implements Serializable {
     }
   }
 
-
   public GenericRecord fromJson(String json) throws IOException {
     initSchema();
     initJsonConvertor();
@@ -90,7 +88,6 @@ public class AvroConvertor implements Serializable {
     return new Schema.Parser().parse(schemaStr);
   }
 
-
   public GenericRecord fromAvroBinary(byte[] avroBinary) {
     initSchema();
     initInjection();
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java
index 9dd232d..4211af6 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/KafkaOffsetGen.java
@@ -43,7 +43,6 @@ import scala.collection.mutable.ArrayBuffer;
 import scala.collection.mutable.StringBuilder;
 import scala.util.Either;
 
-
 /**
  * Source to read data from Kafka, incrementally
  */
@@ -250,7 +249,6 @@ public class KafkaOffsetGen {
     return checkpointOffsetReseter ? earliestOffsets : checkpointOffsets;
   }
 
-
   public String getTopicName() {
     return topicName;
   }
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/TestHDFSParquetImporter.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/TestHDFSParquetImporter.java
index 2c170a2..045042f 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/TestHDFSParquetImporter.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/TestHDFSParquetImporter.java
@@ -61,7 +61,6 @@ public class TestHDFSParquetImporter implements Serializable {
   private static MiniDFSCluster dfsCluster;
   private static DistributedFileSystem dfs;
 
-
   @BeforeClass
   public static void initClass() throws Exception {
     hdfsTestService = new HdfsTestService();
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestKafkaSource.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestKafkaSource.java
index 9ac4bf4..241fae0 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestKafkaSource.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestKafkaSource.java
@@ -78,7 +78,6 @@ public class TestKafkaSource extends UtilitiesTestBase {
     testUtils.teardown();
   }
 
-
   @Test
   public void testJsonKafkaSource() throws IOException {
 
@@ -132,7 +131,6 @@ public class TestKafkaSource extends UtilitiesTestBase {
     assertEquals(Option.empty(), fetch4AsRows.getBatch());
   }
 
-
   private static HashMap<TopicAndPartition, LeaderOffset> makeOffsetMap(int[] partitions, long[] offsets) {
     HashMap<TopicAndPartition, LeaderOffset> map = new HashMap<>();
     for (int i = 0; i < partitions.length; i++) {
diff --git a/pom.xml b/pom.xml
index 398dbca..aa4d706 100644
--- a/pom.xml
+++ b/pom.xml
@@ -914,7 +914,7 @@
                 </goals>
               </execution>
             </executions>
- 	  </plugin>
+ 	      </plugin>
         </plugins>
       </build>
     </profile>
diff --git a/style/checkstyle.xml b/style/checkstyle.xml
index 649243b..7eab7b4 100644
--- a/style/checkstyle.xml
+++ b/style/checkstyle.xml
@@ -101,6 +101,9 @@
         <module name="ModifierOrder"/>
         <module name="EmptyLineSeparator">
             <property name="allowNoEmptyLineBetweenFields" value="true"/>
+            <property name="allowMultipleEmptyLines" value="false"/>
+            <property name="tokens" value="PACKAGE_DEF, IMPORT, CLASS_DEF, INTERFACE_DEF, ENUM_DEF,
+                         STATIC_INIT, INSTANCE_INIT, METHOD_DEF, CTOR_DEF"/>
         </module>
         <module name="SeparatorWrap">
             <property name="id" value="SeparatorWrapDot"/>
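For context on what the new checkstyle settings enforce: with allowMultipleEmptyLines set to false, the EmptyLineSeparator check reports any run of two or more consecutive blank lines before the listed tokens (package and import declarations, class/interface/enum definitions, initializers, methods, and constructors), which is exactly the pattern the blank-line removals above clean up. The following is a minimal, hypothetical sketch of code that the rule would now flag versus code that passes; the class and method names are invented purely for illustration and do not appear in the Hudi codebase.

    // Would be flagged: two blank lines precede the method definition.
    public class ExampleService {

      private final String name = "example";


      public String getName() {
        return name;
      }
    }

    // Passes: at most one blank line separates the field and the method.
    public class ExampleService {

      private final String name = "example";

      public String getName() {
        return name;
      }
    }

Note that allowNoEmptyLineBetweenFields remains true, so adjacent field declarations may still be written without a separating blank line.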