Posted to commits@storm.apache.org by et...@apache.org on 2020/03/06 04:53:31 UTC

[storm] branch master updated: [STORM-3594] Add checkstyle rule WhitespaceAfter

This is an automated email from the ASF dual-hosted git repository.

ethanli pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/storm.git


The following commit(s) were added to refs/heads/master by this push:
     new 81933b8  [STORM-3594] Add checkstyle rule WhitespaceAfter
     new 6b02a65  Merge pull request #3220 from Ethanlm/STORM-3594
81933b8 is described below

commit 81933b8978d9c62bcd71fb41c7c10918b9f9ded4
Author: Ethan Li <et...@gmail.com>
AuthorDate: Wed Mar 4 18:24:14 2020 -0600

    [STORM-3594] Add checkstyle rule WhitespaceAfter
---
 .../storm/hive/bolt/BucketTestHiveTopology.java    |  2 +-
 .../org/apache/storm/hive/bolt/HiveTopology.java   | 10 +++----
 .../storm/hive/bolt/HiveTopologyPartitioned.java   | 12 ++++----
 .../storm/hive/trident/TridentHiveTopology.java    | 28 +++++++++---------
 .../org/apache/storm/jdbc/spout/UserSpout.java     |  8 ++---
 .../storm/jdbc/topology/AbstractUserTopology.java  |  8 ++---
 .../topology/UserPersistenceTridentTopology.java   |  4 +--
 .../storm/jms/example/SpringJmsProvider.java       |  4 +--
 .../TridentKafkaClientTopologyWildcardTopics.java  |  2 +-
 .../storm/loadgen/ExecAndProcessLatencyEngine.java |  2 +-
 .../java/org/apache/storm/loadgen/GenLoad.java     |  2 +-
 .../loadgen/HttpForwardingMetricsConsumer.java     |  2 +-
 .../storm/loadgen/HttpForwardingMetricsServer.java |  4 +--
 .../org/apache/storm/loadgen/LoadCompConf.java     |  4 +--
 .../apache/storm/loadgen/LoadMetricsServer.java    | 34 +++++++++++-----------
 .../java/org/apache/storm/loadgen/LoadSpout.java   |  4 +--
 .../org/apache/storm/loadgen/NormalDistStats.java  |  2 +-
 .../apache/storm/loadgen/OutputStreamEngine.java   | 10 +++----
 .../org/apache/storm/loadgen/TopologyLoadConf.java | 14 ++++-----
 .../storm/redis/trident/WordCountTridentRedis.java |  4 +--
 .../trident/WordCountTridentRedisCluster.java      |  4 +--
 .../apache/storm/solr/spout/SolrFieldsSpout.java   |  2 +-
 .../storm/hdfs/security/AutoHDFSCommand.java       |  4 +--
 .../storm/hdfs/security/HdfsSecurityUtil.java      |  4 +--
 .../apache/storm/hdfs/blobstore/HdfsBlobStore.java |  6 ++--
 .../storm/hdfs/blobstore/HdfsBlobStoreFile.java    |  8 ++---
 .../org/apache/storm/kafka/bolt/KafkaBolt.java     | 10 +++----
 .../mapper/FieldNameBasedTupleToKafkaMapper.java   |  2 +-
 .../kafka/bolt/mapper/TupleToKafkaMapper.java      |  2 +-
 .../storm/kafka/spout/ByTopicRecordTranslator.java |  8 ++---
 .../spout/KafkaSpoutRetryExponentialBackoff.java   |  2 +-
 .../apache/storm/kafka/spout/RecordTranslator.java |  2 +-
 .../spout/internal/CommonKafkaSpoutConfig.java     | 16 +++++-----
 .../kafka/spout/internal/ConsumerFactory.java      |  2 +-
 .../kafka/spout/metrics/KafkaOffsetMetric.java     |  4 +--
 .../trident/KafkaTridentSpoutCoordinator.java      |  2 +-
 .../spout/trident/KafkaTridentSpoutOpaque.java     |  2 +-
 .../trident/KafkaTridentSpoutTransactional.java    |  2 +-
 .../trident/mapper/TridentTupleToKafkaMapper.java  |  2 +-
 .../org/apache/storm/kafka/migration/MapUtil.java  |  2 +-
 .../storm/mongodb/trident/state/MongoMapState.java |  2 +-
 .../org/apache/storm/rocketmq/RocketMqUtils.java   |  4 +--
 .../apache/storm/rocketmq/spout/RocketMqSpout.java |  4 +--
 .../apache/storm/flux/model/ConfigMethodDef.java   |  4 +--
 .../org/apache/storm/flux/model/ObjectDef.java     |  4 +--
 .../storm/flux/examples/WordCountClient.java       |  2 +-
 .../storm/flux/wrappers/bolts/FluxShellBolt.java   |  6 ++--
 .../storm/flux/wrappers/spouts/FluxShellSpout.java |  6 ++--
 .../java/org/apache/storm/debug/DebugHelper.java   |  2 +-
 .../apache/storm/sql/runtime/utils/SerdeUtils.java |  6 ++--
 .../src/main/resources/storm/storm_checkstyle.xml  |  3 ++
 .../src/jvm/org/apache/storm/StormSubmitter.java   |  4 +--
 .../jvm/org/apache/storm/executor/Executor.java    |  2 +-
 .../apache/storm/metrics2/StormMetricRegistry.java |  2 +-
 .../apache/storm/pacemaker/PacemakerClient.java    |  2 +-
 .../apache/storm/security/auth/ThriftServer.java   |  2 +-
 .../java/org/apache/storm/clojure/ClojureBolt.java |  2 +-
 .../org/apache/storm/clojure/ClojureSpout.java     |  2 +-
 .../org/apache/storm/command/AdminCommands.java    |  6 ++--
 .../org/apache/storm/command/ShellSubmission.java  |  2 +-
 .../apache/storm/command/UploadCredentials.java    |  4 +--
 .../storm/shade/org/apache/zookeeper/ZkCli.java    | 12 ++++----
 .../apache/storm/daemon/nimbus/HeartbeatCache.java |  4 +--
 .../org/apache/storm/daemon/nimbus/Nimbus.java     |  4 +--
 .../apache/storm/localizer/LocalizedResource.java  |  2 +-
 .../normalization/NormalizedResourceRequest.java   |  6 ++--
 .../java/org/apache/storm/utils/ServerUtils.java   |  2 +-
 .../org/apache/storm/daemon/drpc/DRPCServer.java   |  2 +-
 .../storm/daemon/drpc/webapp/ReqContextFilter.java |  2 +-
 .../storm/daemon/logviewer/LogviewerServer.java    |  2 +-
 .../daemon/logviewer/utils/DirectoryCleaner.java   |  4 +--
 .../java/org/apache/storm/daemon/ui/UIHelpers.java | 32 ++++++++++----------
 .../java/org/apache/storm/daemon/ui/UIServer.java  | 10 +++----
 73 files changed, 201 insertions(+), 198 deletions(-)

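Note that the three lines added to storm/src/main/resources/storm/storm_checkstyle.xml (listed in the diffstat above) fall outside this excerpt, so the rule definition itself is not shown below. A minimal sketch of what such an entry would look like, using the standard Checkstyle WhitespaceAfter module, follows; the token list here is an assumption inferred from the kinds of violations fixed in this patch (missing spaces after commas and after typecasts), not the verbatim contents of Storm's config.

    <!-- Sketch only: enables Checkstyle's WhitespaceAfter check.
         The token list is assumed from the fixes in this patch
         (commas and casts); the real storm_checkstyle.xml entry
         is not shown in this excerpt. -->
    <module name="WhitespaceAfter">
        <property name="tokens" value="COMMA, TYPECAST"/>
    </module>

With a rule like this in place, a checkstyle run during the build (for example via the Maven checkstyle plugin, assuming the project wires checkstyle into its build as Storm does) would flag code such as new Fields("id","name") until it is rewritten as new Fields("id", "name"), which is exactly the pattern repeated throughout the diff below.
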
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
index 61ea97c..8432c53 100644
--- a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/BucketTestHiveTopology.java
@@ -74,7 +74,7 @@ public class BucketTestHiveTopology {
         DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
                 .withColumnFields(new Fields(colNames)).withTimeAsPartitionField("yyyy/MM/dd");
         HiveOptions hiveOptions;
-        hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+        hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
             .withTxnsPerBatch(10)
             .withBatchSize(hiveBatchSize);
         // doing below because its affecting storm metrics most likely
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java
index 46f6d6d..4cfd3e4 100644
--- a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopology.java
@@ -44,7 +44,7 @@ public class HiveTopology {
         String metaStoreUri = args[0];
         String dbName = args[1];
         String tblName = args[2];
-        String[] colNames = {"id","name","phone","street","city","state"};
+        String[] colNames = {"id", "name", "phone", "street", "city", "state"};
         Config config = new Config();
         config.setNumWorkers(1);
         UserDataSpout spout = new UserDataSpout();
@@ -52,14 +52,14 @@ public class HiveTopology {
                 .withColumnFields(new Fields(colNames));
         HiveOptions hiveOptions;
         if (args.length == 6) {
-            hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+            hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
                 .withTxnsPerBatch(10)
                 .withBatchSize(100)
                 .withIdleTimeout(10)
                 .withKerberosKeytab(args[4])
                 .withKerberosPrincipal(args[5]);
         } else {
-            hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+            hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
                 .withTxnsPerBatch(10)
                 .withBatchSize(100)
                 .withIdleTimeout(10)
@@ -103,7 +103,7 @@ public class HiveTopology {
 
         @Override
         public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("id","name","phone","street","city","state"));
+            declarer.declare(new Fields("id", "name", "phone", "street", "city", "state"));
         }
 
         @Override
@@ -116,7 +116,7 @@ public class HiveTopology {
         @Override
         public void nextTuple() {
             String[] user = sentences[index].split(",");
-            Values values = new Values(Integer.parseInt(user[0]),user[1],user[2],user[3],user[4],user[5]);
+            Values values = new Values(Integer.parseInt(user[0]), user[1], user[2], user[3], user[4], user[5]);
             UUID msgId = UUID.randomUUID();
             this.pending.put(msgId, values);
             this.collector.emit(values, msgId);
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
index 4d31497..ca9fd5f 100644
--- a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/bolt/HiveTopologyPartitioned.java
@@ -45,8 +45,8 @@ public class HiveTopologyPartitioned {
         String metaStoreUri = args[0];
         String dbName = args[1];
         String tblName = args[2];
-        String[] partNames = {"city","state"};
-        String[] colNames = {"id","name","phone","street"};
+        String[] partNames = {"city", "state"};
+        String[] colNames = {"id", "name", "phone", "street"};
         Config config = new Config();
         config.setNumWorkers(1);
         UserDataSpout spout = new UserDataSpout();
@@ -55,14 +55,14 @@ public class HiveTopologyPartitioned {
             .withPartitionFields(new Fields(partNames));
         HiveOptions hiveOptions;
         if (args.length == 6) {
-            hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+            hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
                 .withTxnsPerBatch(10)
                 .withBatchSize(1000)
                 .withIdleTimeout(10)
                 .withKerberosKeytab(args[4])
                 .withKerberosPrincipal(args[5]);
         } else {
-            hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+            hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
                 .withTxnsPerBatch(10)
                 .withBatchSize(1000)
                 .withIdleTimeout(10);
@@ -104,7 +104,7 @@ public class HiveTopologyPartitioned {
 
         @Override
         public void declareOutputFields(OutputFieldsDeclarer declarer) {
-            declarer.declare(new Fields("id","name","phone","street","city","state"));
+            declarer.declare(new Fields("id", "name", "phone", "street", "city", "state"));
         }
 
         @Override
@@ -117,7 +117,7 @@ public class HiveTopologyPartitioned {
         @Override
         public void nextTuple() {
             String[] user = sentences[index].split(",");
-            Values values = new Values(Integer.parseInt(user[0]),user[1],user[2],user[3],user[4],user[5]);
+            Values values = new Values(Integer.parseInt(user[0]), user[1], user[2], user[3], user[4], user[5]);
             UUID msgId = UUID.randomUUID();
             this.pending.put(msgId, values);
             this.collector.emit(values, msgId);
diff --git a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java
index bb3e00c..1ba734d 100644
--- a/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java
+++ b/examples/storm-hive-examples/src/main/java/org/apache/storm/hive/trident/TridentHiveTopology.java
@@ -49,24 +49,24 @@ public class TridentHiveTopology {
         FixedBatchSpout spout = new FixedBatchSpout(batchSize);
         spout.setCycle(true);
         TridentTopology topology = new TridentTopology();
-        Stream stream = topology.newStream("hiveTridentspout1",spout);
-        String[] partNames = {"city","state"};
-        String[] colNames = {"id","name","phone","street"};
-        Fields hiveFields = new Fields("id","name","phone","street","city","state");
+        Stream stream = topology.newStream("hiveTridentspout1", spout);
+        String[] partNames = {"city", "state"};
+        String[] colNames = {"id", "name", "phone", "street"};
+        Fields hiveFields = new Fields("id", "name", "phone", "street", "city", "state");
         DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
             .withColumnFields(new Fields(colNames))
             .withPartitionFields(new Fields(partNames));
         HiveOptions hiveOptions;
         if (keytab != null && principal != null) {
-            hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+            hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
                 .withTxnsPerBatch(10)
                 .withBatchSize(batchSize)
                 .withIdleTimeout(10)
                 .withCallTimeout(30000)
-                .withKerberosKeytab((String)keytab)
-                .withKerberosPrincipal((String)principal);
+                .withKerberosKeytab((String) keytab)
+                .withKerberosPrincipal((String) principal);
         } else  {
-            hiveOptions = new HiveOptions(metaStoreUri,dbName,tblName,mapper)
+            hiveOptions = new HiveOptions(metaStoreUri, dbName, tblName, mapper)
                 .withTxnsPerBatch(10)
                 .withBatchSize(batchSize)
                 .withCallTimeout(30000)
@@ -107,7 +107,7 @@ public class TridentHiveTopology {
         }
         
         try {
-            StormSubmitter.submitTopology(args[3], conf, buildTopology(metaStoreUri, dbName, tblName,null,null));
+            StormSubmitter.submitTopology(args[3], conf, buildTopology(metaStoreUri, dbName, tblName, null, null));
         } catch (SubmitterHookException e) {
             LOG.warn("Topology is submitted but invoking ISubmitterHook failed", e);
         } catch (Exception e) {
@@ -119,10 +119,10 @@ public class TridentHiveTopology {
         int maxBatchSize;
         HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
         private Values[] outputs = {
-            new Values("1","user1","123456","street1","sunnyvale","ca"),
-            new Values("2","user2","123456","street2","sunnyvale","ca"),
-            new Values("3","user3","123456","street3","san jose","ca"),
-            new Values("4","user4","123456","street4","san jose","ca"),
+            new Values("1", "user1", "123456", "street1", "sunnyvale", "ca"),
+            new Values("2", "user2", "123456", "street2", "sunnyvale", "ca"),
+            new Values("3", "user3", "123456", "street3", "san jose", "ca"),
+            new Values("4", "user4", "123456", "street4", "san jose", "ca"),
         };
         private int index = 0;
         boolean cycle = false;
@@ -137,7 +137,7 @@ public class TridentHiveTopology {
 
         @Override
         public Fields getOutputFields() {
-            return new Fields("id","name","phone","street","city","state");
+            return new Fields("id", "name", "phone", "street", "city", "state");
         }
 
         @Override
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java
index fd323c8..6854722 100644
--- a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/spout/UserSpout.java
@@ -35,9 +35,9 @@ public class UserSpout implements IRichSpout {
     boolean isDistributed;
     SpoutOutputCollector collector;
     public static final List<Values> rows = Lists.newArrayList(
-            new Values(1,"peter",System.currentTimeMillis()),
-            new Values(2,"bob",System.currentTimeMillis()),
-            new Values(3,"alice",System.currentTimeMillis()));
+            new Values(1, "peter", System.currentTimeMillis()),
+            new Values(2, "bob", System.currentTimeMillis()),
+            new Values(3, "alice", System.currentTimeMillis()));
 
     public UserSpout() {
         this(true);
@@ -81,7 +81,7 @@ public class UserSpout implements IRichSpout {
 
     @Override
     public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(new Fields("user_id","user_name","create_date"));
+        declarer.declare(new Fields("user_id", "user_name", "create_date"));
     }
 
     @Override
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
index 76d6c6f..aa7f686 100644
--- a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/AbstractUserTopology.java
@@ -82,12 +82,12 @@ public abstract class AbstractUserTopology {
             System.exit(-1);
         }
         Map<String, Object> map = Maps.newHashMap();
-        map.put("dataSourceClassName", args[0]);//com.mysql.jdbc.jdbc2.optional.MysqlDataSource
-        map.put("dataSource.url", args[1]);//jdbc:mysql://localhost/test
-        map.put("dataSource.user", args[2]);//root
+        map.put("dataSourceClassName", args[0]); //com.mysql.jdbc.jdbc2.optional.MysqlDataSource
+        map.put("dataSource.url", args[1]); //jdbc:mysql://localhost/test
+        map.put("dataSource.user", args[2]); //root
 
         if (args.length == 4) {
-            map.put("dataSource.password", args[3]);//password
+            map.put("dataSource.password", args[3]); //password
         }
 
         Config config = new Config();
diff --git a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistenceTridentTopology.java b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistenceTridentTopology.java
index 6f03e0e..1c38c35 100644
--- a/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistenceTridentTopology.java
+++ b/examples/storm-jdbc-examples/src/main/java/org/apache/storm/jdbc/topology/UserPersistenceTridentTopology.java
@@ -58,11 +58,11 @@ public class UserPersistenceTridentTopology extends AbstractUserTopology {
         Stream stream = topology.newStream("userSpout", new UserSpout());
         TridentState state = topology.newStaticState(jdbcStateFactory);
         stream = stream.stateQuery(state,
-                new Fields("user_id","user_name","create_date"),
+                new Fields("user_id", "user_name", "create_date"),
                 new JdbcQuery(),
                 new Fields("dept_name"));
         stream.partitionPersist(jdbcStateFactory,
-                new Fields("user_id","user_name","dept_name","create_date"),
+                new Fields("user_id", "user_name", "dept_name", "create_date"),
                 new JdbcUpdater(),
                 new Fields());
         return topology.build();
diff --git a/examples/storm-jms-examples/src/main/java/org/apache/storm/jms/example/SpringJmsProvider.java b/examples/storm-jms-examples/src/main/java/org/apache/storm/jms/example/SpringJmsProvider.java
index c6b4c21..b3bd3b7 100644
--- a/examples/storm-jms-examples/src/main/java/org/apache/storm/jms/example/SpringJmsProvider.java
+++ b/examples/storm-jms-examples/src/main/java/org/apache/storm/jms/example/SpringJmsProvider.java
@@ -56,8 +56,8 @@ public class SpringJmsProvider implements JmsProvider {
      */
     public SpringJmsProvider(String appContextClasspathResource, String connectionFactoryBean, String destinationBean) {
         ApplicationContext context = new ClassPathXmlApplicationContext(appContextClasspathResource);
-        this.connectionFactory = (ConnectionFactory)context.getBean(connectionFactoryBean);
-        this.destination = (Destination)context.getBean(destinationBean);
+        this.connectionFactory = (ConnectionFactory) context.getBean(connectionFactoryBean);
+        this.destination = (Destination) context.getBean(destinationBean);
     }
 
     @Override
diff --git a/examples/storm-kafka-client-examples/src/main/java/org/apache/storm/kafka/trident/TridentKafkaClientTopologyWildcardTopics.java b/examples/storm-kafka-client-examples/src/main/java/org/apache/storm/kafka/trident/TridentKafkaClientTopologyWildcardTopics.java
index 7da8f91..f770c75 100644
--- a/examples/storm-kafka-client-examples/src/main/java/org/apache/storm/kafka/trident/TridentKafkaClientTopologyWildcardTopics.java
+++ b/examples/storm-kafka-client-examples/src/main/java/org/apache/storm/kafka/trident/TridentKafkaClientTopologyWildcardTopics.java
@@ -33,7 +33,7 @@ public class TridentKafkaClientTopologyWildcardTopics extends TridentKafkaClient
     private static final Pattern TOPIC_WILDCARD_PATTERN = Pattern.compile("test-trident(-1)?");
 
     @Override
-    protected KafkaTridentSpoutConfig<String,String> newKafkaSpoutConfig(String bootstrapServers) {
+    protected KafkaTridentSpoutConfig<String, String> newKafkaSpoutConfig(String bootstrapServers) {
         return KafkaTridentSpoutConfig.builder(bootstrapServers, TOPIC_WILDCARD_PATTERN)
                 .setProp(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 200)
                 .setRecordTranslator((r) -> new Values(r.value()), new Fields("str"))
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/ExecAndProcessLatencyEngine.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/ExecAndProcessLatencyEngine.java
index 2f5257d..c2dd81f 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/ExecAndProcessLatencyEngine.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/ExecAndProcessLatencyEngine.java
@@ -35,7 +35,7 @@ public class ExecAndProcessLatencyEngine implements Serializable {
     private final SlowExecutorPattern skewedPattern;
 
     public static long toNano(double ms) {
-        return (long)(ms * NANO_IN_MS);
+        return (long) (ms * NANO_IN_MS);
     }
 
     private final AtomicLong parkOffset = new AtomicLong(0);
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/GenLoad.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/GenLoad.java
index 141f11a..20fec21 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/GenLoad.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/GenLoad.java
@@ -266,7 +266,7 @@ public class GenLoad {
         //For some reason on the new code if ackers is null we get 0???
         Object ackers = conf.get(Config.TOPOLOGY_ACKER_EXECUTORS);
         Object workers = conf.get(Config.TOPOLOGY_WORKERS);
-        if (ackers == null || ((Number)ackers).intValue() <= 0) {
+        if (ackers == null || ((Number) ackers).intValue() <= 0) {
             if (workers == null) {
                 workers = 1;
             }
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsConsumer.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsConsumer.java
index 645000d..f316c76 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsConsumer.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsConsumer.java
@@ -53,7 +53,7 @@ public class HttpForwardingMetricsConsumer implements IMetricsConsumer {
     @Override
     public void prepare(Map<String, Object> topoConf, Object registrationArgument, TopologyContext context, IErrorReporter errorReporter) { 
         try {
-            url = new URL((String)registrationArgument);
+            url = new URL((String) registrationArgument);
             this.errorReporter = errorReporter;
             serializer = new KryoValuesSerializer(topoConf);
             topologyId = context.getStormId();
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsServer.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsServer.java
index aa3e89f..bce3fe6 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsServer.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/HttpForwardingMetricsServer.java
@@ -58,7 +58,7 @@ public abstract class HttpForwardingMetricsServer {
         protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
             Input in = new Input(request.getInputStream());
             List<Object> metrics = des.get().deserializeFrom(in);
-            handle((TaskInfo)metrics.get(0), (Collection<DataPoint>)metrics.get(1), (String)metrics.get(2));
+            handle((TaskInfo) metrics.get(0), (Collection<DataPoint>) metrics.get(1), (String) metrics.get(2));
             response.setStatus(HttpServletResponse.SC_OK);
         }
     }
@@ -100,7 +100,7 @@ public abstract class HttpForwardingMetricsServer {
             context.setContextPath("/");
             server.setHandler(context);
  
-            context.addServlet(new ServletHolder(new MetricsCollectionServlet()),"/*");
+            context.addServlet(new ServletHolder(new MetricsCollectionServlet()), "/*");
 
             server.start();
         } catch (RuntimeException e) {
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadCompConf.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadCompConf.java
index 80f4faf..7240838 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadCompConf.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadCompConf.java
@@ -113,7 +113,7 @@ public class LoadCompConf {
      * @return a copy of this with the parallelism adjusted.
      */
     public LoadCompConf scaleParallel(double v) {
-        return setParallel(Math.max(1, (int)Math.ceil(parallelism * v)));
+        return setParallel(Math.max(1, (int) Math.ceil(parallelism * v)));
     }
 
     /**
@@ -123,7 +123,7 @@ public class LoadCompConf {
      */
     public LoadCompConf setParallel(int newParallelism) {
         //We need to adjust the throughput accordingly (so that it stays the same in aggregate)
-        double throughputAdjustment = ((double)parallelism) / newParallelism;
+        double throughputAdjustment = ((double) parallelism) / newParallelism;
         return new LoadCompConf(id, newParallelism, streams, cpuLoad, memoryLoad, slp).scaleThroughput(throughputAdjustment);
     }
 
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadMetricsServer.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadMetricsServer.java
index c02de27..b06f977 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadMetricsServer.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadMetricsServer.java
@@ -238,7 +238,7 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
         }
 
         public double getCompletedPerSec() {
-            return getCompleted() / (double)timeWindow;
+            return getCompleted() / (double) timeWindow;
         }
 
         public long getAcked() {
@@ -246,7 +246,7 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
         }
 
         public double getAckedPerSec() {
-            return acked / (double)timeWindow;
+            return acked / (double) timeWindow;
         }
 
         public long getFailed() {
@@ -422,8 +422,8 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
     static {
         //Perhaps there is a better way to do this???
         LinkedHashMap<String, MetricExtractor> tmp = new LinkedHashMap<>();
-        tmp.put("start_time",  new MetricExtractor((m, unit) -> m.startTime(),"s"));
-        tmp.put("end_time",  new MetricExtractor((m, unit) -> m.endTime(), "s"));
+        tmp.put("start_time",  new MetricExtractor((m, unit) -> m.startTime(), "s"));
+        tmp.put("end_time",  new MetricExtractor((m, unit) -> m.endTime(),  "s"));
         tmp.put("rate",  new MetricExtractor((m, unit) -> m.getCompletedPerSec(), "tuple/s"));
         tmp.put("mean", new MetricExtractor((m, unit) -> m.getMeanLatency(unit)));
         tmp.put("99%ile", new MetricExtractor((m, unit) -> m.getLatencyAtPercentile(99.0, unit)));
@@ -454,7 +454,7 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
         String buildVersion = VersionInfo.getBuildVersion();
         tmp.put("storm_version", new MetricExtractor((m, unit) -> buildVersion, ""));
         tmp.put("java_version", new MetricExtractor((m, unit) -> System.getProperty("java.vendor")
-            + " " + System.getProperty("java.version"),""));
+            + " " + System.getProperty("java.version"), ""));
         tmp.put("os_arch", new MetricExtractor((m, unit) -> System.getProperty("os.arch"), ""));
         tmp.put("os_name", new MetricExtractor((m, unit) -> System.getProperty("os.name"), ""));
         tmp.put("os_version", new MetricExtractor((m, unit) -> System.getProperty("os.version"), ""));
@@ -593,7 +593,7 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
         FixedWidthReporter(String path, Map<String, String> query, Map<String, MetricExtractor> extractorsMap)
             throws FileNotFoundException {
             super(path, query, extractorsMap, "3");
-            int columnWidth = Integer.parseInt(query.getOrDefault("columnWidth", "15")) - 1;//Always have a space in between
+            int columnWidth = Integer.parseInt(query.getOrDefault("columnWidth", "15")) - 1; //Always have a space in between
             doubleFormat = "%," + columnWidth + "." + precision + "f";
             longFormat = "%," + columnWidth + "d";
             stringFormat = "%" + columnWidth + "s";
@@ -1004,30 +1004,30 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
         for (IMetricsConsumer.DataPoint dp: dataPoints) {
             if (dp.name.startsWith("comp-lat-histo") && dp.value instanceof Histogram) {
                 synchronized (histo) {
-                    histo.add((Histogram)dp.value);
+                    histo.add((Histogram) dp.value);
                 }
             } else if ("CPU".equals(dp.name) && dp.value instanceof Map) {
-                Map<Object, Object> m = (Map<Object, Object>)dp.value;
+                Map<Object, Object> m = (Map<Object, Object>) dp.value;
                 Object sys = m.get("sys-ms");
                 if (sys instanceof Number) {
-                    systemCpu.getAndAdd(((Number)sys).longValue());
+                    systemCpu.getAndAdd(((Number) sys).longValue());
                 }
                 Object user = m.get("user-ms");
                 if (user instanceof Number) {
-                    userCpu.getAndAdd(((Number)user).longValue());
+                    userCpu.getAndAdd(((Number) user).longValue());
                 }
             } else if (dp.name.startsWith("GC/") && dp.value instanceof Map) {
-                Map<Object, Object> m = (Map<Object, Object>)dp.value;
+                Map<Object, Object> m = (Map<Object, Object>) dp.value;
                 Object count = m.get("count");
                 if (count instanceof Number) {
-                    gcCount.getAndAdd(((Number)count).longValue());
+                    gcCount.getAndAdd(((Number) count).longValue());
                 }
                 Object time = m.get("timeMs");
                 if (time instanceof Number) {
-                    gcMs.getAndAdd(((Number)time).longValue());
+                    gcMs.getAndAdd(((Number) time).longValue());
                 }
             } else if (dp.name.startsWith("memory/") && dp.value instanceof Map) {
-                Map<Object, Object> m = (Map<Object, Object>)dp.value;
+                Map<Object, Object> m = (Map<Object, Object>) dp.value;
                 Object val = m.get("usedBytes");
                 if (val instanceof Number) {
                     MemMeasure mm = memoryBytes.get(worker);
@@ -1036,10 +1036,10 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
                         MemMeasure tmp = memoryBytes.putIfAbsent(worker, mm);
                         mm = tmp == null ? mm : tmp;
                     }
-                    mm.update(((Number)val).longValue());
+                    mm.update(((Number) val).longValue());
                 }
             } else if (dp.name.equals("__receive")) {
-                Map<Object, Object> m = (Map<Object, Object>)dp.value;
+                Map<Object, Object> m = (Map<Object, Object>) dp.value;
                 Object pop = m.get("population");
                 Object cap = m.get("capacity");
                 if (pop instanceof Number && cap instanceof Number) {
@@ -1057,7 +1057,7 @@ public class LoadMetricsServer extends HttpForwardingMetricsServer {
                     if (full >= 0.8) {
                         congested.get().put(
                             topologyId + ":" + taskInfo.srcComponentId + ":" + taskInfo.srcTaskId,
-                            "max.spout.pending " + (int)(full * 100) + "%");
+                            "max.spout.pending " + (int) (full * 100) + "%");
                     }
                 }
             }
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadSpout.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadSpout.java
index 611dc63..5d639d3 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadSpout.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/LoadSpout.java
@@ -134,11 +134,11 @@ public class LoadSpout  extends BaseRichSpout {
 
     @Override
     public void ack(Object id) {
-        ((SentWithTime)id).done();
+        ((SentWithTime) id).done();
     }
 
     @Override
     public void fail(Object id) {
-        replays.add((SentWithTime)id);
+        replays.add((SentWithTime) id);
     }
 }
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/NormalDistStats.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/NormalDistStats.java
index 8edf660..d7555a5 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/NormalDistStats.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/NormalDistStats.java
@@ -90,7 +90,7 @@ public class NormalDistStats implements Serializable {
         for (Double v: values) {
             sum += v;
             min = Math.min(min, v);
-            max = Math.max(max,v);
+            max = Math.max(max, v);
         }
         double mean = sum / Math.max(count, 1);
         double sdPartial = 0;
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/OutputStreamEngine.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/OutputStreamEngine.java
index 80111c8..ae23679 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/OutputStreamEngine.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/OutputStreamEngine.java
@@ -29,7 +29,7 @@ import java.util.concurrent.ThreadLocalRandom;
  */
 public class OutputStreamEngine {
     private static final double NANO_PER_SEC = 1_000_000_000.0;
-    private static final long UPDATE_RATE_PERIOD_NS = ((long)NANO_PER_SEC * 2);
+    private static final long UPDATE_RATE_PERIOD_NS = ((long) NANO_PER_SEC * 2);
     private static final String[] KEYS = new String[2048];
 
     static {
@@ -67,11 +67,11 @@ public class OutputStreamEngine {
     private void selectNewRate() {
         double ratePerSecond = stats.rate.nextRandom(rand);
         if (ratePerSecond > 0) {
-            periodNano = Math.max(1, (long)(NANO_PER_SEC / ratePerSecond));
-            emitAmount = Math.max(1, (long)((ratePerSecond / NANO_PER_SEC) * periodNano));
+            periodNano = Math.max(1, (long) (NANO_PER_SEC / ratePerSecond));
+            emitAmount = Math.max(1, (long) ((ratePerSecond / NANO_PER_SEC) * periodNano));
         } else {
             //if it is is 0 or less it really is 1 per 10 seconds.
-            periodNano = (long)NANO_PER_SEC * 10;
+            periodNano = (long) NANO_PER_SEC * 10;
             emitAmount = 1;
         }
     }
@@ -109,7 +109,7 @@ public class OutputStreamEngine {
         if (stats.areKeysSkewed) {
             //We set the stddev of the skewed keys to be 1/5 of the length, but then we use the absolute value
             // of that so everything is skewed towards 0
-            keyIndex = Math.min(KEYS.length - 1 , Math.abs((int)(rand.nextGaussian() * KEYS.length / 5)));
+            keyIndex = Math.min(KEYS.length - 1 , Math.abs((int) (rand.nextGaussian() * KEYS.length / 5)));
         } else {
             keyIndex = rand.nextInt(KEYS.length);
         }
diff --git a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/TopologyLoadConf.java b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/TopologyLoadConf.java
index 4f297ab..e779f5f 100644
--- a/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/TopologyLoadConf.java
+++ b/examples/storm-loadgen/src/main/java/org/apache/storm/loadgen/TopologyLoadConf.java
@@ -88,7 +88,7 @@ public class TopologyLoadConf {
      */
     public static TopologyLoadConf fromConf(File file) throws IOException {
         Yaml yaml = new Yaml(new SafeConstructor());
-        Map<String, Object> yamlConf = (Map<String, Object>)yaml.load(new FileReader(file));
+        Map<String, Object> yamlConf = (Map<String, Object>) yaml.load(new FileReader(file));
         return TopologyLoadConf.fromConf(yamlConf);
     }
 
@@ -100,7 +100,7 @@ public class TopologyLoadConf {
     public static TopologyLoadConf fromConf(Map<String, Object> conf) {
         Map<String, Object> topoConf = null;
         if (conf.containsKey("config")) {
-            topoConf = new HashMap<>((Map<String, Object>)conf.get("config"));
+            topoConf = new HashMap<>((Map<String, Object>) conf.get("config"));
         }
 
         List<LoadCompConf> spouts = new ArrayList<>();
@@ -124,7 +124,7 @@ public class TopologyLoadConf {
             }
         }
 
-        return new TopologyLoadConf((String)conf.get("name"), topoConf, spouts, bolts, streams);
+        return new TopologyLoadConf((String) conf.get("name"), topoConf, spouts, bolts, streams);
     }
 
     /**
@@ -219,7 +219,7 @@ public class TopologyLoadConf {
         if (div > 0) {
             ret = asCharString(div);
         }
-        ret += (char)((int)'a' + remainder);
+        ret += (char) ((int) 'a' + remainder);
         return ret;
     }
 
@@ -383,7 +383,7 @@ public class TopologyLoadConf {
         return new TopologyLoadConf(getUniqueTopoName(), anonymizeTopoConf(topoConf), remappedSpouts, remappedBolts, remappedInputStreams);
     }
 
-    private static Map<String,Object> anonymizeTopoConf(Map<String, Object> topoConf) {
+    private static Map<String, Object> anonymizeTopoConf(Map<String, Object> topoConf) {
         //Only keep important conf keys
         Map<String, Object> ret = new HashMap<>();
         for (Map.Entry<String, Object> entry: topoConf.entrySet()) {
@@ -412,8 +412,8 @@ public class TopologyLoadConf {
             return ret.toString();
         } else {
             List<String> ret = new ArrayList<>();
-            for (String subValue: (Collection<String>)value) {
-                ret.add((String)cleanupChildOpts(subValue));
+            for (String subValue: (Collection<String>) value) {
+                ret.add((String) cleanupChildOpts(subValue));
             }
             return ret.stream().filter((item) -> item != null && !item.isEmpty()).collect(Collectors.toList());
         }
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
index b571635..1e9991f 100644
--- a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedis.java
@@ -65,8 +65,8 @@ public class WordCountTridentRedis {
         TridentState state = topology.newStaticState(factory);
         stream = stream.stateQuery(state, new Fields("word"),
                                 new RedisStateQuerier(lookupMapper),
-                                new Fields("columnName","columnValue"));
-        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
+                                new Fields("columnName", "columnValue"));
+        stream.each(new Fields("word", "columnValue"), new PrintFunction(), new Fields());
         return topology.build();
     }
 
diff --git a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
index be515d8..7ced2af 100644
--- a/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
+++ b/examples/storm-redis-examples/src/main/java/org/apache/storm/redis/trident/WordCountTridentRedisCluster.java
@@ -73,8 +73,8 @@ public class WordCountTridentRedisCluster {
         TridentState state = topology.newStaticState(factory);
         stream = stream.stateQuery(state, new Fields("word"),
                                 new RedisClusterStateQuerier(lookupMapper),
-                                new Fields("columnName","columnValue"));
-        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
+                                new Fields("columnName", "columnValue"));
+        stream.each(new Fields("word", "columnValue"), new PrintFunction(), new Fields());
         return topology.build();
     }
 
diff --git a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
index 70b0e08..c1c0145 100644
--- a/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
+++ b/examples/storm-solr-examples/src/main/java/org/apache/storm/solr/spout/SolrFieldsSpout.java
@@ -67,7 +67,7 @@ public class SolrFieldsSpout extends BaseRichSpout {
     }
 
     public Fields getOutputFields() {
-        return new Fields("id","date","dc_title","author","dynamic_field_txt","non_matching_field");
+        return new Fields("id", "date", "dc_title", "author", "dynamic_field_txt", "non_matching_field");
     }
 
     @Override
diff --git a/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/AutoHDFSCommand.java b/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/AutoHDFSCommand.java
index 1e9290a..b8e4396 100644
--- a/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/AutoHDFSCommand.java
+++ b/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/AutoHDFSCommand.java
@@ -43,14 +43,14 @@ public final class AutoHDFSCommand {
     public static void main(String[] args) throws Exception {
         Map<String, Object> conf = new HashMap<>();
         conf.put(STORM_USER_NAME_KEY, args[1]); //with realm e.g. hdfs@WITZEND.COM
-        conf.put(STORM_KEYTAB_FILE_KEY, args[2]);// /etc/security/keytabs/storm.keytab
+        conf.put(STORM_KEYTAB_FILE_KEY, args[2]); // /etc/security/keytabs/storm.keytab
 
         AutoHDFS autoHdfs = new AutoHDFS();
         autoHdfs.prepare(conf);
         AutoHDFSNimbus autoHdfsNimbus = new AutoHDFSNimbus();
         autoHdfsNimbus.prepare(conf);
 
-        Map<String,String> creds  = new HashMap<>();
+        Map<String, String> creds  = new HashMap<>();
         autoHdfsNimbus.populateCredentials(creds, conf, args[0]);
         LOG.info("Got HDFS credentials", autoHdfs.getCredentials(creds));
 
diff --git a/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/HdfsSecurityUtil.java b/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/HdfsSecurityUtil.java
index a0c8236..0f8ef9b 100644
--- a/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/HdfsSecurityUtil.java
+++ b/external/storm-autocreds/src/main/java/org/apache/storm/hdfs/security/HdfsSecurityUtil.java
@@ -54,8 +54,8 @@ public final class HdfsSecurityUtil {
     public static void login(Map<String, Object> conf, Configuration hdfsConfig) throws IOException {
         //If AutoHDFS is specified, do not attempt to login using keytabs, only kept for backward compatibility.
         if (conf.get(TOPOLOGY_AUTO_CREDENTIALS) == null
-                || (!(((List)conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHDFS.class.getName()))
-                        && !(((List)conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoTGT.class.getName())))) {
+                || (!(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHDFS.class.getName()))
+                        && !(((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoTGT.class.getName())))) {
             if (UserGroupInformation.isSecurityEnabled()) {
                 // compareAndSet added because of https://issues.apache.org/jira/browse/STORM-1535
                 if (isLoggedIn.compareAndSet(false, true)) {
diff --git a/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStore.java b/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStore.java
index 5d4884b..0626372 100644
--- a/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStore.java
+++ b/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStore.java
@@ -130,7 +130,7 @@ public class HdfsBlobStore extends BlobStore {
     protected void prepareInternal(Map<String, Object> conf, String overrideBase, Configuration hadoopConf) {
         this.conf = conf;
         if (overrideBase == null) {
-            overrideBase = (String)conf.get(Config.BLOBSTORE_DIR);
+            overrideBase = (String) conf.get(Config.BLOBSTORE_DIR);
         }
         if (overrideBase == null) {
             throw new RuntimeException("You must specify a blobstore directory for HDFS to use!");
@@ -183,7 +183,7 @@ public class HdfsBlobStore extends BlobStore {
     public AtomicOutputStream createBlob(String key, SettableBlobMeta meta, Subject who)
             throws AuthorizationException, KeyAlreadyExistsException {
         if (meta.get_replication_factor() <= 0) {
-            meta.set_replication_factor((int)conf.get(Config.STORM_BLOBSTORE_REPLICATION_FACTOR));
+            meta.set_replication_factor((int) conf.get(Config.STORM_BLOBSTORE_REPLICATION_FACTOR));
         }
         who = checkAndGetSubject(who);
         validateKey(key);
@@ -296,7 +296,7 @@ public class HdfsBlobStore extends BlobStore {
     public void setBlobMeta(String key, SettableBlobMeta meta, Subject who)
             throws AuthorizationException, KeyNotFoundException {
         if (meta.get_replication_factor() <= 0) {
-            meta.set_replication_factor((int)conf.get(Config.STORM_BLOBSTORE_REPLICATION_FACTOR));
+            meta.set_replication_factor((int) conf.get(Config.STORM_BLOBSTORE_REPLICATION_FACTOR));
         }
         who = checkAndGetSubject(who);
         validateKey(key);
diff --git a/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStoreFile.java b/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStoreFile.java
index 564a1e3..45b15b1 100644
--- a/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStoreFile.java
+++ b/external/storm-hdfs-blobstore/src/main/java/org/apache/storm/hdfs/blobstore/HdfsBlobStoreFile.java
@@ -137,18 +137,18 @@ public class HdfsBlobStoreFile extends BlobStoreFile {
         OutputStream out = null;
         FsPermission fileperms = new FsPermission(BLOBSTORE_FILE_PERMISSION);
         try {
-            out = fileSystem.create(path, (short)this.getMetadata().get_replication_factor());
+            out = fileSystem.create(path, (short) this.getMetadata().get_replication_factor());
             fileSystem.setPermission(path, fileperms);
-            fileSystem.setReplication(path, (short)this.getMetadata().get_replication_factor());
+            fileSystem.setReplication(path, (short) this.getMetadata().get_replication_factor());
         } catch (IOException e) {
             //Try to create the parent directory, may not work
             FsPermission dirperms = new FsPermission(HdfsBlobStoreImpl.BLOBSTORE_DIR_PERMISSION);
             if (!fileSystem.mkdirs(path.getParent(), dirperms)) {
                 LOG.warn("error creating parent dir: " + path.getParent());
             }
-            out = fileSystem.create(path, (short)this.getMetadata().get_replication_factor());
+            out = fileSystem.create(path, (short) this.getMetadata().get_replication_factor());
             fileSystem.setPermission(path, dirperms);
-            fileSystem.setReplication(path, (short)this.getMetadata().get_replication_factor());
+            fileSystem.setReplication(path, (short) this.getMetadata().get_replication_factor());
         }
         if (out == null) {
             throw new IOException("Error in creating: " + path);
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/KafkaBolt.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/KafkaBolt.java
index c257c32..f3fc808 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/KafkaBolt.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/KafkaBolt.java
@@ -61,7 +61,7 @@ public class KafkaBolt<K, V> extends BaseTickTupleAwareRichBolt {
 
     private Producer<K, V> producer;
     private OutputCollector collector;
-    private TupleToKafkaMapper<K,V> mapper;
+    private TupleToKafkaMapper<K, V> mapper;
     private KafkaTopicSelector topicSelector;
     private PreparableCallback providedCallback;
     private Properties boltSpecifiedProperties = new Properties();
@@ -76,7 +76,7 @@ public class KafkaBolt<K, V> extends BaseTickTupleAwareRichBolt {
 
     public KafkaBolt() {}
 
-    public KafkaBolt<K,V> withTupleToKafkaMapper(TupleToKafkaMapper<K,V> mapper) {
+    public KafkaBolt<K, V> withTupleToKafkaMapper(TupleToKafkaMapper<K, V> mapper) {
         this.mapper = mapper;
         return this;
     }
@@ -90,12 +90,12 @@ public class KafkaBolt<K, V> extends BaseTickTupleAwareRichBolt {
         return withTopicSelector(new DefaultTopicSelector(topic));
     }
     
-    public KafkaBolt<K,V> withTopicSelector(KafkaTopicSelector selector) {
+    public KafkaBolt<K, V> withTopicSelector(KafkaTopicSelector selector) {
         this.topicSelector = selector;
         return this;
     }
 
-    public KafkaBolt<K,V> withProducerProperties(Properties producerProperties) {
+    public KafkaBolt<K, V> withProducerProperties(Properties producerProperties) {
         this.boltSpecifiedProperties = producerProperties;
         return this;
     }
@@ -116,7 +116,7 @@ public class KafkaBolt<K, V> extends BaseTickTupleAwareRichBolt {
         //for backward compatibility.
         if (mapper == null) {
             LOG.info("Mapper not specified. Setting default mapper to {}", FieldNameBasedTupleToKafkaMapper.class.getSimpleName());
-            this.mapper = new FieldNameBasedTupleToKafkaMapper<K,V>();
+            this.mapper = new FieldNameBasedTupleToKafkaMapper<K, V>();
         }
 
         //for backward compatibility.
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/FieldNameBasedTupleToKafkaMapper.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/FieldNameBasedTupleToKafkaMapper.java
index 0302c57..1a1b097 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/FieldNameBasedTupleToKafkaMapper.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/FieldNameBasedTupleToKafkaMapper.java
@@ -20,7 +20,7 @@ package org.apache.storm.kafka.bolt.mapper;
 
 import org.apache.storm.tuple.Tuple;
 
-public class FieldNameBasedTupleToKafkaMapper<K,V> implements TupleToKafkaMapper<K, V> {
+public class FieldNameBasedTupleToKafkaMapper<K, V> implements TupleToKafkaMapper<K, V> {
     private static final long serialVersionUID = -8794262989021702349L;
     public static final String BOLT_KEY = "key";
     public static final String BOLT_MESSAGE = "message";
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/TupleToKafkaMapper.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/TupleToKafkaMapper.java
index 531c60c..19d4da6 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/TupleToKafkaMapper.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/bolt/mapper/TupleToKafkaMapper.java
@@ -26,7 +26,7 @@ import org.apache.storm.tuple.Tuple;
  * @param <K> type of key.
  * @param <V> type of value.
  */
-public interface TupleToKafkaMapper<K,V> extends Serializable {
+public interface TupleToKafkaMapper<K, V> extends Serializable {
     
     K getKeyFromTuple(Tuple tuple);
     
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/ByTopicRecordTranslator.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/ByTopicRecordTranslator.java
index 0a37614..ef8203f 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/ByTopicRecordTranslator.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/ByTopicRecordTranslator.java
@@ -33,8 +33,8 @@ import org.apache.storm.tuple.Fields;
  */
 public class ByTopicRecordTranslator<K, V> implements RecordTranslator<K, V> {
     private static final long serialVersionUID = -121699733778988688L;
-    private final RecordTranslator<K,V> defaultTranslator;
-    private final Map<String, RecordTranslator<K,V>> topicToTranslator = new HashMap<>();
+    private final RecordTranslator<K, V> defaultTranslator;
+    private final Map<String, RecordTranslator<K, V>> topicToTranslator = new HashMap<>();
     private final Map<String, Fields> streamToFields = new HashMap<>();
     
     /**
@@ -65,7 +65,7 @@ public class ByTopicRecordTranslator<K, V> implements RecordTranslator<K, V> {
      * @param defaultTranslator a translator that will be used for all topics not explicitly set
      *     with one of the variants of {@link #forTopic(java.lang.String, org.apache.storm.kafka.spout.RecordTranslator) }.
      */
-    public ByTopicRecordTranslator(RecordTranslator<K,V> defaultTranslator) {
+    public ByTopicRecordTranslator(RecordTranslator<K, V> defaultTranslator) {
         this.defaultTranslator = defaultTranslator;
         //This shouldn't throw on a Check, because nothing is configured yet
         cacheNCheckFields(defaultTranslator);
@@ -110,7 +110,7 @@ public class ByTopicRecordTranslator<K, V> implements RecordTranslator<K, V> {
      * @throws IllegalArgumentException if the Fields for the stream this emits to do not match
      *     any already configured Fields for the same stream
      */
-    public ByTopicRecordTranslator<K, V> forTopic(String topic, RecordTranslator<K,V> translator) {
+    public ByTopicRecordTranslator<K, V> forTopic(String topic, RecordTranslator<K, V> translator) {
         if (topicToTranslator.containsKey(topic)) {
             throw new IllegalStateException("Topic " + topic + " is already registered");
         }
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/KafkaSpoutRetryExponentialBackoff.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/KafkaSpoutRetryExponentialBackoff.java
index 0b99a40..ac28802 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/KafkaSpoutRetryExponentialBackoff.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/KafkaSpoutRetryExponentialBackoff.java
@@ -302,7 +302,7 @@ public class KafkaSpoutRetryExponentialBackoff implements KafkaSpoutRetryService
         final long currentTimeNanos = Time.nanoTime();
         final long nextTimeNanos = msgId.numFails() == 1                // numFails = 1, 2, 3, ...
                 ? currentTimeNanos + initialDelay.lengthNanos
-                : currentTimeNanos + delayPeriod.lengthNanos * (long)(Math.pow(2, msgId.numFails() - 1));
+                : currentTimeNanos + delayPeriod.lengthNanos * (long) (Math.pow(2, msgId.numFails() - 1));
         return Math.min(nextTimeNanos, currentTimeNanos + maxDelay.lengthNanos);
     }
 
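
For reference, the computation in this hunk (the widening cast simply truncates Math.pow's double result before the multiplication) schedules the n-th retry, measured from the current time, at

    delay(n) = min( maxDelay, n == 1 ? initialDelay : delayPeriod * 2^(n-1) )

so with, say, delayPeriod = 2 s and maxDelay = 10 s, successive failures wait initialDelay, 4 s, 8 s, 10 s, 10 s, ...
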
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/RecordTranslator.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/RecordTranslator.java
index e8d938c..0b48e48 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/RecordTranslator.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/RecordTranslator.java
@@ -40,7 +40,7 @@ public interface RecordTranslator<K, V> extends Serializable, Func<ConsumerRecor
      *     if {@link Builder#setEmitNullTuples(boolean)} is set to {@code false}.
      */
     @Override
-    List<Object> apply(ConsumerRecord<K,V> record);
+    List<Object> apply(ConsumerRecord<K, V> record);
     
     /**
      * Get the fields associated with a stream.  The streams passed in are
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/CommonKafkaSpoutConfig.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/CommonKafkaSpoutConfig.java
index 14c6a02..c028af0 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/CommonKafkaSpoutConfig.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/CommonKafkaSpoutConfig.java
@@ -125,7 +125,7 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
          */
         public T setProp(String key, Object value) {
             kafkaProps.put(key, value);
-            return (T)this;
+            return (T) this;
         }
 
         /**
@@ -133,7 +133,7 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
          */
         public T setProp(Map<String, Object> props) {
             kafkaProps.putAll(props);
-            return (T)this;
+            return (T) this;
         }
 
         /**
@@ -147,7 +147,7 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
                     throw new IllegalArgumentException("Kafka Consumer property keys must be Strings");
                 }
             });
-            return (T)this;
+            return (T) this;
         }
 
         //Spout Settings
@@ -158,7 +158,7 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
          */
         public T setPollTimeoutMs(long pollTimeoutMs) {
             this.pollTimeoutMs = pollTimeoutMs;
-            return (T)this;
+            return (T) this;
         }
 
         /**
@@ -169,12 +169,12 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
          */
         public T setFirstPollOffsetStrategy(FirstPollOffsetStrategy firstPollOffsetStrategy) {
             this.firstPollOffsetStrategy = firstPollOffsetStrategy;
-            return (T)this;
+            return (T) this;
         }
 
         public T setRecordTranslator(RecordTranslator<K, V> translator) {
             this.translator = translator;
-            return (T)this;
+            return (T) this;
         }
 
         /**
@@ -209,7 +209,7 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
          */
         public T setPartitionRefreshPeriodMs(long partitionRefreshPeriodMs) {
             this.partitionRefreshPeriodMs = partitionRefreshPeriodMs;
-            return (T)this;
+            return (T) this;
         }
 
         /**
@@ -218,7 +218,7 @@ public abstract class CommonKafkaSpoutConfig<K, V> implements Serializable {
          */
         public T setStartTimeStamp(long startTimeStamp) {
             this.startTimeStamp = startTimeStamp;
-            return (T)this;
+            return (T) this;
         }
         
         protected Map<String, Object> getKafkaProps() {
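
The repeated (T) this casts above are the price of the self-typed ("recursive generics") builder idiom: Builder is parameterized over its own concrete subtype so that fluent setters keep returning that subtype. A compact sketch of the idiom, with illustrative class names rather than Storm's real hierarchy:

    // Base builder parameterized over the concrete builder type.
    abstract class BaseBuilder<T extends BaseBuilder<T>> {
        protected long pollTimeoutMs = 200;

        @SuppressWarnings("unchecked")
        public T setPollTimeoutMs(long pollTimeoutMs) {
            this.pollTimeoutMs = pollTimeoutMs;
            return (T) this; // unchecked, but safe while every subclass binds T to itself
        }
    }

    final class ConcreteBuilder extends BaseBuilder<ConcreteBuilder> {
        public ConcreteBuilder setTopic(String topic) {
            return this; // inherited setters also return ConcreteBuilder
        }
    }

A chain like new ConcreteBuilder().setPollTimeoutMs(200).setTopic("t") then compiles with no downcast at the call site, which is exactly what the casts buy the spout config builders.
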
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/ConsumerFactory.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/ConsumerFactory.java
index e7f9288..67e9769 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/ConsumerFactory.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/internal/ConsumerFactory.java
@@ -24,5 +24,5 @@ import org.apache.kafka.clients.consumer.Consumer;
  * This is here to enable testing.
  */
 public interface ConsumerFactory<K, V> extends Serializable {
-    Consumer<K,V> createConsumer(Map<String, Object> consumerProps);
+    Consumer<K, V> createConsumer(Map<String, Object> consumerProps);
 }
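
Since the interface holds a single method, test code can supply a consumer with a lambda. A minimal sketch using kafka-clients' MockConsumer as the in-memory stand-in (an illustration, not this module's actual test code):

    import org.apache.kafka.clients.consumer.MockConsumer;
    import org.apache.kafka.clients.consumer.OffsetResetStrategy;

    // Ignore the passed-in consumer config and hand back an in-memory consumer.
    ConsumerFactory<String, String> testFactory =
            consumerProps -> new MockConsumer<>(OffsetResetStrategy.EARLIEST);
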
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/metrics/KafkaOffsetMetric.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/metrics/KafkaOffsetMetric.java
index 496e1d8..04951f0 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/metrics/KafkaOffsetMetric.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/metrics/KafkaOffsetMetric.java
@@ -55,7 +55,7 @@ public class KafkaOffsetMetric<K, V> implements IMetric {
 
     private static final Logger LOG = LoggerFactory.getLogger(KafkaOffsetMetric.class);
     private final Supplier<Map<TopicPartition, OffsetManager>> offsetManagerSupplier;
-    private final Supplier<Consumer<K,V>> consumerSupplier;
+    private final Supplier<Consumer<K, V>> consumerSupplier;
 
     public KafkaOffsetMetric(Supplier<Map<TopicPartition, OffsetManager>> offsetManagerSupplier,
         Supplier<Consumer<K, V>> consumerSupplier) {
@@ -74,7 +74,7 @@ public class KafkaOffsetMetric<K, V> implements IMetric {
             return null;
         }
 
-        Map<String,TopicMetrics> topicMetricsMap = new HashMap<>();
+        Map<String, TopicMetrics> topicMetricsMap = new HashMap<>();
         Set<TopicPartition> topicPartitions = offsetManagers.keySet();
 
         Map<TopicPartition, Long> beginningOffsets;
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutCoordinator.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutCoordinator.java
index d8f097b..09e0931 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutCoordinator.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutCoordinator.java
@@ -35,7 +35,7 @@ import org.apache.storm.trident.spout.IPartitionedTridentSpout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class KafkaTridentSpoutCoordinator<K,V> implements 
+public class KafkaTridentSpoutCoordinator<K, V> implements
     IOpaquePartitionedTridentSpout.Coordinator<List<Map<String, Object>>>,
     IPartitionedTridentSpout.Coordinator<List<Map<String, Object>>>,
         Serializable {
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutOpaque.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutOpaque.java
index e20c89b..a76cb0e 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutOpaque.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutOpaque.java
@@ -29,7 +29,7 @@ import org.apache.storm.tuple.Fields;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class KafkaTridentSpoutOpaque<K,V> implements IOpaquePartitionedTridentSpout<List<Map<String, Object>>,
+public class KafkaTridentSpoutOpaque<K, V> implements IOpaquePartitionedTridentSpout<List<Map<String, Object>>,
         KafkaTridentSpoutTopicPartition, Map<String, Object>> {
     private static final long serialVersionUID = -8003272486566259640L;
 
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutTransactional.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutTransactional.java
index 2eb3f29..f250050 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutTransactional.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/spout/trident/KafkaTridentSpoutTransactional.java
@@ -25,7 +25,7 @@ import org.apache.storm.task.TopologyContext;
 import org.apache.storm.trident.spout.IPartitionedTridentSpout;
 import org.apache.storm.tuple.Fields;
 
-public class KafkaTridentSpoutTransactional<K,V> implements IPartitionedTridentSpout<List<Map<String, Object>>,
+public class KafkaTridentSpoutTransactional<K, V> implements IPartitionedTridentSpout<List<Map<String, Object>>,
         KafkaTridentSpoutTopicPartition, Map<String, Object>>,
         Serializable {
     private static final long serialVersionUID = 1L;
diff --git a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/trident/mapper/TridentTupleToKafkaMapper.java b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/trident/mapper/TridentTupleToKafkaMapper.java
index 10667e2..7827776 100644
--- a/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/trident/mapper/TridentTupleToKafkaMapper.java
+++ b/external/storm-kafka-client/src/main/java/org/apache/storm/kafka/trident/mapper/TridentTupleToKafkaMapper.java
@@ -21,7 +21,7 @@ package org.apache.storm.kafka.trident.mapper;
 import java.io.Serializable;
 import org.apache.storm.trident.tuple.TridentTuple;
 
-public interface TridentTupleToKafkaMapper<K,V>  extends Serializable {
+public interface TridentTupleToKafkaMapper<K, V>  extends Serializable {
     
     K getKeyFromTuple(TridentTuple tuple);
     
diff --git a/external/storm-kafka-migration/src/main/java/org/apache/storm/kafka/migration/MapUtil.java b/external/storm-kafka-migration/src/main/java/org/apache/storm/kafka/migration/MapUtil.java
index 23477ad..634caad 100644
--- a/external/storm-kafka-migration/src/main/java/org/apache/storm/kafka/migration/MapUtil.java
+++ b/external/storm-kafka-migration/src/main/java/org/apache/storm/kafka/migration/MapUtil.java
@@ -24,7 +24,7 @@ public class MapUtil {
      * Get value for key. Error if value is null or not the expected type.
      */
     public static <T> T getOrError(Map<String, Object> conf, String key) {
-        T ret = (T)conf.get(key);
+        T ret = (T) conf.get(key);
         if (ret == null) {
             throw new RuntimeException(key + " cannot be null");
         }
diff --git a/external/storm-mongodb/src/main/java/org/apache/storm/mongodb/trident/state/MongoMapState.java b/external/storm-mongodb/src/main/java/org/apache/storm/mongodb/trident/state/MongoMapState.java
index 0a3b100..f6cfc43 100644
--- a/external/storm-mongodb/src/main/java/org/apache/storm/mongodb/trident/state/MongoMapState.java
+++ b/external/storm-mongodb/src/main/java/org/apache/storm/mongodb/trident/state/MongoMapState.java
@@ -182,7 +182,7 @@ public class MongoMapState<T> implements IBackingMap<T> {
                 Bson filter = options.queryCreator.createFilterByKeys(keys);
                 Document doc = mongoClient.find(filter);
                 if (doc != null) {
-                    retval.add(this.serializer.deserialize((byte[])doc.get(options.serDocumentField)));
+                    retval.add(this.serializer.deserialize((byte[]) doc.get(options.serDocumentField)));
                 } else {
                     retval.add(null);
                 }
diff --git a/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/RocketMqUtils.java b/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/RocketMqUtils.java
index bb6eb31..a2d7e7a 100644
--- a/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/RocketMqUtils.java
+++ b/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/RocketMqUtils.java
@@ -51,7 +51,7 @@ public final class RocketMqUtils {
         Scheme scheme;
         try {
             Class clazz = Class.forName(schemeString);
-            scheme = (Scheme)clazz.newInstance();
+            scheme = (Scheme) clazz.newInstance();
         } catch (Exception e) {
             throw new IllegalArgumentException("Cannot create Scheme for " + schemeString
                     + " due to " + e.getMessage());
@@ -71,7 +71,7 @@ public final class RocketMqUtils {
         ByteBuffer body = ByteBuffer.wrap(msg.getBody());
         if (rawKey != null && scheme instanceof KeyValueScheme) {
             ByteBuffer key = ByteBuffer.wrap(rawKey.getBytes(StandardCharsets.UTF_8));
-            tup = ((KeyValueScheme)scheme).deserializeKeyAndValue(key, body);
+            tup = ((KeyValueScheme) scheme).deserializeKeyAndValue(key, body);
         } else {
             tup = scheme.deserialize(body);
         }
diff --git a/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/spout/RocketMqSpout.java b/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/spout/RocketMqSpout.java
index 3cc0ba5..c08dd62 100644
--- a/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/spout/RocketMqSpout.java
+++ b/external/storm-rocketmq/src/main/java/org/apache/storm/rocketmq/spout/RocketMqSpout.java
@@ -65,7 +65,7 @@ public class RocketMqSpout implements IRichSpout {
     private DefaultMQPushConsumer consumer;
     private SpoutOutputCollector collector;
     private BlockingQueue<ConsumerBatchMessage<List<Object>>> queue;
-    private Map<String,ConsumerBatchMessage<List<Object>>> cache;
+    private Map<String, ConsumerBatchMessage<List<Object>>> cache;
 
     private Properties properties;
     private Scheme scheme;
@@ -121,7 +121,7 @@ public class RocketMqSpout implements IRichSpout {
             throw new RuntimeException(e);
         }
 
-        long defaultBatchProcessTimeout = (long)conf.getOrDefault(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, 30) * 1000 + 10000;
+        long defaultBatchProcessTimeout = (long) conf.getOrDefault(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, 30) * 1000 + 10000;
         batchProcessTimeout = getLong(properties, RocketMqConfig.CONSUMER_BATCH_PROCESS_TIMEOUT, defaultBatchProcessTimeout);
 
         queue = new LinkedBlockingQueue<>();
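
For reference, the default computed above is the topology message timeout plus a 10-second cushion: with the stock 30 s message timeout, (long) 30 * 1000 + 10000 = 40,000 ms, used only when RocketMqConfig.CONSUMER_BATCH_PROCESS_TIMEOUT is not set.
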
diff --git a/flux/flux-core/src/main/java/org/apache/storm/flux/model/ConfigMethodDef.java b/flux/flux-core/src/main/java/org/apache/storm/flux/model/ConfigMethodDef.java
index 8d32c06..04fec63 100644
--- a/flux/flux-core/src/main/java/org/apache/storm/flux/model/ConfigMethodDef.java
+++ b/flux/flux-core/src/main/java/org/apache/storm/flux/model/ConfigMethodDef.java
@@ -49,9 +49,9 @@ public class ConfigMethodDef {
         List<Object> newVal = new ArrayList<Object>();
         for (Object obj : args) {
             if (obj instanceof LinkedHashMap) {
-                Map<String, Object> map = (Map<String, Object>)obj;
+                Map<String, Object> map = (Map<String, Object>) obj;
                 if (map.containsKey("ref") && map.size() == 1) {
-                    newVal.add(new BeanReference((String)map.get("ref")));
+                    newVal.add(new BeanReference((String) map.get("ref")));
                     this.hasReferences = true;
                 } else if (map.containsKey("reflist") && map.size() == 1) {
                     newVal.add(new BeanListReference((List<String>) map.get("reflist")));
diff --git a/flux/flux-core/src/main/java/org/apache/storm/flux/model/ObjectDef.java b/flux/flux-core/src/main/java/org/apache/storm/flux/model/ObjectDef.java
index 1df9f9f..1ff4fb4 100644
--- a/flux/flux-core/src/main/java/org/apache/storm/flux/model/ObjectDef.java
+++ b/flux/flux-core/src/main/java/org/apache/storm/flux/model/ObjectDef.java
@@ -57,7 +57,7 @@ public class ObjectDef {
         List<Object> newVal = new ArrayList<Object>();
         for (Object obj : constructorArgs) {
             if (obj instanceof LinkedHashMap) {
-                Map<String, Object> map = (Map<String, Object>)obj;
+                Map<String, Object> map = (Map<String, Object>) obj;
                 if (map.containsKey("ref") && map.size() == 1) {
                     newVal.add(new BeanReference((String) map.get("ref")));
                     this.hasReferences = true;
@@ -126,7 +126,7 @@ public class ObjectDef {
         List<Object> newVal = new ArrayList<Object>();
         for (Object obj : factoryArgs) {
             if (obj instanceof LinkedHashMap) {
-                Map map = (Map)obj;
+                Map map = (Map) obj;
                 if (map.containsKey("ref") && map.size() == 1) {
                     newVal.add(new BeanReference((String) map.get("ref")));
                     this.hasReferences = true;
diff --git a/flux/flux-examples/src/main/java/org/apache/storm/flux/examples/WordCountClient.java b/flux/flux-examples/src/main/java/org/apache/storm/flux/examples/WordCountClient.java
index 484d3c1..b216eb5 100644
--- a/flux/flux-examples/src/main/java/org/apache/storm/flux/examples/WordCountClient.java
+++ b/flux/flux-examples/src/main/java/org/apache/storm/flux/examples/WordCountClient.java
@@ -56,7 +56,7 @@ public class WordCountClient {
             System.out.println("HBase configuration:");
             for (Object key : props.keySet()) {
                 System.out.println(key + "=" + props.get(key));
-                config.set((String)key, props.getProperty((String)key));
+                config.set((String) key, props.getProperty((String) key));
             }
         } else {
             System.out.println("Usage: WordCountClient <hbase_config.properties>");
diff --git a/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/bolts/FluxShellBolt.java b/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/bolts/FluxShellBolt.java
index 8878107..8f8916b 100644
--- a/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/bolts/FluxShellBolt.java
+++ b/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/bolts/FluxShellBolt.java
@@ -157,9 +157,9 @@ public class FluxShellBolt extends ShellBolt implements IRichBolt {
     public void declareOutputFields(OutputFieldsDeclarer declarer) {
         Iterator it = this.outputFields.entrySet().iterator();
         while (it.hasNext()) {
-            Map.Entry entryTuple = (Map.Entry)it.next();
-            String key = (String)entryTuple.getKey();
-            String[] value = (String[])entryTuple.getValue();
+            Map.Entry entryTuple = (Map.Entry) it.next();
+            String key = (String) entryTuple.getKey();
+            String[] value = (String[]) entryTuple.getValue();
             if (key.equals("default")) {
                 declarer.declare(new Fields(value));
             } else {
diff --git a/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/spouts/FluxShellSpout.java b/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/spouts/FluxShellSpout.java
index 51ad2c8..89495fe 100644
--- a/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/spouts/FluxShellSpout.java
+++ b/flux/flux-wrappers/src/main/java/org/apache/storm/flux/wrappers/spouts/FluxShellSpout.java
@@ -159,9 +159,9 @@ public class FluxShellSpout extends ShellSpout implements IRichSpout {
     public void declareOutputFields(OutputFieldsDeclarer declarer) {
         Iterator it = this.outputFields.entrySet().iterator();
         while (it.hasNext()) {
-            Map.Entry entryTuple = (Map.Entry)it.next();
-            String key = (String)entryTuple.getKey();
-            String[] value = (String[])entryTuple.getValue();
+            Map.Entry entryTuple = (Map.Entry) it.next();
+            String key = (String) entryTuple.getKey();
+            String[] value = (String[]) entryTuple.getValue();
             if (key.equals("default")) {
                 declarer.declare(new Fields(value));
             } else {
diff --git a/integration-test/src/main/java/org/apache/storm/debug/DebugHelper.java b/integration-test/src/main/java/org/apache/storm/debug/DebugHelper.java
index 2af94e7..75f0c9b 100644
--- a/integration-test/src/main/java/org/apache/storm/debug/DebugHelper.java
+++ b/integration-test/src/main/java/org/apache/storm/debug/DebugHelper.java
@@ -34,6 +34,6 @@ public class DebugHelper {
 
     public static URL[] getClassPaths() {
         ClassLoader cl = ClassLoader.getSystemClassLoader();
-        return ((URLClassLoader)cl).getURLs();
+        return ((URLClassLoader) cl).getURLs();
     }
 }
diff --git a/sql/storm-sql-runtime/src/jvm/org/apache/storm/sql/runtime/utils/SerdeUtils.java b/sql/storm-sql-runtime/src/jvm/org/apache/storm/sql/runtime/utils/SerdeUtils.java
index 85c3500..36e69eb 100644
--- a/sql/storm-sql-runtime/src/jvm/org/apache/storm/sql/runtime/utils/SerdeUtils.java
+++ b/sql/storm-sql-runtime/src/jvm/org/apache/storm/sql/runtime/utils/SerdeUtils.java
@@ -115,16 +115,16 @@ public final class SerdeUtils {
         if (value instanceof Utf8) {
             ret = value.toString();
         } else if (value instanceof Map<?, ?>) {
-            ret = convertAvroUtf8Map((Map<Object,Object>)value);
+            ret = convertAvroUtf8Map((Map<Object, Object>) value);
         } else if (value instanceof GenericData.Array) {
-            ret = convertAvroUtf8Array((GenericData.Array)value);
+            ret = convertAvroUtf8Array((GenericData.Array) value);
         } else {
             ret = value;
         }
         return ret;
     }
 
-    private static Object convertAvroUtf8Map(Map<Object,Object> value) {
+    private static Object convertAvroUtf8Map(Map<Object, Object> value) {
         Map<Object, Object> map = new HashMap<>(value.size());
         for (Map.Entry<Object, Object> entry : value.entrySet()) {
             Object k = convertAvroUtf8(entry.getKey());
diff --git a/storm-checkstyle/src/main/resources/storm/storm_checkstyle.xml b/storm-checkstyle/src/main/resources/storm/storm_checkstyle.xml
index a2a7689..e5ff95a 100644
--- a/storm-checkstyle/src/main/resources/storm/storm_checkstyle.xml
+++ b/storm-checkstyle/src/main/resources/storm/storm_checkstyle.xml
@@ -28,6 +28,8 @@
       * 4 space indents instead of 2
       * line-length limit is 140 instead of 100
       * removed JavadocMethod
+      * added RedundantModifier
+      * added WhitespaceAfter
     Once checkstyle has the ability to override selected configuration elements from within the Maven
     pom.xml file, then we can remove this file in favor of overriding the provided google_checks.xml file.
     See this issue to track that functionality:
@@ -258,5 +260,6 @@
         </module>
         <module name="CommentsIndentation"/>
         <module name="RedundantModifier"/>
+        <module name="WhitespaceAfter"/>
     </module>
 </module>
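
For readers skimming the diff: the new WhitespaceAfter module drives the bulk of the Java changes in this commit. With its default token set it requires whitespace after tokens such as commas, semicolons, and typecasts. A before/after illustration drawn from patterns fixed above:

    // Flagged by WhitespaceAfter: no space after ',' or after the cast.
    Map<String,Object> conf = ConfigUtils.readStormConfig();
    String type = (String)conf.get(DaemonConfig.NIMBUS_BLOBSTORE);

    // Compliant:
    Map<String, Object> conf2 = ConfigUtils.readStormConfig();
    String type2 = (String) conf2.get(DaemonConfig.NIMBUS_BLOBSTORE);
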
diff --git a/storm-client/src/jvm/org/apache/storm/StormSubmitter.java b/storm-client/src/jvm/org/apache/storm/StormSubmitter.java
index 9302852..467c431 100644
--- a/storm-client/src/jvm/org/apache/storm/StormSubmitter.java
+++ b/storm-client/src/jvm/org/apache/storm/StormSubmitter.java
@@ -78,7 +78,7 @@ public class StormSubmitter {
         // Is the topology ZooKeeper authentication configuration unset?
         if (!conf.containsKey(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD)
                 || conf.get(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD) == null
-                || !validateZKDigestPayload((String)conf.get(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD))) {
+                || !validateZKDigestPayload((String) conf.get(Config.STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD))) {
             secretPayload = generateZookeeperDigestSecretPayload();
             LOG.info("Generated ZooKeeper secret payload for MD5-digest: " + secretPayload);
         }
@@ -110,7 +110,7 @@ public class StormSubmitter {
      * @throws NotAliveException        if the topology is not alive
      * @throws InvalidTopologyException if any other error happens
      */
-    public static void pushCredentials(String name, Map<String,Object> topoConf, Map<String,String> credentials)
+    public static void pushCredentials(String name, Map<String, Object> topoConf, Map<String, String> credentials)
         throws AuthorizationException, NotAliveException, InvalidTopologyException {
         pushCredentials(name, topoConf, credentials, null);
     }
diff --git a/storm-client/src/jvm/org/apache/storm/executor/Executor.java b/storm-client/src/jvm/org/apache/storm/executor/Executor.java
index b404a2d..bc7ce20 100644
--- a/storm-client/src/jvm/org/apache/storm/executor/Executor.java
+++ b/storm-client/src/jvm/org/apache/storm/executor/Executor.java
@@ -118,7 +118,7 @@ public abstract class Executor implements Callable, JCQueue.Consumer {
     protected final Boolean hasEventLoggers;
     protected final boolean ackingEnabled;
     protected final ErrorReportingMetrics errorReportingMetrics;
-    protected final MpscChunkedArrayQueue<AddressedTuple> pendingEmits = new MpscChunkedArrayQueue<>(1024, (int)Math.pow(2, 30));
+    protected final MpscChunkedArrayQueue<AddressedTuple> pendingEmits = new MpscChunkedArrayQueue<>(1024, (int) Math.pow(2, 30));
     private final AddressedTuple flushTuple;
     protected ExecutorTransfer executorTransfer;
     protected ArrayList<Task> idToTask;
diff --git a/storm-client/src/jvm/org/apache/storm/metrics2/StormMetricRegistry.java b/storm-client/src/jvm/org/apache/storm/metrics2/StormMetricRegistry.java
index c460700..df064a0 100644
--- a/storm-client/src/jvm/org/apache/storm/metrics2/StormMetricRegistry.java
+++ b/storm-client/src/jvm/org/apache/storm/metrics2/StormMetricRegistry.java
@@ -40,7 +40,7 @@ public class StormMetricRegistry {
     public <T> SimpleGauge<T> gauge(
         T initialValue, String name, String topologyId, String componentId, Integer taskId, Integer port) {
         String metricName = metricName(name, topologyId, componentId, taskId, port);
-        return (SimpleGauge<T>)registry.gauge(metricName, () -> new SimpleGauge<>(initialValue));
+        return (SimpleGauge<T>) registry.gauge(metricName, () -> new SimpleGauge<>(initialValue));
     }
 
     public JcMetrics jcMetrics(String name, String topologyId, String componentId, Integer taskId, Integer port) {
diff --git a/storm-client/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java b/storm-client/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
index 30faf1f..298524e 100644
--- a/storm-client/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
+++ b/storm-client/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
@@ -103,7 +103,7 @@ public class PacemakerClient implements ISaslClient {
         ThreadFactory workerFactory = new NettyRenameThreadFactory(this.host + "-pm");
         // 0 means DEFAULT_EVENT_LOOP_THREADS
         // https://github.com/netty/netty/blob/netty-4.1.24.Final/transport/src/main/java/io/netty/channel/MultithreadEventLoopGroup.java#L40
-        int maxWorkers = (int)config.get(Config.PACEMAKER_CLIENT_MAX_THREADS);
+        int maxWorkers = (int) config.get(Config.PACEMAKER_CLIENT_MAX_THREADS);
         this.workerEventLoopGroup = new NioEventLoopGroup(maxWorkers > 0 ? maxWorkers : 0, workerFactory);
         int thriftMessageMaxSize = (Integer) config.get(Config.PACEMAKER_THRIFT_MESSAGE_SIZE_MAX);
         bootstrap = new Bootstrap()
diff --git a/storm-client/src/jvm/org/apache/storm/security/auth/ThriftServer.java b/storm-client/src/jvm/org/apache/storm/security/auth/ThriftServer.java
index 9938b7c..fb45ab7 100644
--- a/storm-client/src/jvm/org/apache/storm/security/auth/ThriftServer.java
+++ b/storm-client/src/jvm/org/apache/storm/security/auth/ThriftServer.java
@@ -52,7 +52,7 @@ public class ThriftServer {
     public void stop() {
         server.stop();
         if (transportPlugin instanceof SaslTransportPlugin) {
-            ((SaslTransportPlugin)transportPlugin).close();
+            ((SaslTransportPlugin) transportPlugin).close();
         }
     }
 
diff --git a/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureBolt.java b/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureBolt.java
index bc37400..b061fe0 100644
--- a/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureBolt.java
+++ b/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureBolt.java
@@ -58,7 +58,7 @@ public class ClojureBolt implements IRichBolt, FinishedCallback {
         IFn hof = ClojureUtil.loadClojureFn(fnSpec.get(0), fnSpec.get(1));
         try {
             IFn preparer = (IFn) hof.applyTo(RT.seq(params));
-            final Map<Keyword,Object> collectorMap = new PersistentArrayMap(new Object[] {
+            final Map<Keyword, Object> collectorMap = new PersistentArrayMap(new Object[] {
                     Keyword.intern(Symbol.create("output-collector")), collector,
                     Keyword.intern(Symbol.create("context")), context});
             List<Object> args = new ArrayList<Object>() {
diff --git a/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureSpout.java b/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureSpout.java
index f68415f..4d7372a 100644
--- a/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureSpout.java
+++ b/storm-clojure/src/main/java/org/apache/storm/clojure/ClojureSpout.java
@@ -57,7 +57,7 @@ public class ClojureSpout implements IRichSpout {
         IFn hof = ClojureUtil.loadClojureFn(fnSpec.get(0), fnSpec.get(1));
         try {
             IFn preparer = (IFn) hof.applyTo(RT.seq(params));
-            final Map<Keyword,Object> collectorMap = new PersistentArrayMap(new Object[] {
+            final Map<Keyword, Object> collectorMap = new PersistentArrayMap(new Object[] {
                 Keyword.intern(Symbol.create("output-collector")), collector,
                 Keyword.intern(Symbol.create("context")), context});
             List<Object> args = new ArrayList<Object>() {
diff --git a/storm-core/src/jvm/org/apache/storm/command/AdminCommands.java b/storm-core/src/jvm/org/apache/storm/command/AdminCommands.java
index 8076b74..0e4acdc 100644
--- a/storm-core/src/jvm/org/apache/storm/command/AdminCommands.java
+++ b/storm-core/src/jvm/org/apache/storm/command/AdminCommands.java
@@ -129,7 +129,7 @@ public class AdminCommands {
 
     private static void prettyPrint(TBase value, int depth, StringBuilder out) {
         if (value == null) {
-            println(out, depth,"null");
+            println(out, depth, "null");
             return;
         }
         println(out, depth, "{");
@@ -165,7 +165,7 @@ public class AdminCommands {
         //Special cases for storm...
         if ("json_conf".equals(key) && o instanceof String) {
             try {
-                o = Utils.parseJson((String)o);
+                o = Utils.parseJson((String) o);
             } catch (Exception e) {
                 LOG.error("Could not parse json_conf as JSON", e);
             }
@@ -182,7 +182,7 @@ public class AdminCommands {
             println(out, depth, "}");
         } else if (o instanceof Collection) {
             println(out, depth, keyStr(key) + "[");
-            for (Object sub: (Collection)o) {
+            for (Object sub: (Collection) o) {
                 prettyPrintKeyValue(null, sub, depth + 1, out);
             }
             println(out, depth, "]");
diff --git a/storm-core/src/jvm/org/apache/storm/command/ShellSubmission.java b/storm-core/src/jvm/org/apache/storm/command/ShellSubmission.java
index 0ef320c..e72f70b 100644
--- a/storm-core/src/jvm/org/apache/storm/command/ShellSubmission.java
+++ b/storm-core/src/jvm/org/apache/storm/command/ShellSubmission.java
@@ -32,7 +32,7 @@ public class ShellSubmission {
             LOG.error("Arguments should be of the form: <path_to_jar> [argument...]");
             System.exit(-1);
         }
-        Map<String,Object> conf = ConfigUtils.readStormConfig();
+        Map<String, Object> conf = ConfigUtils.readStormConfig();
         try (NimbusClient client = NimbusClient.getConfiguredClient(conf)) {
             NimbusSummary ns = client.getClient().getLeader();
             String host = ns.get_host();
diff --git a/storm-core/src/jvm/org/apache/storm/command/UploadCredentials.java b/storm-core/src/jvm/org/apache/storm/command/UploadCredentials.java
index 226e957..48ef76c 100644
--- a/storm-core/src/jvm/org/apache/storm/command/UploadCredentials.java
+++ b/storm-core/src/jvm/org/apache/storm/command/UploadCredentials.java
@@ -81,8 +81,8 @@ public class UploadCredentials {
                     LOG.info("Using topology conf from {} as basis for getting new creds", topologyId);
 
                     Map<String, Object> commandLine = Utils.readCommandLineOpts();
-                    List<String> clCreds = (List<String>)commandLine.get(Config.TOPOLOGY_AUTO_CREDENTIALS);
-                    List<String> topoCreds = (List<String>)topologyConf.get(Config.TOPOLOGY_AUTO_CREDENTIALS);
+                    List<String> clCreds = (List<String>) commandLine.get(Config.TOPOLOGY_AUTO_CREDENTIALS);
+                    List<String> topoCreds = (List<String>) topologyConf.get(Config.TOPOLOGY_AUTO_CREDENTIALS);
                     
                     if (clCreds != null) {
                         Set<String> extra = new HashSet<>(clCreds);
diff --git a/storm-core/src/jvm/org/apache/storm/shade/org/apache/zookeeper/ZkCli.java b/storm-core/src/jvm/org/apache/storm/shade/org/apache/zookeeper/ZkCli.java
index 57ec899..bba0693 100644
--- a/storm-core/src/jvm/org/apache/storm/shade/org/apache/zookeeper/ZkCli.java
+++ b/storm-core/src/jvm/org/apache/storm/shade/org/apache/zookeeper/ZkCli.java
@@ -37,7 +37,7 @@ public class ZkCli implements AdminCommands.AdminCommand {
         List<String> servers = (List<String>) conf.get(Config.STORM_ZOOKEEPER_SERVERS);
         int port = ObjectReader.getInt(conf.get(Config.STORM_ZOOKEEPER_PORT));
         String root = (String) conf.get(Config.STORM_ZOOKEEPER_ROOT);
-        Map<String, Object> cl = CLI.opt("s","server", null, CLI.AS_STRING, CLI.LAST_WINS)
+        Map<String, Object> cl = CLI.opt("s", "server", null, CLI.AS_STRING, CLI.LAST_WINS)
             .opt("t", "time-out", ObjectReader.getInt(conf.get(Config.STORM_ZOOKEEPER_SESSION_TIMEOUT)),
                 CLI.AS_INT, CLI.LAST_WINS)
             .boolOpt("w", "write")
@@ -46,12 +46,12 @@ public class ZkCli implements AdminCommands.AdminCommand {
             .boolOpt("h", "help")
             .parse(args);
 
-        if ((Boolean)cl.get("h")) {
+        if ((Boolean) cl.get("h")) {
             printCliHelp(command, System.out);
             return;
         }
 
-        String jaas = (String)cl.get("j");
+        String jaas = (String) cl.get("j");
         if (jaas != null && !jaas.isEmpty()) {
             System.setProperty("java.security.auth.login.config", jaas);
         }
@@ -67,14 +67,14 @@ public class ZkCli implements AdminCommands.AdminCommand {
                 isFirst = false;
                 sb.append(zkServer).append(':').append(port);
             }
-            if (!(Boolean)cl.get("n")) {
+            if (!(Boolean) cl.get("n")) {
                 sb.append(root);
             }
             connectionString = sb.toString();
         }
 
-        boolean readOnly = !(Boolean)cl.get("w");
-        int timeout = (Integer)cl.get("t");
+        boolean readOnly = !(Boolean) cl.get("w");
+        int timeout = (Integer) cl.get("t");
         ZooKeeper zk;
         if (readOnly) {
             zk = new ReadOnlyZookeeper(connectionString, timeout, watchedEvent -> { });
diff --git a/storm-server/src/main/java/org/apache/storm/daemon/nimbus/HeartbeatCache.java b/storm-server/src/main/java/org/apache/storm/daemon/nimbus/HeartbeatCache.java
index 6f8abc3..bb6449c 100644
--- a/storm-server/src/main/java/org/apache/storm/daemon/nimbus/HeartbeatCache.java
+++ b/storm-server/src/main/java/org/apache/storm/daemon/nimbus/HeartbeatCache.java
@@ -72,7 +72,7 @@ public class HeartbeatCache {
             isTimedOut = Time.deltaSecs(getNimbusTimeSecs()) >= timeout;
         }
 
-        public synchronized void updateFromHb(Integer timeout, Map<String,Object> newBeat) {
+        public synchronized void updateFromHb(Integer timeout, Map<String, Object> newBeat) {
             if (newBeat != null) {
                 Integer newReportedTime = (Integer) newBeat.getOrDefault(ClientStatsUtil.TIME_SECS, 0);
                 if (!newReportedTime.equals(executorReportedTimeSecs)) {
@@ -148,7 +148,7 @@ public class HeartbeatCache {
      * @param allExecutors the executors.
      * @param timeout the timeout.
      */
-    public void updateFromZkHeartbeat(String topoId, Map<List<Integer>, Map<String,Object>> executorBeats,
+    public void updateFromZkHeartbeat(String topoId, Map<List<Integer>, Map<String, Object>> executorBeats,
                                       Set<List<Integer>> allExecutors, Integer timeout) {
         Map<List<Integer>, ExecutorCache> topoCache = cache.computeIfAbsent(topoId, MAKE_MAP);
         if (executorBeats == null) {
diff --git a/storm-server/src/main/java/org/apache/storm/daemon/nimbus/Nimbus.java b/storm-server/src/main/java/org/apache/storm/daemon/nimbus/Nimbus.java
index 1989d1b..daccf15 100644
--- a/storm-server/src/main/java/org/apache/storm/daemon/nimbus/Nimbus.java
+++ b/storm-server/src/main/java/org/apache/storm/daemon/nimbus/Nimbus.java
@@ -605,8 +605,8 @@ public class Nimbus implements Iface, Shutdownable, DaemonCommon {
         }
         this.groupMapper = groupMapper;
         this.principalToLocal = ClientAuthUtils.getPrincipalToLocalPlugin(conf);
-        this.supervisorClasspaths = Collections.unmodifiableNavigableMap(
-            Utils.getConfiguredClasspathVersions(conf, EMPTY_STRING_LIST));// We don't use the classpath part of this, so just an empty list
+        // We don't use the classpath part of this, so just an empty list
+        this.supervisorClasspaths = Collections.unmodifiableNavigableMap(Utils.getConfiguredClasspathVersions(conf, EMPTY_STRING_LIST));
         clusterMetricSet = new ClusterSummaryMetricSet(metricsRegistry);
     }
 
diff --git a/storm-server/src/main/java/org/apache/storm/localizer/LocalizedResource.java b/storm-server/src/main/java/org/apache/storm/localizer/LocalizedResource.java
index f984def..694eba0 100644
--- a/storm-server/src/main/java/org/apache/storm/localizer/LocalizedResource.java
+++ b/storm-server/src/main/java/org/apache/storm/localizer/LocalizedResource.java
@@ -98,7 +98,7 @@ public class LocalizedResource extends LocallyCachedBlob {
         Path base = getLocalUserFileCacheDir(localBaseDir, user);
         this.baseDir = shouldUncompress ? getCacheDirForArchives(base) : getCacheDirForFiles(base);
         this.conf = conf;
-        this.symLinksDisabled = (boolean)conf.getOrDefault(Config.DISABLE_SYMLINKS, false);
+        this.symLinksDisabled = (boolean) conf.getOrDefault(Config.DISABLE_SYMLINKS, false);
         this.user = user;
         this.fsOps = fsOps;
         versionFilePath = constructVersionFileName(baseDir, key);
diff --git a/storm-server/src/main/java/org/apache/storm/scheduler/resource/normalization/NormalizedResourceRequest.java b/storm-server/src/main/java/org/apache/storm/scheduler/resource/normalization/NormalizedResourceRequest.java
index 0a9c7f0..578c55f 100644
--- a/storm-server/src/main/java/org/apache/storm/scheduler/resource/normalization/NormalizedResourceRequest.java
+++ b/storm-server/src/main/java/org/apache/storm/scheduler/resource/normalization/NormalizedResourceRequest.java
@@ -192,8 +192,8 @@ public class NormalizedResourceRequest implements NormalizedResourcesWithMemory
      * return a map that is the sum of resources1 + resources2
      */
     public static Map<String, Double> addResourceMap(Map<String, Double> resources1, Map<String, Double> resources2) {
-        Map<String,Double> sum = new HashMap<>(resources1);
-        for (Map.Entry<String,Double> me : resources2.entrySet()) {
+        Map<String, Double> sum = new HashMap<>(resources1);
+        for (Map.Entry<String, Double> me : resources2.entrySet()) {
             Double cur = sum.getOrDefault(me.getKey(), 0.0) + me.getValue();
             sum.put(me.getKey(), cur);
         }
@@ -208,7 +208,7 @@ public class NormalizedResourceRequest implements NormalizedResourcesWithMemory
             return new HashMap<>();
         }
         Map<String, Double> difference = new HashMap<>(resource1);
-        for (Map.Entry<String,Double> me : resource2.entrySet()) {
+        for (Map.Entry<String, Double> me : resource2.entrySet()) {
             Double sub = difference.getOrDefault(me.getKey(), 0.0) - me.getValue();
             difference.put(me.getKey(), sub);
         }
diff --git a/storm-server/src/main/java/org/apache/storm/utils/ServerUtils.java b/storm-server/src/main/java/org/apache/storm/utils/ServerUtils.java
index c026160..b099bc1 100644
--- a/storm-server/src/main/java/org/apache/storm/utils/ServerUtils.java
+++ b/storm-server/src/main/java/org/apache/storm/utils/ServerUtils.java
@@ -137,7 +137,7 @@ public class ServerUtils {
             String baseDir,
             NimbusInfo nimbusInfo,
             ILeaderElector leaderElector) {
-        String type = (String)conf.get(DaemonConfig.NIMBUS_BLOBSTORE);
+        String type = (String) conf.get(DaemonConfig.NIMBUS_BLOBSTORE);
         if (type == null) {
             type = LocalFsBlobStore.class.getName();
         }
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/DRPCServer.java b/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/DRPCServer.java
index 7a8b5b3..e87ec52 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/DRPCServer.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/DRPCServer.java
@@ -64,7 +64,7 @@ public class DRPCServer implements AutoCloseable {
      * @param conf Conf to be added in context filter
      */
     public static void addRequestContextFilter(ServletContextHandler context, String configName, Map<String, Object> conf) {
-        IHttpCredentialsPlugin auth = ServerAuthUtils.getHttpCredentialsPlugin(conf, (String)conf.get(configName));
+        IHttpCredentialsPlugin auth = ServerAuthUtils.getHttpCredentialsPlugin(conf, (String) conf.get(configName));
         ReqContextFilter filter = new ReqContextFilter(auth);
         context.addFilter(new FilterHolder(filter), "/*", EnumSet.allOf(DispatcherType.class));
     }
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/webapp/ReqContextFilter.java b/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/webapp/ReqContextFilter.java
index f5dbc80..72fb19c 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/webapp/ReqContextFilter.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/drpc/webapp/ReqContextFilter.java
@@ -67,7 +67,7 @@ public class ReqContextFilter implements Filter {
      */
     @Override
     public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
-        handle((HttpServletRequest)request, (HttpServletResponse)response, chain);
+        handle((HttpServletRequest) request, (HttpServletResponse) response, chain);
     }
 
     /**
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/LogviewerServer.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/LogviewerServer.java
index f043693..46b8fbb 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/LogviewerServer.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/LogviewerServer.java
@@ -105,7 +105,7 @@ public class LogviewerServer implements AutoCloseable {
 
             ServletHolder holderPwd = new ServletHolder("default", DefaultServlet.class);
             holderPwd.setInitOrder(1);
-            context.addServlet(holderPwd,"/");
+            context.addServlet(holderPwd, "/");
 
             ServletHolder jerseyServlet = context.addServlet(ServletContainer.class, "/api/v1/*");
             jerseyServlet.setInitOrder(2);
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/DirectoryCleaner.java b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/DirectoryCleaner.java
index b8f8ac4..f4bfcd7 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/DirectoryCleaner.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/logviewer/utils/DirectoryCleaner.java
@@ -49,9 +49,9 @@ public class DirectoryCleaner {
     // used to recognize the pattern of active log files, we may remove the "current" from this list
     private static final Pattern ACTIVE_LOG_PATTERN = Pattern.compile(".*\\.(log|err|out|current|yaml|pid|metrics)$");
     // used to recognize the pattern of some meta files in a worker log directory
-    private static final Pattern META_LOG_PATTERN = Pattern.compile(".*\\.(yaml|pid)$");// max number of files to delete for every round
+    private static final Pattern META_LOG_PATTERN = Pattern.compile(".*\\.(yaml|pid)$");
 
-    private static final int PQ_SIZE = 1024;
+    private static final int PQ_SIZE = 1024; // max number of files to delete for every round
     private static final int MAX_ROUNDS = 512; // max rounds of scanning the dirs
     public static final int MAX_NUMBER_OF_FILES_FOR_DIR = 1024;
 
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIHelpers.java b/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIHelpers.java
index 3109bfe..784ba0c 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIHelpers.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIHelpers.java
@@ -1086,7 +1086,7 @@ public class UIHelpers {
      * @return getSupervisorPageInfo
      */
     public static Map<String, Object> getSupervisorPageInfo(
-            SupervisorPageInfo supervisorPageInfo, Map<String,Object> config) {
+            SupervisorPageInfo supervisorPageInfo, Map<String, Object> config) {
         Map<String, Object> result = new HashMap<>();
         result.put("workers", getWorkerSummaries(supervisorPageInfo, config));
         result.put("schedulerDisplayResource", config.get(DaemonConfig.SCHEDULER_DISPLAY_RESOURCE));
@@ -1103,7 +1103,7 @@ public class UIHelpers {
      * @return getAllTopologiesSummary
      */
     public static Map<String, Object> getAllTopologiesSummary(
-            List<TopologySummary> topologies, Map<String,Object> config) {
+            List<TopologySummary> topologies, Map<String, Object> config) {
         Map<String, Object> result = new HashMap();
         result.put("topologies", getTopologiesMap(null, topologies));
         result.put("schedulerDisplayResource", config.get(DaemonConfig.SCHEDULER_DISPLAY_RESOURCE));
@@ -1169,7 +1169,7 @@ public class UIHelpers {
      * @param windowToTransferred windowToTransferred
      * @return getStatDisplayMapLong
      */
-    private static Map<String, Long> getStatDisplayMapLong(Map<String,Long> windowToTransferred) {
+    private static Map<String, Long> getStatDisplayMapLong(Map<String, Long> windowToTransferred) {
         Map<String, Long> result = new HashMap();
         for (Map.Entry<String, Long> entry : windowToTransferred.entrySet()) {
             result.put(entry.getKey(), entry.getValue());
@@ -1557,7 +1557,7 @@ public class UIHelpers {
      * @param config config
      * @return unpackTopologyInfo
      */
-    private static Map<String,Object> unpackTopologyInfo(TopologyPageInfo topologyPageInfo, String window, Map<String,Object> config) {
+    private static Map<String, Object> unpackTopologyInfo(TopologyPageInfo topologyPageInfo, String window, Map<String, Object> config) {
         Map<String, Object> result = new HashMap();
         result.put("id", topologyPageInfo.get_id());
         result.put("encodedId", Utils.urlEncodeUtf8(topologyPageInfo.get_id()));
@@ -1665,8 +1665,8 @@ public class UIHelpers {
      * @param config config
      * @return getTopologyLag.
      */
-    public static Map<String, Map<String, Object>> getTopologyLag(StormTopology userTopology, Map<String,Object> config) {
-        Boolean disableLagMonitoring = (Boolean)(config.get(DaemonConfig.UI_DISABLE_SPOUT_LAG_MONITORING));
+    public static Map<String, Map<String, Object>> getTopologyLag(StormTopology userTopology, Map<String, Object> config) {
+        Boolean disableLagMonitoring = (Boolean) (config.get(DaemonConfig.UI_DISABLE_SPOUT_LAG_MONITORING));
         return disableLagMonitoring ? Collections.EMPTY_MAP : TopologySpoutLag.lag(userTopology, config);
     }
 
@@ -1731,11 +1731,11 @@ public class UIHelpers {
      * @param stats stats
      * @return sanitizeTransferredStats
      */
-    public static  Map<String, Map<String,Long>> sanitizeTransferredStats(Map<String, Map<String,Long>> stats) {
-        Map<String, Map<String,Long>> result = new HashMap();
-        for (Map.Entry<String, Map<String,Long>> entry : stats.entrySet()) {
-            Map<String,Long> temp = new HashMap();
-            for (Map.Entry<String,Long> innerEntry : entry.getValue().entrySet()) {
+    public static  Map<String, Map<String, Long>> sanitizeTransferredStats(Map<String, Map<String, Long>> stats) {
+        Map<String, Map<String, Long>> result = new HashMap();
+        for (Map.Entry<String, Map<String, Long>> entry : stats.entrySet()) {
+            Map<String, Long> temp = new HashMap();
+            for (Map.Entry<String, Long> innerEntry : entry.getValue().entrySet()) {
                 temp.put(sanitizeStreamName(innerEntry.getKey()), innerEntry.getValue());
             }
             result.put(entry.getKey(), temp);
@@ -1767,7 +1767,7 @@ public class UIHelpers {
      * @param entryInput entryInput
      * @return getInputMap
      */
-    public static Map<String, Object> getInputMap(Map.Entry<GlobalStreamId,Grouping> entryInput) {
+    public static Map<String, Object> getInputMap(Map.Entry<GlobalStreamId, Grouping> entryInput) {
         Map<String, Object> result = new HashMap();
         result.put(":component", entryInput.getKey().get_componentId());
         result.put(":stream", entryInput.getKey().get_streamId());
@@ -2308,7 +2308,7 @@ public class UIHelpers {
      * @throws TException TException
      */
     public static Map<String, Object> getTopologyProfilingDump(Nimbus.Iface client, String id, String hostPort,
-                                                               Map<String,Object> config) throws TException {
+                                                               Map<String, Object> config) throws TException {
         setTopologyProfilingAction(
                 client, id , hostPort, System.currentTimeMillis(),
                 config, ProfileAction.JPROFILE_DUMP
@@ -2342,7 +2342,7 @@ public class UIHelpers {
      */
     public static Map<String, Object> getTopologyProfilingRestartWorker(Nimbus.Iface client,
                                                                         String id, String hostPort,
-                                                                        Map<String,Object> config) throws TException {
+                                                                        Map<String, Object> config) throws TException {
         setTopologyProfilingAction(
                 client, id , hostPort, System.currentTimeMillis(), config, ProfileAction.JVM_RESTART
         );
@@ -2362,7 +2362,7 @@ public class UIHelpers {
      * @throws TException TException
      */
     public static Map<String, Object> getTopologyProfilingDumpHeap(Nimbus.Iface client, String id, String hostPort,
-                                                                   Map<String,Object> config) throws TException {
+                                                                   Map<String, Object> config) throws TException {
         setTopologyProfilingAction(client, id , hostPort, System.currentTimeMillis(), config, ProfileAction.JMAP_DUMP);
         Map<String, Object> result = new HashMap();
         result.put("status", "ok");
@@ -2407,7 +2407,7 @@ public class UIHelpers {
      * @param config config
      * @return getNimbusSummary
      */
-    public static Map<String, Object> getNimbusSummary(ClusterSummary clusterInfo, Map<String,Object> config) {
+    public static Map<String, Object> getNimbusSummary(ClusterSummary clusterInfo, Map<String, Object> config) {
         List<NimbusSummary> nimbusSummaries = clusterInfo.get_nimbuses();
         List<String> nimbusSeeds = new ArrayList();
         for (String nimbusHost : (List<String>) config.get(Config.NIMBUS_SEEDS)) {
diff --git a/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIServer.java b/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIServer.java
index 7cc0129..0b292a4 100644
--- a/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIServer.java
+++ b/storm-webapp/src/main/java/org/apache/storm/daemon/ui/UIServer.java
@@ -161,16 +161,16 @@ public class UIServer {
             }
         }
 
-        holderHome.setInitParameter("dirAllowed","true");
-        holderHome.setInitParameter("pathInfoOnly","true");
+        holderHome.setInitParameter("dirAllowed", "true");
+        holderHome.setInitParameter("pathInfoOnly", "true");
         context.addFilter(new FilterHolder(new HeaderResponseServletFilter(metricsRegistry)), "/*", EnumSet.allOf(DispatcherType.class));
-        context.addServlet(holderHome,"/*");
+        context.addServlet(holderHome, "/*");
 
 
         // Lastly, the default servlet for root content (always needed, to satisfy servlet spec)
         ServletHolder holderPwd = new ServletHolder("default", DefaultServlet.class);
-        holderPwd.setInitParameter("dirAllowed","true");
-        context.addServlet(holderPwd,"/");
+        holderPwd.setInitParameter("dirAllowed", "true");
+        context.addServlet(holderPwd, "/");
 
         metricsRegistry.startMetricsReporters(conf);
         Utils.addShutdownHookWithForceKillIn1Sec(metricsRegistry::stopMetricsReporters);