Posted to common-commits@hadoop.apache.org by bh...@apache.org on 2019/09/19 18:30:47 UTC

[hadoop] branch trunk updated: HDDS-2154. Fix Checkstyle issues (#1475)

This is an automated email from the ASF dual-hosted git repository.

bharat pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 126ef77  HDDS-2154. Fix Checkstyle issues (#1475)
126ef77 is described below

commit 126ef77a810113d263042adfec0a613bf900964d
Author: Elek, Márton <el...@users.noreply.github.com>
AuthorDate: Thu Sep 19 20:30:33 2019 +0200

    HDDS-2154. Fix Checkstyle issues (#1475)
---
 .../hadoop/hdds/scm/client/HddsClientUtils.java    |  9 ++++--
 .../hadoop/hdds/utils/LevelDBStoreIterator.java    |  3 +-
 .../hadoop/hdds/utils/RocksDBStoreIterator.java    |  8 +++--
 .../hadoop/hdds/utils/db/cache/TableCache.java     |  3 +-
 .../hadoop/hdds/utils/TestMetadataStore.java       | 10 ++++---
 .../server/ratis/ContainerStateMachine.java        |  3 +-
 .../apache/hadoop/ozone/web/utils/OzoneUtils.java  |  3 +-
 .../org/apache/hadoop/fs/ozone/OzoneFsShell.java   | 34 ++++++++--------------
 .../ozone/freon/OzoneClientKeyValidator.java       |  1 -
 .../apache/hadoop/ozone/freon/S3KeyGenerator.java  |  1 -
 .../apache/hadoop/ozone/freon/SameKeyReader.java   |  1 -
 11 files changed, 37 insertions(+), 39 deletions(-)

diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
index 4a3926d..d3bb31a 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
@@ -133,7 +133,8 @@ public final class HddsClientUtils {
    *
    * @throws IllegalArgumentException
    */
-  public static void verifyResourceName(String resName) throws IllegalArgumentException {
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
     if (resName == null) {
       throw new IllegalArgumentException("Bucket or Volume name is null");
     }
@@ -141,7 +142,8 @@ public final class HddsClientUtils {
     if (resName.length() < OzoneConsts.OZONE_MIN_BUCKET_NAME_LENGTH ||
         resName.length() > OzoneConsts.OZONE_MAX_BUCKET_NAME_LENGTH) {
       throw new IllegalArgumentException(
-          "Bucket or Volume length is illegal, valid length is 3-63 characters");
+          "Bucket or Volume length is illegal, "
+              + "valid length is 3-63 characters");
     }
 
     if (resName.charAt(0) == '.' || resName.charAt(0) == '-') {
@@ -151,7 +153,8 @@ public final class HddsClientUtils {
 
     if (resName.charAt(resName.length() - 1) == '.' ||
         resName.charAt(resName.length() - 1) == '-') {
-      throw new IllegalArgumentException("Bucket or Volume name cannot end with a period or dash");
+      throw new IllegalArgumentException("Bucket or Volume name "
+          + "cannot end with a period or dash");
     }
 
     boolean isIPv4 = true;
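
The hunk above only re-wraps long lines; the validation itself is unchanged. For readers unfamiliar with the rule being reformatted, a minimal sketch of how HddsClientUtils.verifyResourceName behaves, using only the checks visible in this hunk (the sample names are illustrative, and the method applies further character checks not shown here):

  import org.apache.hadoop.hdds.scm.client.HddsClientUtils;

  public final class ResourceNameCheckSketch {
    public static void main(String[] args) {
      // Expected to pass the checks shown above: lowercase, 12 characters,
      // and no leading or trailing '.' or '-'.
      HddsClientUtils.verifyResourceName("my-bucket-01");

      // Rejected by the leading '.'/'-' check: throws IllegalArgumentException.
      try {
        HddsClientUtils.verifyResourceName("-bad-bucket");
      } catch (IllegalArgumentException e) {
        System.out.println("rejected: " + e.getMessage());
      }
    }
  }
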
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java
index 2b9dbc0..f5b6769 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java
@@ -25,7 +25,8 @@ import java.util.NoSuchElementException;
 /**
  * LevelDB store iterator.
  */
-public class LevelDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
+public class LevelDBStoreIterator
+    implements MetaStoreIterator<MetadataStore.KeyValue> {
 
 
   private DBIterator levelDBIterator;
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java
index 01e7244..e39ec57 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java
@@ -26,7 +26,8 @@ import java.util.NoSuchElementException;
 /**
  * RocksDB store iterator.
  */
-public class RocksDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
+public class RocksDBStoreIterator
+    implements MetaStoreIterator<MetadataStore.KeyValue> {
 
   private RocksIterator rocksDBIterator;
 
@@ -43,8 +44,9 @@ public class RocksDBStoreIterator implements MetaStoreIterator< MetadataStore.Ke
   @Override
   public MetadataStore.KeyValue next() {
     if (rocksDBIterator.isValid()) {
-      MetadataStore.KeyValue value = MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
-          .value());
+      MetadataStore.KeyValue value =
+          MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
+              .value());
       rocksDBIterator.next();
       return value;
     }
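
Both the LevelDB and RocksDB iterators reformatted in this patch implement the same MetaStoreIterator<MetadataStore.KeyValue> contract. A minimal sketch of consuming it, relying only on the iterator(), hasNext() and next() calls that appear elsewhere in this patch (the helper class and method names are invented for illustration):

  import java.io.IOException;

  import org.apache.hadoop.hdds.utils.MetaStoreIterator;
  import org.apache.hadoop.hdds.utils.MetadataStore;

  public final class MetaStoreScanSketch {
    /** Counts the entries in a store; hasNext() guards against the
     *  NoSuchElementException that next() throws past the last entry. */
    public static int countEntries(MetadataStore store) throws IOException {
      MetaStoreIterator<MetadataStore.KeyValue> it = store.iterator();
      int count = 0;
      while (it.hasNext()) {
        it.next();
        count++;
      }
      return count;
    }
  }
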
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
index 2ac62cc..1f16969 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
@@ -85,7 +85,8 @@ public interface TableCache<CACHEKEY extends CacheKey,
    *  full cache. It return's {@link CacheResult} with null
    *  and status as {@link CacheResult.CacheStatus#NOT_EXIST}.
    *
-   *  If cache clean up policy is {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
+   *  If cache clean up policy is
+   *  {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
    *  table cache is partial cache. It return's {@link CacheResult} with
    *  null and status as MAY_EXIST.
    *
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
index db4241a..d24fcf5 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdds.utils;
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -27,6 +28,7 @@ import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -68,13 +70,14 @@ public class TestMetadataStore {
   public ExpectedException expectedException = ExpectedException.none();
   private MetadataStore store;
   private File testDir;
+
   public TestMetadataStore(String metadataImpl) {
     this.storeImpl = metadataImpl;
   }
 
   @Parameters
   public static Collection<Object[]> data() {
-    return Arrays.asList(new Object[][]{
+    return Arrays.asList(new Object[][] {
         {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
         {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
     });
@@ -121,7 +124,8 @@ public class TestMetadataStore {
 
     //As database is empty, check whether iterator is working as expected or
     // not.
-    MetaStoreIterator< MetadataStore.KeyValue > metaStoreIterator = dbStore.iterator();
+    MetaStoreIterator<MetadataStore.KeyValue> metaStoreIterator =
+        dbStore.iterator();
     assertFalse(metaStoreIterator.hasNext());
     try {
       metaStoreIterator.next();
@@ -162,7 +166,6 @@ public class TestMetadataStore {
 
   }
 
-
   @Test
   public void testMetaStoreConfigDifferentFromType() throws IOException {
 
@@ -199,7 +202,6 @@ public class TestMetadataStore {
     GenericTestUtils.LogCapturer logCapturer =
         GenericTestUtils.LogCapturer.captureLogs(MetadataStoreBuilder.LOG);
 
-
     File dbDir = GenericTestUtils.getTestDir(getClass().getSimpleName()
         + "-" + storeImpl.toLowerCase() + "-test");
     MetadataStore dbStore = MetadataStoreBuilder.newBuilder().setConf(conf)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
index c6ab0a1..0535763 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java
@@ -448,7 +448,8 @@ public class ContainerStateMachine extends BaseStateMachine {
                 + write.getChunkData().getChunkName() + e);
             raftFuture.completeExceptionally(e);
             throw e;
-          }}, chunkExecutor);
+          }
+        }, chunkExecutor);
 
     writeChunkFutureMap.put(entryIndex, writeChunkFuture);
     LOG.debug(gid + ": writeChunk writeStateMachineData : blockId " +
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
index 4adb006..e146d31 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java
@@ -145,7 +145,8 @@ public final class OzoneUtils {
    *
    * @throws IllegalArgumentException
    */
-  public static void verifyResourceName(String resName) throws IllegalArgumentException {
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
     HddsClientUtils.verifyResourceName(resName);
   }
 
diff --git a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFsShell.java b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFsShell.java
index 873c843..e3d8742 100644
--- a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFsShell.java
+++ b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFsShell.java
@@ -17,28 +17,12 @@
  */
 package org.apache.hadoop.fs.ozone;
 
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedList;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.shell.Command;
 import org.apache.hadoop.fs.shell.CommandFactory;
 import org.apache.hadoop.fs.shell.FsCommand;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.tools.TableListing;
-import org.apache.hadoop.tracing.TraceUtils;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.htrace.core.TraceScope;
-import org.apache.htrace.core.Tracer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Provide command line access to a Ozone FileSystem. */
 @InterfaceAudience.Private
@@ -51,14 +35,19 @@ public class OzoneFsShell extends FsShell {
    * {@link #setConf(Configuration)} with a valid configuration prior
    * to running commands.
    */
-  public OzoneFsShell() { this(null); }
+  public OzoneFsShell() {
+    this(null);
+  }
 
   /**
-   * Construct a OzoneFsShell with the given configuration.  Commands can be
-   * executed via {@link #run(String[])}
+   * Construct a OzoneFsShell with the given configuration.
+   *
+   * Commands can be executed via {@link #run(String[])}
    * @param conf the hadoop configuration
    */
-  public OzoneFsShell(Configuration conf) { super(conf); }
+  public OzoneFsShell(Configuration conf) {
+    super(conf);
+  }
 
   protected void registerCommands(CommandFactory factory) {
     // TODO: DFSAdmin subclasses FsShell so need to protect the command
@@ -75,11 +64,12 @@ public class OzoneFsShell extends FsShell {
   }
 
   /**
-   * main() has some simple utility methods
+   * Main entry point to execute fs commands.
+   *
    * @param argv the command and its arguments
    * @throws Exception upon error
    */
-  public static void main(String argv[]) throws Exception {
+  public static void main(String[] argv) throws Exception {
     OzoneFsShell shell = newShellInstance();
     Configuration conf = new Configuration();
     conf.setQuietMode(false);
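
The main() above constructs the shell and its Configuration before handing control to the command runner. A minimal sketch of driving OzoneFsShell from code via ToolRunner, which the class imports ("-ls" and the o3fs URI below are only examples):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.ozone.OzoneFsShell;
  import org.apache.hadoop.util.ToolRunner;

  public final class OzoneFsShellSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      conf.setQuietMode(false);
      // OzoneFsShell(Configuration) is the constructor shown in the hunk
      // above; the command and path passed to ToolRunner are illustrative.
      OzoneFsShell shell = new OzoneFsShell(conf);
      int res = ToolRunner.run(shell, new String[] {"-ls", "o3fs://bucket.volume/"});
      System.exit(res);
    }
  }
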
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OzoneClientKeyValidator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OzoneClientKeyValidator.java
index 0cff241..f247b33 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OzoneClientKeyValidator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OzoneClientKeyValidator.java
@@ -22,7 +22,6 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneClient;
 import org.apache.hadoop.ozone.client.OzoneClientFactory;
 
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/S3KeyGenerator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/S3KeyGenerator.java
index 7290a2c..eb9a0ce 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/S3KeyGenerator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/S3KeyGenerator.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.ozone.freon;
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 
 import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
 import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/SameKeyReader.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/SameKeyReader.java
index cfbf8be..bafd3ec 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/SameKeyReader.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/SameKeyReader.java
@@ -22,7 +22,6 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneClient;
 import org.apache.hadoop.ozone.client.OzoneClientFactory;
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org