Posted to commits@hbase.apache.org by ja...@apache.org on 2019/12/28 19:37:37 UTC

[hbase] branch branch-2.1 updated: HBASE-23622 Reduced the number of Checkstyle violations in hbase-common

This is an automated email from the ASF dual-hosted git repository.

janh pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new ca0aaaf  HBASE-23622 Reduced the number of Checkstyle violations in hbase-common
ca0aaaf is described below

commit ca0aaaff30a0b27b2687a93694dbdda34cd2ef38
Author: Jan Hentschel <ja...@apache.org>
AuthorDate: Sat Dec 28 20:03:58 2019 +0100

    HBASE-23622 Reduced the number of Checkstyle violations in hbase-common
    
    Signed-off-by: stack <st...@apache.org>
    Signed-off-by: Viraj Jasani <vj...@apache.org>
---
 .../org/apache/hadoop/hbase/BBKVComparator.java    |  5 +-
 .../org/apache/hadoop/hbase/CellComparator.java    |  2 +-
 .../apache/hadoop/hbase/CellComparatorImpl.java    | 10 ++--
 .../java/org/apache/hadoop/hbase/net/Address.java  |  9 ++-
 .../hadoop/hbase/trace/SpanReceiverHost.java       | 10 ++--
 .../org/apache/hadoop/hbase/trace/TraceUtil.java   |  6 +-
 .../apache/hadoop/hbase/util/ByteRangeUtils.java   | 12 ++--
 .../apache/hadoop/hbase/util/CommonFSUtils.java    | 70 ++++++++--------------
 .../hadoop/hbase/util/ConcatenatedLists.java       |  5 +-
 .../java/org/apache/hadoop/hbase/util/Order.java   | 41 +++++++++----
 10 files changed, 89 insertions(+), 81 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/BBKVComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/BBKVComparator.java
index 017586d..bc76a9d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/BBKVComparator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/BBKVComparator.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase;
 import java.util.Comparator;
 
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
-import org.apache.hbase.thirdparty.com.google.common.primitives.Longs;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.primitives.Longs;
 
 /**
  * A comparator for case where {@link ByteBufferKeyValue} is prevalent type (BBKV
@@ -71,7 +71,6 @@ public class BBKVComparator implements Comparator {
 
   @Override
   public int compare(Object l, Object r) {
-    // LOG.info("ltype={} rtype={}", l, r);
     if ((l instanceof ByteBufferKeyValue) && (r instanceof ByteBufferKeyValue)) {
       return compare((ByteBufferKeyValue)l, (ByteBufferKeyValue)r, false);
     }
@@ -81,7 +80,7 @@ public class BBKVComparator implements Comparator {
 
   // TODO: Come back here. We get a few percentage points extra of throughput if this is a
   // private method.
-  static final int compare(ByteBufferKeyValue left, ByteBufferKeyValue right,
+  static int compare(ByteBufferKeyValue left, ByteBufferKeyValue right,
       boolean ignoreSequenceid) {
     // NOTE: Same method is in CellComparatorImpl, also private, not shared, intentionally. Not
     // sharing gets us a few percent more throughput in compares. If changes here or there, make
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
index 3529d54..83a868d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
@@ -54,7 +54,7 @@ public interface CellComparator extends Comparator<Cell> {
   /**
    * Compare cells.
    * @param ignoreSequenceid True if we are to compare the key portion only and ignore
-   * the sequenceid. Set to false to compare key and consider sequenceid.
+   *    the sequenceid. Set to false to compare key and consider sequenceid.
    * @return 0 if equal, -1 if a &lt; b, and +1 if a &gt; b.
    */
   int compare(Cell leftCell, Cell rightCell, boolean ignoreSequenceid);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
index 707d919..c647318 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import java.util.Comparator;
@@ -23,13 +22,12 @@ import java.util.Comparator;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.thirdparty.com.google.common.primitives.Longs;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-
+import org.apache.hbase.thirdparty.com.google.common.primitives.Longs;
 
 /**
  * Compare two HBase cells.  Do not use this method comparing <code>-ROOT-</code> or
@@ -52,11 +50,13 @@ import org.slf4j.LoggerFactory;
 @InterfaceStability.Evolving
 public class CellComparatorImpl implements CellComparator {
   static final Logger LOG = LoggerFactory.getLogger(CellComparatorImpl.class);
+
   /**
    * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
    * of KeyValue only.
    */
   public static final CellComparatorImpl COMPARATOR = new CellComparatorImpl();
+
   /**
    * A {@link CellComparatorImpl} for <code>hbase:meta</code> catalog table
    * {@link KeyValue}s.
@@ -342,7 +342,7 @@ public class CellComparatorImpl implements CellComparator {
           return -1;
         } else if (rightDelimiter < 0 && leftDelimiter >= 0) {
           return 1;
-        } else if (leftDelimiter < 0 && rightDelimiter < 0) {
+        } else if (leftDelimiter < 0) {
           return 0;
         }
       }
@@ -365,7 +365,7 @@ public class CellComparatorImpl implements CellComparator {
           return -1;
         } else if (rightDelimiter < 0 && leftDelimiter >= 0) {
           return 1;
-        } else if (leftDelimiter < 0 && rightDelimiter < 0) {
+        } else if (leftDelimiter < 0) {
           return 0;
         }
       }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
index ea4ba12..d76ef9f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
@@ -31,7 +31,7 @@ import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;
  * We cannot have Guava classes in our API hence this Type.
  */
 @InterfaceAudience.Public
-public class Address implements Comparable<Address> {
+public final class Address implements Comparable<Address> {
   private HostAndPort hostAndPort;
 
   private Address(HostAndPort hostAndPort) {
@@ -62,7 +62,7 @@ public class Address implements Comparable<Address> {
   /**
    * If hostname is a.b.c and the port is 123, return a:123 instead of a.b.c:123.
    * @return if host looks like it is resolved -- not an IP -- then strip the domain portion
-   * otherwise returns same as {@link #toString()}}
+   *    otherwise returns same as {@link #toString()}}
    */
   public String toStringWithoutDomain() {
     String hostname = getHostname();
@@ -100,7 +100,10 @@ public class Address implements Comparable<Address> {
   @Override
   public int compareTo(Address that) {
     int compare = this.getHostname().compareTo(that.getHostname());
-    if (compare != 0) return compare;
+    if (compare != 0) {
+      return compare;
+    }
+
     return this.getPort() - that.getPort();
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index 14ef945..b967db7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -40,7 +40,7 @@ public class SpanReceiverHost {
   private Configuration conf;
   private boolean closed = false;
 
-  private static enum SingletonHolder {
+  private enum SingletonHolder {
     INSTANCE;
     final transient Object lock = new Object();
     transient SpanReceiverHost host = null;
@@ -78,7 +78,6 @@ public class SpanReceiverHost {
   /**
    * Reads the names of classes specified in the {@code hbase.trace.spanreceiver.classes} property
    * and instantiates and registers them with the Tracer.
-   *
    */
   public void loadSpanReceivers() {
     String[] receiverNames = conf.getStrings(SPAN_RECEIVERS_CONF_KEY);
@@ -93,7 +92,7 @@ public class SpanReceiverHost {
       SpanReceiver receiver = builder.className(className).build();
       if (receiver != null) {
         receivers.add(receiver);
-        LOG.info("SpanReceiver " + className + " was loaded successfully.");
+        LOG.info("SpanReceiver {} was loaded successfully.", className);
       }
     }
     for (SpanReceiver rcvr : receivers) {
@@ -105,7 +104,10 @@ public class SpanReceiverHost {
    * Calls close() on all SpanReceivers created by this SpanReceiverHost.
    */
   public synchronized void closeReceivers() {
-    if (closed) return;
+    if (closed) {
+      return;
+    }
+
     closed = true;
     for (SpanReceiver rcvr : receivers) {
       try {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
index 89386f4..10665d8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
@@ -38,7 +38,7 @@ public final class TraceUtil {
   }
 
   public static void initTracer(Configuration c) {
-    if(c != null) {
+    if (c != null) {
       conf = new HBaseHTraceConfiguration(c);
     }
 
@@ -62,7 +62,9 @@ public final class TraceUtil {
    * @return TraceScope or null when not tracing
    */
   public static TraceScope createTrace(String description, Span span) {
-    if(span == null) return createTrace(description);
+    if (span == null) {
+      return createTrace(description);
+    }
 
     return (tracer == null) ? null : tracer.newScope(description, span.getSpanId());
   }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
index 90f3bf3..fb0b336 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
@@ -31,12 +30,16 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
  * Utility methods for working with {@link ByteRange}.
  */
 @InterfaceAudience.Public
-public class ByteRangeUtils {
+public final class ByteRangeUtils {
+  private ByteRangeUtils() {
+  }
 
   public static int numEqualPrefixBytes(ByteRange left, ByteRange right, int rightInnerOffset) {
     int maxCompares = Math.min(left.getLength(), right.getLength() - rightInnerOffset);
-    final byte[] lbytes = left.getBytes(), rbytes = right.getBytes();
-    final int loffset = left.getOffset(), roffset = right.getOffset();
+    final byte[] lbytes = left.getBytes();
+    final byte[] rbytes = right.getBytes();
+    final int loffset = left.getOffset();
+    final int roffset = right.getOffset();
     for (int i = 0; i < maxCompares; ++i) {
       if (lbytes[loffset + i] != rbytes[roffset + rightInnerOffset + i]) {
         return i;
@@ -76,5 +79,4 @@ public class ByteRangeUtils {
     os.write(byteRange.getBytes(), byteRange.getOffset() + byteRangeInnerOffset,
       byteRange.getLength() - byteRangeInnerOffset);
   }
-
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
index 9f68234..6a9f73d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java
@@ -63,7 +63,8 @@ public abstract class CommonFSUtils {
   public static final String HBASE_WAL_DIR = "hbase.wal.dir";
 
   /** Parameter to disable stream capability enforcement checks */
-  public static final String UNSAFE_STREAM_CAPABILITY_ENFORCE = "hbase.unsafe.stream.capability.enforce";
+  public static final String UNSAFE_STREAM_CAPABILITY_ENFORCE =
+    "hbase.unsafe.stream.capability.enforce";
 
   /** Full access permissions (starting point for a umask) */
   public static final String FULL_RWX_PERMISSIONS = "777";
@@ -139,8 +140,7 @@ public abstract class CommonFSUtils {
    * @return True if deleted <code>dir</code>
    * @throws IOException e
    */
-  public static boolean deleteDirectory(final FileSystem fs, final Path dir)
-  throws IOException {
+  public static boolean deleteDirectory(final FileSystem fs, final Path dir) throws IOException {
     return fs.exists(dir) && fs.delete(dir, true);
   }
 
@@ -159,7 +159,7 @@ public abstract class CommonFSUtils {
     Method m = null;
     Class<? extends FileSystem> cls = fs.getClass();
     try {
-      m = cls.getMethod("getDefaultBlockSize", new Class<?>[] { Path.class });
+      m = cls.getMethod("getDefaultBlockSize", Path.class);
     } catch (NoSuchMethodException e) {
       LOG.info("FileSystem doesn't support getDefaultBlockSize");
     } catch (SecurityException e) {
@@ -194,7 +194,7 @@ public abstract class CommonFSUtils {
     Method m = null;
     Class<? extends FileSystem> cls = fs.getClass();
     try {
-      m = cls.getMethod("getDefaultReplication", new Class<?>[] { Path.class });
+      m = cls.getMethod("getDefaultReplication", Path.class);
     } catch (NoSuchMethodException e) {
       LOG.info("FileSystem doesn't support getDefaultReplication");
     } catch (SecurityException e) {
@@ -247,7 +247,7 @@ public abstract class CommonFSUtils {
   public static FSDataOutputStream create(FileSystem fs, Path path,
       FsPermission perm, boolean overwrite) throws IOException {
     if (LOG.isTraceEnabled()) {
-      LOG.trace("Creating file=" + path + " with permission=" + perm + ", overwrite=" + overwrite);
+      LOG.trace("Creating file={} with permission={}, overwrite={}", path, perm, overwrite);
     }
     return fs.create(path, perm, overwrite, getDefaultBufferSize(fs),
         getDefaultReplication(fs, path), getDefaultBlockSize(fs, path), null);
@@ -362,11 +362,11 @@ public abstract class CommonFSUtils {
     return p.makeQualified(fs.getUri(), fs.getWorkingDirectory());
   }
 
-  public static void setRootDir(final Configuration c, final Path root) throws IOException {
+  public static void setRootDir(final Configuration c, final Path root) {
     c.set(HConstants.HBASE_DIR, root.toString());
   }
 
-  public static void setFsDefault(final Configuration c, final Path root) throws IOException {
+  public static void setFsDefault(final Configuration c, final Path root) {
     c.set("fs.defaultFS", root.toString());    // for hadoop 0.21+
   }
 
@@ -392,7 +392,7 @@ public abstract class CommonFSUtils {
   }
 
   @VisibleForTesting
-  public static void setWALRootDir(final Configuration c, final Path root) throws IOException {
+  public static void setWALRootDir(final Configuration c, final Path root) {
     c.set(HBASE_WAL_DIR, root.toString());
   }
 
@@ -501,8 +501,7 @@ public abstract class CommonFSUtils {
   // this mapping means that under a federated FileSystem implementation, we'll
   // only log the first failure from any of the underlying FileSystems at WARN and all others
   // will be at DEBUG.
-  private static final Map<FileSystem, Boolean> warningMap =
-      new ConcurrentHashMap<FileSystem, Boolean>();
+  private static final Map<FileSystem, Boolean> warningMap = new ConcurrentHashMap<>();
 
   /**
    * Sets storage policy for given path.
@@ -574,8 +573,7 @@ public abstract class CommonFSUtils {
     Method m = null;
     Exception toThrow = null;
     try {
-      m = fs.getClass().getDeclaredMethod("setStoragePolicy",
-        new Class<?>[] { Path.class, String.class });
+      m = fs.getClass().getDeclaredMethod("setStoragePolicy", Path.class, String.class);
       m.setAccessible(true);
     } catch (NoSuchMethodException e) {
       toThrow = e;
@@ -607,7 +605,7 @@ public abstract class CommonFSUtils {
       try {
         m.invoke(fs, path, storagePolicy);
         if (LOG.isDebugEnabled()) {
-          LOG.debug("Set storagePolicy=" + storagePolicy + " for path=" + path);
+          LOG.debug("Set storagePolicy={} for path={}", storagePolicy, path);
         }
       } catch (Exception e) {
         toThrow = e;
@@ -679,8 +677,7 @@ public abstract class CommonFSUtils {
    * @return Returns the filesystem of the hbase rootdir.
    * @throws IOException from underlying FileSystem
    */
-  public static FileSystem getCurrentFileSystem(Configuration conf)
-  throws IOException {
+  public static FileSystem getCurrentFileSystem(Configuration conf) throws IOException {
     return getRootDir(conf).getFileSystem(conf);
   }
 
@@ -698,7 +695,7 @@ public abstract class CommonFSUtils {
    * @param filter path filter
    * @return null if dir is empty or doesn't exist, otherwise FileStatus array
    */
-  public static FileStatus [] listStatus(final FileSystem fs,
+  public static FileStatus[] listStatus(final FileSystem fs,
       final Path dir, final PathFilter filter) throws IOException {
     FileStatus [] status = null;
     try {
@@ -706,7 +703,7 @@ public abstract class CommonFSUtils {
     } catch (FileNotFoundException fnfe) {
       // if directory doesn't exist, return null
       if (LOG.isTraceEnabled()) {
-        LOG.trace(dir + " doesn't exist");
+        LOG.trace("{} doesn't exist", dir);
       }
     }
     if (status == null || status.length < 1) {
@@ -749,7 +746,7 @@ public abstract class CommonFSUtils {
     } catch (FileNotFoundException fnfe) {
       // if directory doesn't exist, return null
       if (LOG.isTraceEnabled()) {
-        LOG.trace(dir + " doesn't exist");
+        LOG.trace("{} doesn't exist", dir);
       }
     }
     return status;
@@ -785,13 +782,13 @@ public abstract class CommonFSUtils {
    * Log the current state of the filesystem from a certain root directory
    * @param fs filesystem to investigate
    * @param root root file/directory to start logging from
-   * @param LOG log to output information
+   * @param log log to output information
    * @throws IOException if an unexpected exception occurs
    */
-  public static void logFileSystemState(final FileSystem fs, final Path root, Logger LOG)
+  public static void logFileSystemState(final FileSystem fs, final Path root, Logger log)
       throws IOException {
-    LOG.debug("File system contents for path " + root);
-    logFSTree(LOG, fs, root, "|-");
+    log.debug("File system contents for path {}", root);
+    logFSTree(log, fs, root, "|-");
   }
 
   /**
@@ -799,7 +796,7 @@ public abstract class CommonFSUtils {
    *
    * @see #logFileSystemState(FileSystem, Path, Logger)
    */
-  private static void logFSTree(Logger LOG, final FileSystem fs, final Path root, String prefix)
+  private static void logFSTree(Logger log, final FileSystem fs, final Path root, String prefix)
       throws IOException {
     FileStatus[] files = listStatus(fs, root, null);
     if (files == null) {
@@ -808,10 +805,10 @@ public abstract class CommonFSUtils {
 
     for (FileStatus file : files) {
       if (file.isDirectory()) {
-        LOG.debug(prefix + file.getPath().getName() + "/");
-        logFSTree(LOG, fs, file.getPath(), prefix + "---");
+        log.debug(prefix + file.getPath().getName() + "/");
+        logFSTree(log, fs, file.getPath(), prefix + "---");
       } else {
-        LOG.debug(prefix + file.getPath().getName());
+        log.debug(prefix + file.getPath().getName());
       }
     }
   }
@@ -824,25 +821,6 @@ public abstract class CommonFSUtils {
   }
 
   /**
-   * Do our short circuit read setup.
-   * Checks buffer size to use and whether to do checksumming in hbase or hdfs.
-   * @param conf must not be null
-   */
-  public static void setupShortCircuitRead(final Configuration conf) {
-    // Check that the user has not set the "dfs.client.read.shortcircuit.skip.checksum" property.
-    boolean shortCircuitSkipChecksum =
-      conf.getBoolean("dfs.client.read.shortcircuit.skip.checksum", false);
-    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);
-    if (shortCircuitSkipChecksum) {
-      LOG.warn("Configuration \"dfs.client.read.shortcircuit.skip.checksum\" should not " +
-        "be set to true." + (useHBaseChecksum ? " HBase checksum doesn't require " +
-        "it, see https://issues.apache.org/jira/browse/HBASE-6868." : ""));
-      assert !shortCircuitSkipChecksum; //this will fail if assertions are on
-    }
-    checkShortCircuitReadBufferSize(conf);
-  }
-
-  /**
    * Check if short circuit read buffer size is set and if not, set it to hbase value.
    * @param conf must not be null
    */
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
index 3922a6d..50c0d63 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
@@ -79,7 +79,10 @@ public class ConcatenatedLists<T> extends AbstractCollection<T> {
       if (!components.isEmpty()) {
         this.nextWasCalled = true;
         List<T> src = components.get(currentComponent);
-        if (++indexWithinComponent < src.size()) return src.get(indexWithinComponent);
+        if (++indexWithinComponent < src.size()) {
+          return src.get(indexWithinComponent);
+        }
+
         if (++currentComponent < components.size()) {
           indexWithinComponent = 0;
           src = components.get(currentComponent);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java
index b37142a..9d864ce 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java
@@ -27,22 +27,31 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Public
 public enum Order {
-
   ASCENDING {
     @Override
-    public int cmp(int cmp) { /* noop */ return cmp; }
+    public int cmp(int cmp) {
+      /* noop */ return cmp;
+    }
 
     @Override
-    public byte apply(byte val) { /* noop */ return val; }
+    public byte apply(byte val) {
+      /* noop */ return val;
+    }
 
     @Override
-    public void apply(byte[] val) { /* noop */ }
+    public void apply(byte[] val) {
+      /* noop */
+    }
 
     @Override
-    public void apply(byte[] val, int offset, int length) { /* noop */ }
+    public void apply(byte[] val, int offset, int length) {
+      /* noop */
+    }
 
     @Override
-    public String toString() { return "ASCENDING"; }
+    public String toString() {
+      return "ASCENDING";
+    }
   },
 
   DESCENDING {
@@ -53,23 +62,33 @@ public enum Order {
     private static final byte MASK = (byte) 0xff;
 
     @Override
-    public int cmp(int cmp) { return -1 * cmp; }
+    public int cmp(int cmp) {
+      return -1 * cmp;
+    }
 
     @Override
-    public byte apply(byte val) { return (byte) (val ^ MASK); }
+    public byte apply(byte val) {
+      return (byte) (val ^ MASK);
+    }
 
     @Override
     public void apply(byte[] val) {
-      for (int i = 0; i < val.length; i++) { val[i] ^= MASK; }
+      for (int i = 0; i < val.length; i++) {
+        val[i] ^= MASK;
+      }
     }
 
     @Override
     public void apply(byte[] val, int offset, int length) {
-      for (int i = 0; i < length; i++) { val[offset + i] ^= MASK; }
+      for (int i = 0; i < length; i++) {
+        val[offset + i] ^= MASK;
+      }
     }
 
     @Override
-    public String toString() { return "DESCENDING"; }
+    public String toString() {
+      return "DESCENDING";
+    }
   };
 
   /**