Posted to commits@hbase.apache.org by ja...@apache.org on 2019/05/27 20:22:38 UTC

[hbase] branch branch-2 updated: HBASE-22326 Fixed Checkstyle errors in hbase-examples

This is an automated email from the ASF dual-hosted git repository.

janh pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 71a1cd8  HBASE-22326 Fixed Checkstyle errors in hbase-examples
71a1cd8 is described below

commit 71a1cd83fc89bb489661f756182a43105af929bf
Author: Jan Hentschel <ja...@ultratendency.com>
AuthorDate: Mon Apr 29 16:11:24 2019 +0200

    HBASE-22326 Fixed Checkstyle errors in hbase-examples
---
 hbase-examples/pom.xml                             |  7 ++++
 .../hbase/client/example/RefreshHFilesClient.java  | 45 +++++++++++-----------
 .../coprocessor/example/BulkDeleteEndpoint.java    |  7 ++--
 .../example/ExampleMasterObserverWithMetrics.java  |  4 +-
 .../coprocessor/example/RefreshHFilesEndpoint.java |  6 +--
 .../hadoop/hbase/mapreduce/IndexBuilder.java       |  6 +--
 .../hadoop/hbase/mapreduce/SampleUploader.java     | 11 ++----
 .../org/apache/hadoop/hbase/thrift/DemoClient.java | 14 +++----
 .../apache/hadoop/hbase/thrift/HttpDoAsClient.java | 20 +++++-----
 .../apache/hadoop/hbase/thrift2/DemoClient.java    | 16 ++++----
 .../coprocessor/example/TestRefreshHFilesBase.java |  9 +++--
 .../example/TestRefreshHFilesEndpoint.java         | 10 ++---
 .../hbase/mapreduce/TestMapReduceExamples.java     | 15 ++++----
 13 files changed, 87 insertions(+), 83 deletions(-)

diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index a83b017..7a77579 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -69,6 +69,13 @@
         <groupId>net.revelc.code</groupId>
         <artifactId>warbucks-maven-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <failOnViolation>true</failOnViolation>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
   <dependencies>
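
The added maven-checkstyle-plugin configuration flips the module from merely reporting Checkstyle violations to failing the build on them, which is what keeps the cleanup below from regressing. Assuming a standard Maven setup, the same gate can be exercised locally with 'mvn -pl hbase-examples checkstyle:check' before pushing.
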
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
index 06ad195..0611e71 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.client.example;
 
 import java.io.Closeable;
@@ -51,7 +50,7 @@ public class RefreshHFilesClient extends Configured implements Tool, Closeable {
   /**
    * Constructor with Conf object
    *
-   * @param cfg
+   * @param cfg the {@link Configuration} object to use
    */
   public RefreshHFilesClient(Configuration cfg) {
     try {
@@ -75,26 +74,28 @@ public class RefreshHFilesClient extends Configured implements Tool, Closeable {
   }
 
   public void refreshHFiles(final Table table) throws Throwable {
-    final RefreshHFilesProtos.RefreshHFilesRequest request = RefreshHFilesProtos.RefreshHFilesRequest
-                                                               .getDefaultInstance();
-    table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class, HConstants.EMPTY_START_ROW,
-                             HConstants.EMPTY_END_ROW,
-                             new Batch.Call<RefreshHFilesProtos.RefreshHFilesService,
-                                             RefreshHFilesProtos.RefreshHFilesResponse>() {
-                               @Override
-                               public RefreshHFilesProtos.RefreshHFilesResponse call(
-                                 RefreshHFilesProtos.RefreshHFilesService refreshHFilesService)
-                                 throws IOException {
-                                 ServerRpcController controller = new ServerRpcController();
-                                 BlockingRpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> rpcCallback =
-                                   new BlockingRpcCallback<>();
-                                 refreshHFilesService.refreshHFiles(controller, request, rpcCallback);
-                                 if (controller.failedOnException()) {
-                                   throw controller.getFailedOn();
-                                 }
-                                 return rpcCallback.get();
-                               }
-                             });
+    final RefreshHFilesProtos.RefreshHFilesRequest request =
+            RefreshHFilesProtos.RefreshHFilesRequest.getDefaultInstance();
+    table.coprocessorService(RefreshHFilesProtos.RefreshHFilesService.class,
+            HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
+            new Batch.Call<RefreshHFilesProtos.RefreshHFilesService,
+                    RefreshHFilesProtos.RefreshHFilesResponse>() {
+        @Override
+        public RefreshHFilesProtos.RefreshHFilesResponse call(
+              RefreshHFilesProtos.RefreshHFilesService refreshHFilesService)
+              throws IOException {
+          ServerRpcController controller = new ServerRpcController();
+          BlockingRpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> rpcCallback =
+                new BlockingRpcCallback<>();
+          refreshHFilesService.refreshHFiles(controller, request, rpcCallback);
+
+          if (controller.failedOnException()) {
+            throw controller.getFailedOn();
+          }
+
+          return rpcCallback.get();
+        }
+      });
     LOG.debug("Done refreshing HFiles");
   }
 
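For orientation, a minimal sketch of how the refactored client is driven end to end. This is illustrative only, not part of the commit; the table name "example" and the wrapper class are hypothetical, and refreshHFiles declares throws Throwable, so the caller has to surface that:

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.example.RefreshHFilesClient;

    public class RefreshHFilesSketch {
      public static void main(String[] args) throws Throwable {
        // The client is Closeable, so try-with-resources releases the
        // underlying connection; refreshHFiles fans the request out to
        // every region of the table via the coprocessor call above.
        try (RefreshHFilesClient client = new RefreshHFilesClient(HBaseConfiguration.create())) {
          client.refreshHFiles(TableName.valueOf("example"));
        }
      }
    }
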
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index e2681ae..9455053 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -87,8 +87,8 @@ import org.slf4j.LoggerFactory;
  *     return rpcCallback.get();
  *   }
  * };
- * Map&lt;byte[], BulkDeleteResponse&gt; result = ht.coprocessorService(BulkDeleteService.class, scan
- *     .getStartRow(), scan.getStopRow(), callable);
+ * Map&lt;byte[], BulkDeleteResponse&gt; result = ht.coprocessorService(BulkDeleteService.class,
+ *  scan.getStartRow(), scan.getStopRow(), callable);
  * for (BulkDeleteResponse response : result.values()) {
  *   noOfDeletedRows += response.getRowsDeleted();
  * }
@@ -225,7 +225,8 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro
       int noOfVersionsToDelete = 0;
       if (timestamp == null) {
         for (Cell kv : deleteRow) {
-          delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp());
+          delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv),
+                  kv.getTimestamp());
           noOfVersionsToDelete++;
         }
       } else {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
index 5fe920e..ecc2559 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.coprocessor.example;
 
 import java.io.IOException;
@@ -94,7 +93,8 @@ public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, Mast
   }
 
   @Override
-  public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+  public void preDisableTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
+      TableName tableName) throws IOException {
     // Increment the Counter for disable table operations
     this.disableTableCounter.increment();
   }
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
index 29fe90c..2cb9fd3 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.coprocessor.example;
 
 import com.google.protobuf.RpcCallback;
@@ -57,8 +56,9 @@ public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesServ
   }
 
   @Override
-  public void refreshHFiles(RpcController controller, RefreshHFilesProtos.RefreshHFilesRequest request,
-                            RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
+  public void refreshHFiles(RpcController controller,
+      RefreshHFilesProtos.RefreshHFilesRequest request,
+      RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
     try {
       for (Store store : env.getRegion().getStores()) {
         LOG.debug("Refreshing HFiles for region: " + store.getRegionInfo().getRegionNameAsString() +
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 3098ac2..b386b2b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -52,7 +52,8 @@ import org.apache.yetus.audience.InterfaceAudience;
  * Modify ${HADOOP_HOME}/conf/hadoop-env.sh to include the hbase jar, the
  * zookeeper jar (can be found in lib/ directory under HBase root, the examples output directory,
  * and the hbase conf directory in HADOOP_CLASSPATH, and then run
- * <tt><strong>bin/hadoop org.apache.hadoop.hbase.mapreduce.IndexBuilder TABLE_NAME COLUMN_FAMILY ATTR [ATTR ...]</strong></tt>
+ * <tt><strong>bin/hadoop org.apache.hadoop.hbase.mapreduce.IndexBuilder
+ *  TABLE_NAME COLUMN_FAMILY ATTR [ATTR ...]</strong></tt>
  * </p>
  *
  * <p>
@@ -117,8 +118,7 @@ public class IndexBuilder extends Configured implements Tool {
   /**
    * Job configuration.
    */
-  public static Job configureJob(Configuration conf, String [] args)
-  throws IOException {
+  public static Job configureJob(Configuration conf, String [] args) throws IOException {
     String tableName = args[0];
     String columnFamily = args[1];
     System.out.println("****" + tableName);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 1248f87..6dbbfe4 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -63,16 +63,12 @@ public class SampleUploader extends Configured implements Tool {
 
   private static final String NAME = "SampleUploader";
 
-  static class Uploader
-  extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
-
+  static class Uploader extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
     private long checkpoint = 100;
     private long count = 0;
 
     @Override
-    public void map(LongWritable key, Text line, Context context)
-    throws IOException {
-
+    public void map(LongWritable key, Text line, Context context) throws IOException {
       // Input is a CSV file
       // Each map() is a single line, where the key is the line number
       // Each line is comma-delimited; row,family,qualifier,value
@@ -113,8 +109,7 @@ public class SampleUploader extends Configured implements Tool {
   /**
    * Job configuration.
    */
-  public static Job configureJob(Configuration conf, String [] args)
-  throws IOException {
+  public static Job configureJob(Configuration conf, String [] args) throws IOException {
     Path inputPath = new Path(args[0]);
     String tableName = args[1];
     Job job = new Job(conf, NAME + "_" + tableName);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index cb3e930..dba2719 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -194,11 +194,11 @@ public class DemoClient {
 
     // Test UTF-8 handling
     byte[] invalid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
-            (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1};
+                      (byte) 0xfc, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1, (byte) 0xa1};
     byte[] valid = {(byte) 'f', (byte) 'o', (byte) 'o', (byte) '-',
-            (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83,
-            (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3,
-            (byte) 0x83, (byte) 0xAB};
+                    (byte) 0xE7, (byte) 0x94, (byte) 0x9F, (byte) 0xE3, (byte) 0x83,
+                    (byte) 0x93, (byte) 0xE3, (byte) 0x83, (byte) 0xBC, (byte) 0xE3,
+                    (byte) 0x83, (byte) 0xAB};
 
     ArrayList<Mutation> mutations;
     // non-utf8 is fine for data
@@ -423,9 +423,9 @@ public class DemoClient {
             options.put("debug", "true");
 
             return new AppConfigurationEntry[]{
-                new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    options)};
+              new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
+                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                            options)};
           }
         });
 
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 422d405..f4cb616 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -68,9 +68,7 @@ public class HttpDoAsClient {
   static protected String principal = null;
 
   public static void main(String[] args) throws Exception {
-
     if (args.length < 3 || args.length > 4) {
-
       System.out.println("Invalid arguments!");
       System.out.println("Usage: HttpDoAsClient host port doAsUserName [security=true]");
       System.exit(-1);
@@ -146,8 +144,6 @@ public class HttpDoAsClient {
       }
     }
 
-
-
     //
     // Create the demo table with two column families, entry: and unused:
     //
@@ -175,7 +171,7 @@ public class HttpDoAsClient {
     Map<ByteBuffer, ColumnDescriptor> columnMap = refresh(client, httpClient)
         .getColumnDescriptors(ByteBuffer.wrap(t));
     for (ColumnDescriptor col2 : columnMap.values()) {
-      System.out.println("  column: " + utf8(col2.name.array()) + ", maxVer: " + Integer.toString(col2.maxVersions));
+      System.out.println("  column: " + utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
     }
 
     transport.close();
@@ -184,7 +180,7 @@ public class HttpDoAsClient {
 
   private Hbase.Client refresh(Hbase.Client client, THttpClient httpClient) {
     httpClient.setCustomHeader("doAs", doAsUser);
-    if(secure) {
+    if (secure) {
       try {
         httpClient.setCustomHeader("Authorization", generateTicket());
       } catch (GSSException e) {
@@ -235,7 +231,6 @@ public class HttpDoAsClient {
 
   private void printRow(TRowResult rowResult) {
     // copy values into a TreeMap to get them in sorted order
-
     TreeMap<String, TCell> sorted = new TreeMap<>();
     for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
       sorted.put(utf8(column.getKey().array()), column.getValue());
@@ -252,7 +247,10 @@ public class HttpDoAsClient {
   }
 
   static Subject getSubject() throws Exception {
-    if (!secure) return new Subject();
+    if (!secure) {
+      return new Subject();
+    }
+
     /*
      * To authenticate the DemoClient, kinit should be invoked ahead.
      * Here we try to get the Kerberos credential from the ticket cache.
@@ -276,9 +274,9 @@ public class HttpDoAsClient {
             options.put("debug", "true");
 
             return new AppConfigurationEntry[]{
-                new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    options)};
+              new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
+                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                            options)};
           }
         });
     context.login();
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
index 7fbe2aa..a720143 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
@@ -45,7 +45,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public class DemoClient {
-
   private static String host = "localhost";
   private static int port = 9090;
   private static boolean secure = false;
@@ -54,7 +53,8 @@ public class DemoClient {
   public static void main(String[] args) throws Exception {
     System.out.println("Thrift2 Demo");
     System.out.println("Usage: DemoClient [host=localhost] [port=9090] [secure=false]");
-    System.out.println("This demo assumes you have a table called \"example\" with a column family called \"family1\"");
+    System.out.println("This demo assumes you have a table called \"example\" with a column " +
+            "family called \"family1\"");
 
     // use passed in arguments instead of defaults
     if (args.length >= 1) {
@@ -95,7 +95,7 @@ public class DemoClient {
     if (framed) {
       transport = new TFramedTransport(transport);
     } else if (secure) {
-      /**
+      /*
        * The Thrift server the DemoClient is trying to connect to
        * must have a matching principal, and support authentication.
        *
@@ -148,7 +148,9 @@ public class DemoClient {
   }
 
   static Subject getSubject() throws Exception {
-    if (!secure) return new Subject();
+    if (!secure) {
+      return new Subject();
+    }
 
     /*
      * To authenticate the DemoClient, kinit should be invoked ahead.
@@ -173,9 +175,9 @@ public class DemoClient {
           options.put("debug", "true");
 
           return new AppConfigurationEntry[]{
-              new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
-                  AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                  options)};
+            new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
+                          AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                          options)};
         }
       });
     context.login();
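
Since the same anonymous JAAS Configuration appears in all three demo clients touched above, here is a self-contained sketch of that pattern. It is an illustration under stated assumptions, not code from this commit; the login entry name "client", the class name, and the exact option set are arbitrary:

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.auth.Subject;
    import javax.security.auth.login.AppConfigurationEntry;
    import javax.security.auth.login.LoginContext;

    public class JaasSketch {
      static Subject kerberosSubject() throws Exception {
        // Programmatic JAAS config: read the Kerberos credential from the
        // local ticket cache (run kinit first), no keytab involved.
        javax.security.auth.login.Configuration conf =
            new javax.security.auth.login.Configuration() {
              @Override
              public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
                Map<String, String> options = new HashMap<>();
                options.put("useKeyTab", "false");
                options.put("useTicketCache", "true");
                return new AppConfigurationEntry[] {
                  new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
                      AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
              }
            };
        // A null CallbackHandler is fine here: the ticket-cache path of
        // Krb5LoginModule never prompts the user.
        LoginContext context = new LoginContext("client", new Subject(), null, conf);
        context.login();
        return context.getSubject();
      }
    }
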
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java
index b948b62..e84929d 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesBase.java
@@ -55,7 +55,8 @@ public class TestRefreshHFilesBase {
       CONF.set(HConstants.REGION_IMPL, regionImpl);
       CONF.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
 
-      CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, RefreshHFilesEndpoint.class.getName());
+      CONF.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+              RefreshHFilesEndpoint.class.getName());
       cluster = HTU.startMiniCluster(NUM_RS);
 
       // Create table
@@ -80,9 +81,9 @@ public class TestRefreshHFilesBase {
     for (Region region : cluster.getRegions(TABLE_NAME)) {
       Path regionDir = new Path(tableDir, region.getRegionInfo().getEncodedName());
       Path familyDir = new Path(regionDir, Bytes.toString(FAMILY));
-      HFileTestUtil
-          .createHFile(HTU.getConfiguration(), HTU.getTestFileSystem(), new Path(familyDir, HFILE_NAME), FAMILY,
-              QUALIFIER, Bytes.toBytes("50"), Bytes.toBytes("60"), NUM_ROWS);
+      HFileTestUtil.createHFile(HTU.getConfiguration(), HTU.getTestFileSystem(),
+              new Path(familyDir, HFILE_NAME), FAMILY, QUALIFIER, Bytes.toBytes("50"),
+              Bytes.toBytes("60"), NUM_ROWS);
     }
   }
 }
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
index 3f9c23b..e726a11 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
@@ -44,7 +44,6 @@ import org.junit.experimental.categories.Category;
 
 @Category(MediumTests.class)
 public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestRefreshHFilesEndpoint.class);
@@ -69,8 +68,9 @@ public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
       RefreshHFilesClient refreshHFilesClient = new RefreshHFilesClient(CONF);
       refreshHFilesClient.refreshHFiles(TABLE_NAME);
     } catch (RetriesExhaustedException rex) {
-      if (rex.getCause() instanceof IOException)
+      if (rex.getCause() instanceof IOException) {
         throw new IOException();
+      }
     } catch (Throwable ex) {
       LOG.error(ex.toString(), ex);
       fail("Couldn't call the RefreshRegionHFilesEndpoint");
@@ -81,15 +81,15 @@ public class TestRefreshHFilesEndpoint extends TestRefreshHFilesBase {
     HStoreWithFaultyRefreshHFilesAPI store;
 
     public HRegionForRefreshHFilesEP(final Path tableDir, final WAL wal, final FileSystem fs,
-                                     final Configuration confParam, final RegionInfo regionInfo,
-                                     final TableDescriptor htd, final RegionServerServices rsServices) {
+        final Configuration confParam, final RegionInfo regionInfo, final TableDescriptor htd,
+        final RegionServerServices rsServices) {
       super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
     }
 
     @Override
     public List<HStore> getStores() {
       List<HStore> list = new ArrayList<>(stores.size());
-      /**
+      /*
        * This is used to trigger the custom definition (faulty)
        * of refresh HFiles API.
        */
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
index 3b7f783..43dba2c 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
@@ -17,8 +17,13 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
@@ -49,7 +54,6 @@ import org.mockito.stubbing.Answer;
 
 @Category({MapReduceTests.class, LargeTests.class})
 public class TestMapReduceExamples {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestMapReduceExamples.class);
@@ -59,11 +63,9 @@ public class TestMapReduceExamples {
   /**
    * Test SampleUploader from examples
    */
-
   @SuppressWarnings("unchecked")
   @Test
   public void testSampleUploader() throws Exception {
-
     Configuration configuration = new Configuration();
     Uploader uploader = new Uploader();
     Mapper<LongWritable, Text, ImmutableBytesWritable, Put>.Context ctx = mock(Context.class);
@@ -86,7 +88,6 @@ public class TestMapReduceExamples {
     String[] args = { dir.toString(), "simpleTable" };
     Job job = SampleUploader.configureJob(configuration, args);
     assertEquals(SequenceFileInputFormat.class, job.getInputFormatClass());
-
   }
 
   /**
@@ -179,11 +180,9 @@ public class TestMapReduceExamples {
         assertTrue(data.toString().contains(
             "Usage: IndexBuilder <TABLE_NAME> <COLUMN_FAMILY> <ATTR> [<ATTR> ...]"));
       }
-
     } finally {
       System.setErr(oldPrintStream);
       System.setSecurityManager(SECURITY_MANAGER);
     }
-
   }
 }