Posted to commits@hbase.apache.org by st...@apache.org on 2017/03/07 19:23:31 UTC

[18/22] hbase git commit: HBASE-17532 Replaced explicit type with diamond operator
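
For readers skimming the hunks below: the diamond operator, introduced in Java 7, lets the compiler infer a generic class's type arguments at instantiation from the context, so the right-hand side no longer repeats what the left-hand side already states. A minimal, self-contained sketch of the before/after pattern this commit applies throughout (class and variable names here are illustrative, not from the HBase source):

    import java.util.ArrayList;
    import java.util.List;

    public class DiamondDemo {
        public static void main(String[] args) {
            // Pre-Java 7 form: type arguments spelled out on both sides.
            List<String> before = new ArrayList<String>();

            // Java 7+ form: <String> is inferred from the declared type.
            List<String> after = new ArrayList<>();

            before.add("identical runtime behavior");
            after.add("only the source text shrinks");
            System.out.println(before + " " + after);
        }
    }

Because generics are erased at compile time, the two forms produce identical bytecode; the change is purely a source-level cleanup.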

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 1dab633..01e9ef3 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -103,7 +103,7 @@ public class IndexBuilder extends Configured implements Tool {
       String[] fields = configuration.getStrings("index.fields");
       String familyName = configuration.get("index.familyname");
       family = Bytes.toBytes(familyName);
-      indexes = new TreeMap<byte[], ImmutableBytesWritable>(Bytes.BYTES_COMPARATOR);
+      indexes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
       for(String field : fields) {
         // if the table is "people" and the field to index is "email", then the
         // index table will be called "people-email"
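
Worth noting in the hunk above: inference comes from the declared type of the target, not from the constructor arguments. Bytes.BYTES_COMPARATOR fixes only the ordering, while the TreeMap's key and value types are taken from the declaration of the indexes field. A self-contained sketch of the same shape, using a JDK comparator rather than the HBase one:

    import java.util.TreeMap;

    public class ComparatorDiamond {
        public static void main(String[] args) {
            // Key/value types are inferred from the left-hand side; the
            // comparator argument supplies only the ordering.
            TreeMap<String, Integer> sorted =
                new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
            sorted.put("B", 2);
            sorted.put("a", 1);
            System.out.println(sorted.firstKey()); // prints "a"
        }
    }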

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index b16ef7b..cb0cfbb 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -114,7 +114,7 @@ public class DemoClient {
     private void run() throws Exception {
         TTransport transport = new TSocket(host, port);
         if (secure) {
-          Map<String, String> saslProperties = new HashMap<String, String>();
+          Map<String, String> saslProperties = new HashMap<>();
           saslProperties.put(Sasl.QOP, "auth-conf,auth-int,auth");
           /**
            * The Thrift server the DemoClient is trying to connect to
@@ -154,7 +154,7 @@ public class DemoClient {
         //
         // Create the demo table with two column families, entry: and unused:
         //
-        ArrayList<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>(2);
+        ArrayList<ColumnDescriptor> columns = new ArrayList<>(2);
         ColumnDescriptor col;
         col = new ColumnDescriptor();
         col.name = ByteBuffer.wrap(bytes("entry:"));
@@ -194,7 +194,7 @@ public class DemoClient {
 
         ArrayList<Mutation> mutations;
         // non-utf8 is fine for data
-        mutations = new ArrayList<Mutation>(1);
+        mutations = new ArrayList<>(1);
         mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")),
             ByteBuffer.wrap(invalid), writeToWal));
         client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(bytes("foo")),
@@ -202,19 +202,19 @@ public class DemoClient {
 
 
         // this row name is valid utf8
-        mutations = new ArrayList<Mutation>(1);
+        mutations = new ArrayList<>(1);
         mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(valid), writeToWal));
         client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(valid), mutations, dummyAttributes);
 
         // non-utf8 is now allowed in row names because HBase stores values as binary
 
-        mutations = new ArrayList<Mutation>(1);
+        mutations = new ArrayList<>(1);
         mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
         client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(invalid), mutations, dummyAttributes);
 
 
         // Run a scanner on the rows we just created
-        ArrayList<ByteBuffer> columnNames = new ArrayList<ByteBuffer>();
+        ArrayList<ByteBuffer> columnNames = new ArrayList<>();
         columnNames.add(ByteBuffer.wrap(bytes("entry:")));
 
         System.out.println("Starting scanner...");
@@ -238,7 +238,7 @@ public class DemoClient {
             nf.setGroupingUsed(false);
             byte[] row = bytes(nf.format(i));
 
-            mutations = new ArrayList<Mutation>(1);
+            mutations = new ArrayList<>(1);
             mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("unused:")), ByteBuffer.wrap(bytes("DELETE_ME")), writeToWal));
             client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
             printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
@@ -251,14 +251,14 @@ public class DemoClient {
                 // no-op
             }
 
-            mutations = new ArrayList<Mutation>(2);
+            mutations = new ArrayList<>(2);
             mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes("0")), writeToWal));
             mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(bytes("FOO")), writeToWal));
             client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
             printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
 
             Mutation m;
-            mutations = new ArrayList<Mutation>(2);
+            mutations = new ArrayList<>(2);
             m = new Mutation();
             m.column = ByteBuffer.wrap(bytes("entry:foo"));
             m.isDelete = true;
@@ -270,7 +270,7 @@ public class DemoClient {
             client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
             printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
 
-            mutations = new ArrayList<Mutation>();
+            mutations = new ArrayList<>();
             mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:num")), ByteBuffer.wrap(bytes(Integer.toString(i))), writeToWal));
             mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:sqr")), ByteBuffer.wrap(bytes(Integer.toString(i * i))), writeToWal));
             client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
@@ -347,7 +347,7 @@ public class DemoClient {
     private void printRow(TRowResult rowResult) {
         // copy values into a TreeMap to get them in sorted order
 
-        TreeMap<String, TCell> sorted = new TreeMap<String, TCell>();
+        TreeMap<String, TCell> sorted = new TreeMap<>();
         for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
             sorted.put(utf8(column.getKey().array()), column.getValue());
         }
@@ -379,7 +379,7 @@ public class DemoClient {
         new Configuration() {
           @Override
           public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-            Map<String, String> options = new HashMap<String, String>();
+            Map<String, String> options = new HashMap<>();
             options.put("useKeyTab", "false");
             options.put("storeKey", "false");
             options.put("doNotPrompt", "true");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 666891c..25fdc4a 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -151,7 +151,7 @@ public class HttpDoAsClient {
     //
     // Create the demo table with two column families, entry: and unused:
     //
-    ArrayList<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>(2);
+    ArrayList<ColumnDescriptor> columns = new ArrayList<>(2);
     ColumnDescriptor col;
     col = new ColumnDescriptor();
     col.name = ByteBuffer.wrap(bytes("entry:"));
@@ -236,7 +236,7 @@ public class HttpDoAsClient {
   private void printRow(TRowResult rowResult) {
     // copy values into a TreeMap to get them in sorted order
 
-    TreeMap<String, TCell> sorted = new TreeMap<String, TCell>();
+    TreeMap<String, TCell> sorted = new TreeMap<>();
     for (Map.Entry<ByteBuffer, TCell> column : rowResult.columns.entrySet()) {
       sorted.put(utf8(column.getKey().array()), column.getValue());
     }
@@ -261,7 +261,7 @@ public class HttpDoAsClient {
         new Configuration() {
           @Override
           public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-            Map<String, String> options = new HashMap<String, String>();
+            Map<String, String> options = new HashMap<>();
             options.put("useKeyTab", "false");
             options.put("storeKey", "false");
             options.put("doNotPrompt", "true");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
index 4083792..666997e 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
@@ -102,7 +102,7 @@ public class DemoClient {
        *
        * The HBase cluster must be secure, allow proxy user.
        */
-      Map<String, String> saslProperties = new HashMap<String, String>();
+      Map<String, String> saslProperties = new HashMap<>();
       saslProperties.put(Sasl.QOP, "auth-conf,auth-int,auth");
       transport = new TSaslClientTransport("GSSAPI", null,
         user != null ? user : "hbase",// Thrift server user name, should be an authorized proxy user
@@ -126,7 +126,7 @@ public class DemoClient {
     columnValue.setFamily("family1".getBytes());
     columnValue.setQualifier("qualifier1".getBytes());
     columnValue.setValue("value1".getBytes());
-    List<TColumnValue> columnValues = new ArrayList<TColumnValue>(1);
+    List<TColumnValue> columnValues = new ArrayList<>(1);
     columnValues.add(columnValue);
     put.setColumnValues(columnValues);
 
@@ -159,7 +159,7 @@ public class DemoClient {
       new Configuration() {
         @Override
         public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-          Map<String, String> options = new HashMap<String, String>();
+          Map<String, String> options = new HashMap<>();
           options.put("useKeyTab", "false");
           options.put("storeKey", "false");
           options.put("doNotPrompt", "true");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
----------------------------------------------------------------------
diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
index 69d8521..e741760 100644
--- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
@@ -100,7 +100,7 @@ public class MemcachedBlockCache implements BlockCache {
     // case.
     String serverListString = c.get(MEMCACHED_CONFIG_KEY,"localhost:11211");
     String[] servers = serverListString.split(",");
-    List<InetSocketAddress> serverAddresses = new ArrayList<InetSocketAddress>(servers.length);
+    List<InetSocketAddress> serverAddresses = new ArrayList<>(servers.length);
     for (String s:servers) {
       serverAddresses.add(Addressing.createInetSocketAddressFromHostAndPortStr(s));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
index 78442ba..be6d6d1 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
@@ -34,7 +34,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory {
   public static enum SingletonStorage {
     INSTANCE;
     private final Object lock = new Object();
-    private final Map<Class, Object> instances = new HashMap<Class, Object>();
+    private final Map<Class, Object> instances = new HashMap<>();
   }
   private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/TestCompatibilitySingletonFactory.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/TestCompatibilitySingletonFactory.java b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/TestCompatibilitySingletonFactory.java
index f942059..168f6c7 100644
--- a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/TestCompatibilitySingletonFactory.java
+++ b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/TestCompatibilitySingletonFactory.java
@@ -55,9 +55,8 @@ public class TestCompatibilitySingletonFactory {
 
   @Test
   public void testGetInstance() throws Exception {
-    List<TestCompatibilitySingletonFactoryCallable> callables =
-        new ArrayList<TestCompatibilitySingletonFactoryCallable>(ITERATIONS);
-    List<String> resultStrings = new ArrayList<String>(ITERATIONS);
+    List<TestCompatibilitySingletonFactoryCallable> callables = new ArrayList<>(ITERATIONS);
+    List<String> resultStrings = new ArrayList<>(ITERATIONS);
 
 
     // Create the callables.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
index 76bbb09..78893ab 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
@@ -28,10 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 public class MetricsHBaseServerSourceFactoryImpl extends MetricsHBaseServerSourceFactory {
   private enum SourceStorage {
     INSTANCE;
-    HashMap<String, MetricsHBaseServerSource>
-        sources =
-        new HashMap<String, MetricsHBaseServerSource>();
-
+    HashMap<String, MetricsHBaseServerSource> sources = new HashMap<>();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.java
index f658a27..c304fb9 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/master/balancer/MetricsStochasticBalancerSourceImpl.java
@@ -46,7 +46,7 @@ public class MetricsStochasticBalancerSourceImpl extends MetricsBalancerSourceIm
           return size() > mruCap;
         }
       };
-  private Map<String, String> costFunctionDescs = new ConcurrentHashMap<String, String>();
+  private Map<String, String> costFunctionDescs = new ConcurrentHashMap<>();
 
   /**
    * Calculates the mru cache capacity from the metrics size
@@ -79,7 +79,7 @@ public class MetricsStochasticBalancerSourceImpl extends MetricsBalancerSourceIm
     synchronized (stochasticCosts) {
       Map<String, Double> costs = stochasticCosts.get(tableName);
       if (costs == null) {
-        costs = new ConcurrentHashMap<String, Double>();
+        costs = new ConcurrentHashMap<>();
       }
 
       costs.put(costFunctionName, cost);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java
index 7905561..565b853 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java
@@ -45,14 +45,14 @@ public final class Interns {
       CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.DAYS)
           .build(new CacheLoader<String, ConcurrentHashMap<String, MetricsInfo>>() {
             public ConcurrentHashMap<String, MetricsInfo> load(String key) {
-              return new ConcurrentHashMap<String, MetricsInfo>();
+              return new ConcurrentHashMap<>();
             }
           });
   private static LoadingCache<MetricsInfo, ConcurrentHashMap<String, MetricsTag>> tagCache =
       CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.DAYS)
           .build(new CacheLoader<MetricsInfo, ConcurrentHashMap<String, MetricsTag>>() {
             public ConcurrentHashMap<String, MetricsTag> load(MetricsInfo key) {
-              return new ConcurrentHashMap<String, MetricsTag>();
+              return new ConcurrentHashMap<>();
             }
           });
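
The CacheLoader anonymous classes above keep their explicit type arguments, and only the ConcurrentHashMap instantiations inside load() gain the diamond. That is a language constraint rather than an oversight: the diamond on an anonymous class is a compile error before Java 9 (JEP 213). A sketch with plain JDK types, assuming the Java 7/8 toolchains in use at the time of this commit:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;

    public class AnonymousDiamond {
        public static void main(String[] args) throws Exception {
            // Fine since Java 7: diamond on an ordinary instantiation.
            List<String> results = new ArrayList<>();

            // An anonymous class must spell its type arguments out;
            // "new Callable<>() { ... }" only compiles from Java 9 on.
            Callable<String> task = new Callable<String>() {
                @Override
                public String call() {
                    return "explicit arguments required pre-Java 9";
                }
            };
            results.add(task.call());
            System.out.println(results);
        }
    }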
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
index a968aca..3e4016d 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java
@@ -73,7 +73,7 @@ public class MetricSampleQuantiles {
 
   public MetricSampleQuantiles(MetricQuantile[] quantiles) {
     this.quantiles = Arrays.copyOf(quantiles, quantiles.length);
-    this.samples = new LinkedList<SampleItem>();
+    this.samples = new LinkedList<>();
   }
 
   /**
@@ -235,7 +235,7 @@ public class MetricSampleQuantiles {
   synchronized public Map<MetricQuantile, Long> snapshot() throws IOException {
     // flush the buffer first for best results
     insertBatch();
-    Map<MetricQuantile, Long> values = new HashMap<MetricQuantile, Long>(quantiles.length);
+    Map<MetricQuantile, Long> values = new HashMap<>(quantiles.length);
     for (int i = 0; i < quantiles.length; i++) {
       values.put(quantiles[i], query(quantiles[i].quantile));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
index 4291eb7..19a8ad2 100644
--- a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
+++ b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
@@ -37,9 +37,9 @@ import static org.junit.Assert.*;
  *  A helper class that will allow tests to get into hadoop2's metrics2 values.
  */
 public class MetricsAssertHelperImpl implements MetricsAssertHelper {
-  private Map<String, String> tags = new HashMap<String, String>();
-  private Map<String, Number> gauges = new HashMap<String, Number>();
-  private Map<String, Long> counters = new HashMap<String, Long>();
+  private Map<String, String> tags = new HashMap<>();
+  private Map<String, Number> gauges = new HashMap<>();
+  private Map<String, Long> counters = new HashMap<>();
 
   public class MockMetricsBuilder implements MetricsCollector {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
index d35ef84..431ba42 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
@@ -317,7 +317,7 @@ public class DistributedHBaseCluster extends HBaseCluster {
   }
 
   protected boolean restoreMasters(ClusterStatus initial, ClusterStatus current) {
-    List<IOException> deferred = new ArrayList<IOException>();
+    List<IOException> deferred = new ArrayList<>();
     //check whether current master has changed
     final ServerName initMaster = initial.getMaster();
     if (!ServerName.isSameHostnameAndPort(initMaster, current.getMaster())) {
@@ -371,8 +371,8 @@ public class DistributedHBaseCluster extends HBaseCluster {
       }
     } else {
       //current master has not changed, match up backup masters
-      Set<ServerName> toStart = new TreeSet<ServerName>(new ServerNameIgnoreStartCodeComparator());
-      Set<ServerName> toKill = new TreeSet<ServerName>(new ServerNameIgnoreStartCodeComparator());
+      Set<ServerName> toStart = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
+      Set<ServerName> toKill = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
       toStart.addAll(initial.getBackupMasters());
       toKill.addAll(current.getBackupMasters());
 
@@ -429,8 +429,8 @@ public class DistributedHBaseCluster extends HBaseCluster {
   }
 
   protected boolean restoreRegionServers(ClusterStatus initial, ClusterStatus current) {
-    Set<ServerName> toStart = new TreeSet<ServerName>(new ServerNameIgnoreStartCodeComparator());
-    Set<ServerName> toKill = new TreeSet<ServerName>(new ServerNameIgnoreStartCodeComparator());
+    Set<ServerName> toStart = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
+    Set<ServerName> toKill = new TreeSet<>(new ServerNameIgnoreStartCodeComparator());
     toStart.addAll(initial.getServers());
     toKill.addAll(current.getServers());
 
@@ -443,7 +443,7 @@ public class DistributedHBaseCluster extends HBaseCluster {
       toKill.remove(server);
     }
 
-    List<IOException> deferred = new ArrayList<IOException>();
+    List<IOException> deferred = new ArrayList<>();
 
     for(ServerName sn:toStart) {
       try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
index b6f1aeb..07014e5 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
@@ -312,7 +312,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     LOG.info("Executed remote command, exit code:" + shell.getExitCode()
         + " , output:" + shell.getOutput());
 
-    return new Pair<Integer, String>(shell.getExitCode(), shell.getOutput());
+    return new Pair<>(shell.getExitCode(), shell.getOutput());
   }
 
   private Pair<Integer, String> execWithRetries(String hostname, ServiceType service, String... cmd)
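
The new Pair<>(...) in this hunk sits in a return statement, where the target type for inference is the method's declared return type, so the type arguments need not be repeated at the return site. A minimal sketch of that context (this Pair is a toy stand-in, not org.apache.hadoop.hbase.util.Pair):

    public class ReturnDiamond {
        // Toy stand-in for org.apache.hadoop.hbase.util.Pair, just to
        // keep the sketch self-contained.
        static class Pair<A, B> {
            final A first;
            final B second;
            Pair(A first, B second) { this.first = first; this.second = second; }
        }

        // Inference uses the declared return type, so <Integer, String>
        // is not repeated in the return statement.
        static Pair<Integer, String> exec() {
            return new Pair<>(0, "ok");
        }

        public static void main(String[] args) {
            Pair<Integer, String> p = exec();
            System.out.println(p.first + " " + p.second);
        }
    }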

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index 5d79722..2d3693a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -125,17 +125,13 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
 
   protected int numThreads, numRegions;
 
-  ConcurrentHashMap<String, NamespaceDescriptor> namespaceMap =
-      new ConcurrentHashMap<String, NamespaceDescriptor>();
+  ConcurrentHashMap<String, NamespaceDescriptor> namespaceMap = new ConcurrentHashMap<>();
 
-  ConcurrentHashMap<TableName, HTableDescriptor> enabledTables =
-      new ConcurrentHashMap<TableName, HTableDescriptor>();
+  ConcurrentHashMap<TableName, HTableDescriptor> enabledTables = new ConcurrentHashMap<>();
 
-  ConcurrentHashMap<TableName, HTableDescriptor> disabledTables =
-      new ConcurrentHashMap<TableName, HTableDescriptor>();
+  ConcurrentHashMap<TableName, HTableDescriptor> disabledTables = new ConcurrentHashMap<>();
 
-  ConcurrentHashMap<TableName, HTableDescriptor> deletedTables =
-      new ConcurrentHashMap<TableName, HTableDescriptor>();
+  ConcurrentHashMap<TableName, HTableDescriptor> deletedTables = new ConcurrentHashMap<>();
 
   @Override
   public void setUpCluster() throws Exception {
@@ -256,7 +252,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         if (namespaceMap.isEmpty()) {
           return null;
         }
-        ArrayList<String> namespaceList = new ArrayList<String>(namespaceMap.keySet());
+        ArrayList<String> namespaceList = new ArrayList<>(namespaceMap.keySet());
         String randomKey = namespaceList.get(RandomUtils.nextInt(namespaceList.size()));
         NamespaceDescriptor randomNsd = namespaceMap.get(randomKey);
         // remove from namespaceMap
@@ -396,7 +392,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         if (tableMap.isEmpty()) {
           return null;
         }
-        ArrayList<TableName> tableList = new ArrayList<TableName>(tableMap.keySet());
+        ArrayList<TableName> tableList = new ArrayList<>(tableMap.keySet());
         TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
         HTableDescriptor randomHtd = tableMap.get(randomKey);
         // remove from tableMap
@@ -770,7 +766,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
       Admin admin = connection.getAdmin();
       TableName tableName = selected.getTableName();
       try (Table table = connection.getTable(tableName)){
-        ArrayList<HRegionInfo> regionInfos = new ArrayList<HRegionInfo>(admin.getTableRegions(
+        ArrayList<HRegionInfo> regionInfos = new ArrayList<>(admin.getTableRegions(
             selected.getTableName()));
         int numRegions = regionInfos.size();
         // average number of rows to be added per action to each region

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
index d4bcacd..7b6635e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
@@ -207,7 +207,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
   }
 
   protected String[] getArgsForLoadTestToolInitTable() {
-    List<String> args = new ArrayList<String>();
+    List<String> args = new ArrayList<>();
     args.add("-tn");
     args.add(getTablename().getNameAsString());
     // pass all remaining args from conf with keys <test class name>.<load test tool arg>
@@ -225,7 +225,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
 
   protected String[] getArgsForLoadTestTool(String mode, String modeSpecificArg, long startKey,
       long numKeys) {
-    List<String> args = new ArrayList<String>(11);
+    List<String> args = new ArrayList<>(11);
     args.add("-tn");
     args.add(getTablename().getNameAsString());
     args.add("-families");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
index 82eef1a..d129279 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithACL.java
@@ -68,7 +68,7 @@ public class IntegrationTestIngestWithACL extends IntegrationTestIngest {
   protected String[] getArgsForLoadTestTool(String mode, String modeSpecificArg, long startKey,
       long numKeys) {
     String[] args = super.getArgsForLoadTestTool(mode, modeSpecificArg, startKey, numKeys);
-    List<String> tmp = new ArrayList<String>(Arrays.asList(args));
+    List<String> tmp = new ArrayList<>(Arrays.asList(args));
     tmp.add(HYPHEN + LoadTestTool.OPT_GENERATOR);
     StringBuilder sb = new StringBuilder(LoadTestDataGeneratorWithACL.class.getName());
     sb.append(COLON);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
index 13a5936..cd9e355 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java
@@ -61,7 +61,7 @@ public class IntegrationTestIngestWithMOB extends IntegrationTestIngest {
 
   @Override
   protected String[] getArgsForLoadTestToolInitTable() {
-    List<String> args = new ArrayList<String>();
+    List<String> args = new ArrayList<>();
     args.add("-tn");
     args.add(getTablename().getNameAsString());
     // pass all remaining args from conf with keys <test class name>.<load test tool arg>
@@ -133,7 +133,7 @@ public class IntegrationTestIngestWithMOB extends IntegrationTestIngest {
   protected String[] getArgsForLoadTestTool(String mode, String modeSpecificArg, long startKey,
       long numKeys) {
     String[] args = super.getArgsForLoadTestTool(mode, modeSpecificArg, startKey, numKeys);
-    List<String> tmp = new ArrayList<String>(Arrays.asList(args));
+    List<String> tmp = new ArrayList<>(Arrays.asList(args));
     // LoadTestDataGeneratorMOB:mobColumnFamily:minMobDataSize:maxMobDataSize
     tmp.add(HIPHEN + LoadTestTool.OPT_GENERATOR);
     StringBuilder sb = new StringBuilder(LoadTestDataGeneratorWithMOB.class.getName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
index f1b2c68..08bd4e5 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
@@ -45,7 +45,7 @@ public class IntegrationTestIngestWithTags extends IntegrationTestIngest {
   protected String[] getArgsForLoadTestTool(String mode, String modeSpecificArg, long startKey,
       long numKeys) {
     String[] args = super.getArgsForLoadTestTool(mode, modeSpecificArg, startKey, numKeys);
-    List<String> tmp = new ArrayList<String>(Arrays.asList(args));
+    List<String> tmp = new ArrayList<>(Arrays.asList(args));
     // LoadTestDataGeneratorWithTags:minNumTags:maxNumTags:minTagLength:maxTagLength
     tmp.add(HIPHEN + LoadTestTool.OPT_GENERATOR);
     StringBuilder sb = new StringBuilder(LoadTestDataGeneratorWithTags.class.getName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
index 133be1a..b7d8dad 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithVisibilityLabels.java
@@ -42,31 +42,31 @@ public class IntegrationTestIngestWithVisibilityLabels extends IntegrationTestIn
   private static final String[] VISIBILITY_EXPS = { "secret & confidential & !private",
       "topsecret | confidential", "confidential & private", "public", "topsecret & private",
       "!public | private", "(secret | topsecret) & private" };
-  private static final List<List<String>> AUTHS = new ArrayList<List<String>>();
+  private static final List<List<String>> AUTHS = new ArrayList<>();
 
   static {
-    ArrayList<String> tmp = new ArrayList<String>(2);
+    ArrayList<String> tmp = new ArrayList<>(2);
     tmp.add("secret");
     tmp.add("confidential");
     AUTHS.add(tmp);
-    tmp = new ArrayList<String>(1);
+    tmp = new ArrayList<>(1);
     tmp.add("topsecret");
     AUTHS.add(tmp);
-    tmp = new ArrayList<String>(2);
+    tmp = new ArrayList<>(2);
     tmp.add("confidential");
     tmp.add("private");
     AUTHS.add(tmp);
-    tmp = new ArrayList<String>(1);
+    tmp = new ArrayList<>(1);
     tmp.add("public");
     AUTHS.add(tmp);
-    tmp = new ArrayList<String>(2);
+    tmp = new ArrayList<>(2);
     tmp.add("topsecret");
     tmp.add("private");
     AUTHS.add(tmp);
-    tmp = new ArrayList<String>(1);
+    tmp = new ArrayList<>(1);
     tmp.add("confidential");
     AUTHS.add(tmp);
-    tmp = new ArrayList<String>(2);
+    tmp = new ArrayList<>(2);
     tmp.add("topsecret");
     tmp.add("private");
     AUTHS.add(tmp);
@@ -88,7 +88,7 @@ public class IntegrationTestIngestWithVisibilityLabels extends IntegrationTestIn
   protected String[] getArgsForLoadTestTool(String mode, String modeSpecificArg, long startKey,
       long numKeys) {
     String[] args = super.getArgsForLoadTestTool(mode, modeSpecificArg, startKey, numKeys);
-    List<String> tmp = new ArrayList<String>(Arrays.asList(args));
+    List<String> tmp = new ArrayList<>(Arrays.asList(args));
     tmp.add(HIPHEN + LoadTestTool.OPT_GENERATOR);
     StringBuilder sb = new StringBuilder(LoadTestDataGeneratorWithVisibilityLabels.class.getName());
     sb.append(COLON);
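
The AUTHS hunks above are where the diamond helps readability most: with nested type arguments such as List<List<String>>, the pre-Java 7 form had to repeat the inner parameterization on every instantiation. A self-contained sketch of the same nesting (names are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    public class NestedDiamond {
        public static void main(String[] args) {
            // Previously: new ArrayList<List<String>>() — the inner
            // List<String> had to be written out again.
            List<List<String>> auths = new ArrayList<>();

            List<String> tmp = new ArrayList<>(2); // initial capacity 2
            tmp.add("secret");
            tmp.add("confidential");
            auths.add(tmp);

            System.out.println(auths); // [[secret, confidential]]
        }
    }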

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
index 548ff53..6efe9d8 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
@@ -92,8 +92,7 @@ public class IntegrationTestLazyCfLoading {
     public static final byte[] VALUE_COLUMN = Bytes.toBytes("val");
     public static final long ACCEPTED_VALUE = 1L;
 
-    private static final Map<byte[], byte[][]> columnMap = new TreeMap<byte[], byte[][]>(
-        Bytes.BYTES_COMPARATOR);
+    private static final Map<byte[], byte[][]> columnMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
     private final AtomicLong expectedNumberOfKeys = new AtomicLong(0);
     private final AtomicLong totalNumberOfKeys = new AtomicLong(0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
index e609f0b..c3c5df3 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
@@ -266,8 +266,8 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase {
       format("--nomapred --table=%s --latency --sampleRate=0.1 randomRead 4", tableName);
     String replicaReadOpts = format("%s %s", replicas, readOpts);
 
-    ArrayList<TimingResult> resultsWithoutReplicas = new ArrayList<TimingResult>(maxIters);
-    ArrayList<TimingResult> resultsWithReplicas = new ArrayList<TimingResult>(maxIters);
+    ArrayList<TimingResult> resultsWithoutReplicas = new ArrayList<>(maxIters);
+    ArrayList<TimingResult> resultsWithReplicas = new ArrayList<>(maxIters);
 
     // create/populate the table, replicas disabled
     LOG.debug("Populating table.");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java
index 98d53e9..b6cfdcd 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java
@@ -126,7 +126,7 @@ public class IntegrationTestRegionReplicaReplication extends IntegrationTestInge
     protected BlockingQueue<Long> createWriteKeysQueue(Configuration conf) {
       this.delayMs = conf.getLong(String.format("%s.%s",
         IntegrationTestRegionReplicaReplication.class.getSimpleName(), OPT_READ_DELAY_MS), 5000);
-      return new ConstantDelayQueue<Long>(TimeUnit.MILLISECONDS, delayMs);
+      return new ConstantDelayQueue<>(TimeUnit.MILLISECONDS, delayMs);
     }
   }
 
@@ -145,7 +145,7 @@ public class IntegrationTestRegionReplicaReplication extends IntegrationTestInge
     protected BlockingQueue<Long> createWriteKeysQueue(Configuration conf) {
       this.delayMs = conf.getLong(String.format("%s.%s",
         IntegrationTestRegionReplicaReplication.class.getSimpleName(), OPT_READ_DELAY_MS), 5000);
-      return new ConstantDelayQueue<Long>(TimeUnit.MILLISECONDS, delayMs);
+      return new ConstantDelayQueue<>(TimeUnit.MILLISECONDS, delayMs);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
index 04a3b05..03ba460 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
@@ -336,7 +336,7 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
   // ClusterManager methods take a "ServiceType" object (e.g. "HBASE_MASTER," "HADOOP_NAMENODE").
   // These "service types," which cluster managers call "roles" or "components," need to be mapped
   // to their corresponding service (e.g. "HBase," "HDFS") in order to be controlled.
-  private static Map<ServiceType, Service> roleServiceType = new HashMap<ServiceType, Service>();
+  private static Map<ServiceType, Service> roleServiceType = new HashMap<>();
   static {
     roleServiceType.put(ServiceType.HADOOP_NAMENODE, Service.HDFS);
     roleServiceType.put(ServiceType.HADOOP_DATANODE, Service.HDFS);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
index d1a32b1..4c7be8c 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
@@ -123,7 +123,7 @@ public class Action {
     if (count == 1) {
       return new ServerName [] {};
     }
-    ArrayList<ServerName> tmp = new ArrayList<ServerName>(count);
+    ArrayList<ServerName> tmp = new ArrayList<>(count);
     tmp.addAll(regionServers);
     tmp.remove(master);
     return tmp.toArray(new ServerName[count-1]);
@@ -192,11 +192,11 @@ public class Action {
   protected void unbalanceRegions(ClusterStatus clusterStatus,
       List<ServerName> fromServers, List<ServerName> toServers,
       double fractionOfRegions) throws Exception {
-    List<byte[]> victimRegions = new LinkedList<byte[]>();
+    List<byte[]> victimRegions = new LinkedList<>();
     for (ServerName server : fromServers) {
       ServerLoad serverLoad = clusterStatus.getLoad(server);
       // Ugh.
-      List<byte[]> regions = new LinkedList<byte[]>(serverLoad.getRegionsLoad().keySet());
+      List<byte[]> regions = new LinkedList<>(serverLoad.getRegionsLoad().keySet());
       int victimRegionCount = (int)Math.ceil(fractionOfRegions * regions.size());
       LOG.debug("Removing " + victimRegionCount + " regions from " + server.getServerName());
       for (int i = 0; i < victimRegionCount; ++i) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/BatchRestartRsAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/BatchRestartRsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/BatchRestartRsAction.java
index ce66000..75414ae 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/BatchRestartRsAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/BatchRestartRsAction.java
@@ -43,7 +43,7 @@ public class BatchRestartRsAction extends RestartActionBaseAction {
     List<ServerName> selectedServers = PolicyBasedChaosMonkey.selectRandomItems(getCurrentServers(),
         ratio);
 
-    Set<ServerName> killedServers = new HashSet<ServerName>();
+    Set<ServerName> killedServers = new HashSet<>();
 
     for (ServerName server : selectedServers) {
       // Don't keep killing servers if we're

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java
index 7299e79..f5349dc 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java
@@ -49,7 +49,7 @@ public class RestartRandomDataNodeAction extends RestartActionBaseAction {
     DistributedFileSystem fs = (DistributedFileSystem) FSUtils.getRootDir(getConf())
         .getFileSystem(getConf());
     DFSClient dfsClient = fs.getClient();
-    List<ServerName> hosts = new LinkedList<ServerName>();
+    List<ServerName> hosts = new LinkedList<>();
     for (DatanodeInfo dataNode: dfsClient.datanodeReport(HdfsConstants.DatanodeReportType.LIVE)) {
       hosts.add(ServerName.valueOf(dataNode.getHostName(), -1, -1));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
index e79ff5b..ba25198 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
@@ -59,8 +59,8 @@ public class RollingBatchRestartRsAction extends BatchRestartRsAction {
         (int)(ratio * 100)));
     List<ServerName> selectedServers = selectServers();
 
-    Queue<ServerName> serversToBeKilled = new LinkedList<ServerName>(selectedServers);
-    Queue<ServerName> deadServers = new LinkedList<ServerName>();
+    Queue<ServerName> serversToBeKilled = new LinkedList<>(selectedServers);
+    Queue<ServerName> deadServers = new LinkedList<>();
 
     // loop while there are servers to be killed or dead servers to be restarted
     while ((!serversToBeKilled.isEmpty() || !deadServers.isEmpty())  && !context.isStopping()) {
@@ -123,7 +123,7 @@ public class RollingBatchRestartRsAction extends BatchRestartRsAction {
       @Override
       protected ServerName[] getCurrentServers() throws IOException {
         final int count = 4;
-        List<ServerName> serverNames = new ArrayList<ServerName>(count);
+        List<ServerName> serverNames = new ArrayList<>(count);
         for (int i = 0; i < 4; i++) {
           serverNames.add(ServerName.valueOf(i + ".example.org", i, i));
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java
index 1ac1458..a40c8b1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java
@@ -52,13 +52,13 @@ public class UnbalanceKillAndRebalanceAction extends Action {
   @Override
   public void perform() throws Exception {
     ClusterStatus status = this.cluster.getClusterStatus();
-    List<ServerName> victimServers = new LinkedList<ServerName>(status.getServers());
-    Set<ServerName> killedServers = new HashSet<ServerName>();
+    List<ServerName> victimServers = new LinkedList<>(status.getServers());
+    Set<ServerName> killedServers = new HashSet<>();
 
     int liveCount = (int)Math.ceil(FRC_SERVERS_THAT_HOARD_AND_LIVE * victimServers.size());
     int deadCount = (int)Math.ceil(FRC_SERVERS_THAT_HOARD_AND_DIE * victimServers.size());
     Assert.assertTrue((liveCount + deadCount) < victimServers.size());
-    List<ServerName> targetServers = new ArrayList<ServerName>(liveCount);
+    List<ServerName> targetServers = new ArrayList<>(liveCount);
     for (int i = 0; i < liveCount + deadCount; ++i) {
       int victimIx = RandomUtils.nextInt(victimServers.size());
       targetServers.add(victimServers.remove(victimIx));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java
index 2779bd1..bdffdb1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java
@@ -48,9 +48,9 @@ public class UnbalanceRegionsAction extends Action {
   public void perform() throws Exception {
     LOG.info("Unbalancing regions");
     ClusterStatus status = this.cluster.getClusterStatus();
-    List<ServerName> victimServers = new LinkedList<ServerName>(status.getServers());
+    List<ServerName> victimServers = new LinkedList<>(status.getServers());
     int targetServerCount = (int)Math.ceil(fractionOfServers * victimServers.size());
-    List<ServerName> targetServers = new ArrayList<ServerName>(targetServerCount);
+    List<ServerName> targetServers = new ArrayList<>(targetServerCount);
     for (int i = 0; i < targetServerCount; ++i) {
       int victimIx = RandomUtils.nextInt(victimServers.size());
       targetServers.add(victimServers.remove(victimIx));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
index 57f7c83..951f8f8 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
@@ -92,7 +92,7 @@ public class PolicyBasedChaosMonkey extends ChaosMonkey {
   public static <T> List<T> selectRandomItems(T[] items, float ratio) {
     int remaining = (int)Math.ceil(items.length * ratio);
 
-    List<T> selectedItems = new ArrayList<T>(remaining);
+    List<T> selectedItems = new ArrayList<>(remaining);
 
     for (int i=0; i<items.length && remaining > 0; i++) {
       if (RandomUtils.nextFloat() < ((float)remaining/(items.length-i))) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java
index e03816a..35f06eb 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java
@@ -31,7 +31,7 @@ public class DoActionsOncePolicy extends PeriodicPolicy {
 
   public DoActionsOncePolicy(long periodMs, List<Action> actions) {
     super(periodMs);
-    this.actions = new ArrayList<Action>(actions);
+    this.actions = new ArrayList<>(actions);
   }
 
   public DoActionsOncePolicy(long periodMs, Action... actions) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java
index 8912467..8b76e49 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java
@@ -46,9 +46,9 @@ public class PeriodicRandomActionPolicy extends PeriodicPolicy {
 
   public PeriodicRandomActionPolicy(long periodMs, Action... actions) {
     super(periodMs);
-    this.actions = new ArrayList<Pair<Action, Integer>>(actions.length);
+    this.actions = new ArrayList<>(actions.length);
     for (Action action : actions) {
-      this.actions.add(new Pair<Action, Integer>(action, 1));
+      this.actions.add(new Pair<>(action, 1));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
index fd062d1..e39d0fe 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
@@ -353,7 +353,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
     @Override
     public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
       int numSplits = context.getConfiguration().getInt(NUM_MAPS_KEY, NUM_MAPS);
-      ArrayList<InputSplit> ret = new ArrayList<InputSplit>(numSplits);
+      ArrayList<InputSplit> ret = new ArrayList<>(numSplits);
       for (int i = 0; i < numSplits; ++i) {
         ret.add(new EmptySplit());
       }
@@ -376,7 +376,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
       chainId = chainId - (chainId % numMapTasks) + taskId; // ensure that chainId is unique per task and across iterations
       LongWritable[] keys = new LongWritable[] {new LongWritable(chainId)};
 
-      return new FixedRecordReader<LongWritable, LongWritable>(keys, keys);
+      return new FixedRecordReader<>(keys, keys);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index 42b6ae7..9d04bf9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -194,7 +194,7 @@ public class IntegrationTestImportTsv extends Configured implements Tool {
         util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");
 
 
-    Map<String, String> args = new HashMap<String, String>();
+    Map<String, String> args = new HashMap<>();
     args.put(ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles.toString());
     args.put(ImportTsv.COLUMNS_CONF_KEY,
         format("HBASE_ROW_KEY,HBASE_TS_KEY,%s:c1,%s:c2", cf, cf));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index f87cc86..bd14c31 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -297,9 +297,9 @@ public class IntegrationTestMTTR {
     LOG.info("Starting " + testName + " with " + maxIters + " iterations.");
 
     // Array to keep track of times.
-    ArrayList<TimingResult> resultPuts = new ArrayList<TimingResult>(maxIters);
-    ArrayList<TimingResult> resultScan = new ArrayList<TimingResult>(maxIters);
-    ArrayList<TimingResult> resultAdmin = new ArrayList<TimingResult>(maxIters);
+    ArrayList<TimingResult> resultPuts = new ArrayList<>(maxIters);
+    ArrayList<TimingResult> resultScan = new ArrayList<>(maxIters);
+    ArrayList<TimingResult> resultAdmin = new ArrayList<>(maxIters);
     long start = System.nanoTime();
 
     try {
@@ -357,7 +357,7 @@ public class IntegrationTestMTTR {
    */
   private static class TimingResult {
     DescriptiveStatistics stats = new DescriptiveStatistics();
-    ArrayList<Long> traces = new ArrayList<Long>(10);
+    ArrayList<Long> traces = new ArrayList<>(10);
 
     /**
      * Add a result to this aggregate result.

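The `new ArrayList<>(maxIters)` and `new ArrayList<>(10)` forms pass an initial capacity, not an element: the `int` argument selects the `ArrayList(int initialCapacity)` constructor and plays no part in type inference, which still comes from the declared variable type. A small sketch of the distinction (names are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    public class CapacityVsElement {
      public static void main(String[] args) {
        // Capacity hint only: the list starts empty but will not need to
        // grow its backing array until the eleventh element.
        List<Long> traces = new ArrayList<>(10);
        System.out.println(traces.size());   // 0, not 10
        traces.add(1_000_000L);
        System.out.println(traces.size());   // 1
      }
    }
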
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index dff1828..1b23de8 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -350,7 +350,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       public List<InputSplit> getSplits(JobContext job) throws IOException, InterruptedException {
         int numMappers = job.getConfiguration().getInt(GENERATOR_NUM_MAPPERS_KEY, 1);
 
-        ArrayList<InputSplit> splits = new ArrayList<InputSplit>(numMappers);
+        ArrayList<InputSplit> splits = new ArrayList<>(numMappers);
 
         for (int i = 0; i < numMappers; i++) {
           splits.add(new GeneratorInputSplit());
@@ -956,7 +956,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     throws IOException, InterruptedException {
       Path keysInputDir = new Path(conf.get(SEARCHER_INPUTDIR_KEY));
       FileSystem fs = FileSystem.get(conf);
-      SortedSet<byte []> result = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
+      SortedSet<byte []> result = new TreeSet<>(Bytes.BYTES_COMPARATOR);
       if (!fs.exists(keysInputDir)) {
         throw new FileNotFoundException(keysInputDir.toString());
       }
@@ -977,7 +977,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     private static SortedSet<byte[]> readFileToSearch(final Configuration conf,
         final FileSystem fs, final LocatedFileStatus keyFileStatus) throws IOException,
         InterruptedException {
-      SortedSet<byte []> result = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
+      SortedSet<byte []> result = new TreeSet<>(Bytes.BYTES_COMPARATOR);
       // Return entries that are flagged Counts.UNDEFINED in the value. Return the row. This is
       // what is missing.
       TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
@@ -1064,7 +1064,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
      */
     public static class VerifyReducer extends
         Reducer<BytesWritable, BytesWritable, BytesWritable, BytesWritable> {
-      private ArrayList<byte[]> refs = new ArrayList<byte[]>();
+      private ArrayList<byte[]> refs = new ArrayList<>();
       private final BytesWritable UNREF = new BytesWritable(addPrefixFlag(
         Counts.UNREFERENCED.ordinal(), new byte[] {}));
       private final BytesWritable LOSTFAM = new BytesWritable(addPrefixFlag(

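`new TreeSet<>(Bytes.BYTES_COMPARATOR)` shows the diamond alongside a constructor argument: the comparator selects the `TreeSet(Comparator<? super E>)` constructor, while the element type `byte[]` is still inferred from the declared variable. A self-contained sketch with a stand-in comparator (the real `Bytes.BYTES_COMPARATOR` orders bytes as unsigned values; the simple lambda below does not, which is irrelevant to the inference point):

    import java.util.Comparator;
    import java.util.SortedSet;
    import java.util.TreeSet;

    public class ComparatorDiamond {
      public static void main(String[] args) {
        Comparator<byte[]> byFirstByte = (a, b) -> Integer.compare(a[0], b[0]);
        // <byte[]> is inferred from the declared type of 'result'.
        SortedSet<byte[]> result = new TreeSet<>(byFirstByte);
        result.add(new byte[] { 2, 0 });
        result.add(new byte[] { 1, 9 });
        System.out.println(result.size());   // 2
      }
    }
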
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index dec565f..9eacc5a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -456,7 +456,7 @@ public void cleanUpCluster() throws Exception {
       throws IOException, InterruptedException {
     Path keysInputDir = new Path(conf.get(SEARCHER_INPUTDIR_KEY));
     FileSystem fs = FileSystem.get(conf);
-    SortedSet<byte []> result = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
+    SortedSet<byte []> result = new TreeSet<>(Bytes.BYTES_COMPARATOR);
     if (!fs.exists(keysInputDir)) {
       throw new FileNotFoundException(keysInputDir.toString());
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
index 141b24d..bf534f3 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
@@ -234,7 +234,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
 
         // set the test table to be the table to replicate
         HashMap<TableName, ArrayList<String>> toReplicate = new HashMap<>();
-        toReplicate.put(tableName, new ArrayList<String>(0));
+        toReplicate.put(tableName, new ArrayList<>(0));
 
         replicationAdmin.addPeer("TestPeer", peerConfig, toReplicate);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index b7463bd..327d879 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -233,7 +233,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
   }
 
   private LinkedBlockingQueue<Long> insertData() throws IOException, InterruptedException {
-    LinkedBlockingQueue<Long> rowKeys = new LinkedBlockingQueue<Long>(25000);
+    LinkedBlockingQueue<Long> rowKeys = new LinkedBlockingQueue<>(25000);
     BufferedMutator ht = util.getConnection().getBufferedMutator(this.tableName);
     byte[] value = new byte[300];
     for (int x = 0; x < 5000; x++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
index e6df88a..5aa5d88 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
@@ -42,8 +42,7 @@ public class ArraySearcherPool {
    */
   private static final Integer MAX_POOL_SIZE = 1000;
 
-  protected Queue<PrefixTreeArraySearcher> pool
-    = new LinkedBlockingQueue<PrefixTreeArraySearcher>(MAX_POOL_SIZE);
+  protected Queue<PrefixTreeArraySearcher> pool = new LinkedBlockingQueue<>(MAX_POOL_SIZE);
 
   public PrefixTreeArraySearcher checkOut(ByteBuff buffer, boolean includesMvccVersion) {
     PrefixTreeArraySearcher searcher = pool.poll(); // will return null if pool is empty

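ArraySearcherPool is a simple object pool over a bounded LinkedBlockingQueue: the capacity bound caps how many idle searchers are retained, and `poll()`/`offer()` never block. A hedged sketch of the same check-out/check-in pattern, with a JDK type standing in for `PrefixTreeArraySearcher`:

    import java.util.Queue;
    import java.util.concurrent.LinkedBlockingQueue;

    public class PoolSketch {
      private static final int MAX_POOL_SIZE = 1000;

      // Element type StringBuilder is inferred from the field declaration.
      private final Queue<StringBuilder> pool = new LinkedBlockingQueue<>(MAX_POOL_SIZE);

      StringBuilder checkOut() {
        StringBuilder cached = pool.poll();   // null when the pool is empty
        return cached != null ? cached : new StringBuilder();
      }

      void checkIn(StringBuilder sb) {
        sb.setLength(0);                      // reset before reuse
        pool.offer(sb);                       // silently dropped when full
      }
    }
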
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
index 3ca4236..255c8a3 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
@@ -87,7 +87,7 @@ public class PrefixTreeCell extends ByteBufferCell implements SettableSequenceId
   protected int tagsOffset;
   protected int tagsLength;
   // Pair to set the value ByteBuffer and its offset
-  protected ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
+  protected ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<>();
 
   /********************** Cell methods ******************/
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderPoolImpl.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderPoolImpl.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderPoolImpl.java
index 8a5ffba..a8f0082 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderPoolImpl.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderPoolImpl.java
@@ -26,8 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 @InterfaceAudience.Private
 public class EncoderPoolImpl implements EncoderPool {
 
-  private BlockingQueue<PrefixTreeEncoder> unusedEncoders = 
-      new LinkedBlockingQueue<PrefixTreeEncoder>();
+  private BlockingQueue<PrefixTreeEncoder> unusedEncoders = new LinkedBlockingQueue<>();
 
   @Override
   public PrefixTreeEncoder checkOut(OutputStream outputStream, boolean includeMvccVersion) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
index 3291d72..3597fbe 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java
@@ -60,7 +60,7 @@ public class LongEncoder {
   /****************** construct ****************************/
 
   public LongEncoder() {
-    this.uniqueValues = new HashSet<Long>();
+    this.uniqueValues = new HashSet<>();
   }
 
   public void reset() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
index f44017b..e2824b0 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
@@ -161,7 +161,7 @@ public class Tokenizer{
   }
 
   public List<byte[]> getArrays() {
-    List<TokenizerNode> nodes = new ArrayList<TokenizerNode>();
+    List<TokenizerNode> nodes = new ArrayList<>();
     root.appendNodesToExternalList(nodes, true, true);
     List<byte[]> byteArrays = Lists.newArrayListWithCapacity(CollectionUtils.nullSafeSize(nodes));
     for (int i = 0; i < nodes.size(); ++i) {

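Tokenizer mixes both idioms: the new diamond on the first line and Guava's `Lists.newArrayListWithCapacity(...)` two lines later. Static factories like Guava's were the standard pre-Java-7 workaround for exactly the verbosity this patch removes, since a generic method could already infer its type arguments at the call site. A sketch of the contrast using only the JDK (the factory method is hand-rolled here to stand in for Guava's):

    import java.util.ArrayList;
    import java.util.List;

    public class FactoryVsDiamond {
      // Pre-diamond workaround: a generic static factory infers <T> at the call site.
      static <T> List<T> newList(int capacity) {
        return new ArrayList<T>(capacity);
      }

      public static void main(String[] args) {
        List<String> viaFactory = newList(16);          // <String> inferred
        List<String> viaDiamond = new ArrayList<>(16);  // same effect, no factory
        viaFactory.add("node");
        viaDiamond.add("node");
        System.out.println(viaFactory.equals(viaDiamond));   // true
      }
    }
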
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeHashSet.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeHashSet.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeHashSet.java
index 9ce6163..dbaa508 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeHashSet.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeHashSet.java
@@ -39,7 +39,7 @@ public class ByteRangeHashSet extends ByteRangeSet {
   /************************ constructors *****************************/
 
   public ByteRangeHashSet() {
-    this.uniqueIndexByUniqueRange = new HashMap<ByteRange, Integer>();
+    this.uniqueIndexByUniqueRange = new HashMap<>();
   }
 
   public ByteRangeHashSet(List<ByteRange> rawByteArrays) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeTreeSet.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeTreeSet.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeTreeSet.java
index b5c0b1a..4ee7b28 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeTreeSet.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/impl/ByteRangeTreeSet.java
@@ -36,7 +36,7 @@ public class ByteRangeTreeSet extends ByteRangeSet {
   /************************ constructors *****************************/
 
   public ByteRangeTreeSet() {
-    this.uniqueIndexByUniqueRange = new TreeMap<ByteRange, Integer>();
+    this.uniqueIndexByUniqueRange = new TreeMap<>();
   }
 
   public ByteRangeTreeSet(List<ByteRange> rawByteArrays) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
index 39140a3..1f9b459 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
@@ -43,7 +43,7 @@ public class TestRowDataExerciseFInts extends BaseTestRowData{
 
   static List<ByteRange> rows;
   static{
-    List<String> rowStrings = new ArrayList<String>(16);
+    List<String> rowStrings = new ArrayList<>(16);
     rowStrings.add("com.edsBlog/directoryAa/pageAaa");
     rowStrings.add("com.edsBlog/directoryAa/pageBbb");
     rowStrings.add("com.edsBlog/directoryAa/pageCcc");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
index 2d3901f..a7edfe7 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
@@ -46,7 +46,7 @@ public class TestRowDataTrivialWithTags extends BaseTestRowData{
 
   static List<KeyValue> d = Lists.newArrayList();
   static {
-    List<Tag> tagList = new ArrayList<Tag>(2);
+    List<Tag> tagList = new ArrayList<>(2);
     Tag t = new ArrayBackedTag((byte) 1, "visibility");
     tagList.add(t);
     t = new ArrayBackedTag((byte) 2, "ACL");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
index a71daaa..0276617 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
@@ -41,7 +41,7 @@ public class TestRowDataUrls extends BaseTestRowData{
 
   static List<ByteRange> rows;
   static{
-    List<String> rowStrings = new ArrayList<String>(16);
+    List<String> rowStrings = new ArrayList<>(16);
     rowStrings.add("com.edsBlog/directoryAa/pageAaa");
     rowStrings.add("com.edsBlog/directoryAa/pageBbb");
     rowStrings.add("com.edsBlog/directoryAa/pageCcc");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
index bccff6d..d4fbb4d 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
@@ -27,7 +27,7 @@ public class TestTimestampDataBasic implements TestTimestampData {
 
   @Override
   public List<Long> getInputs() {
-    List<Long> d = new ArrayList<Long>(5);
+    List<Long> d = new ArrayList<>(5);
     d.add(5L);
     d.add(3L);
     d.add(0L);
@@ -43,7 +43,7 @@ public class TestTimestampDataBasic implements TestTimestampData {
 
   @Override
   public List<Long> getOutputs() {
-    List<Long> d = new ArrayList<Long>(4);
+    List<Long> d = new ArrayList<>(4);
     d.add(0L);
     d.add(1L);
     d.add(3L);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
index 2a5dcae..d0bc837 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
@@ -29,7 +29,7 @@ public class TestTimestampDataNumbers implements TestTimestampData {
 
   @Override
   public List<Long> getInputs() {
-    List<Long> d = new ArrayList<Long>(5);
+    List<Long> d = new ArrayList<>(5);
     d.add(5L << shift);
     d.add(3L << shift);
     d.add(7L << shift);
@@ -45,7 +45,7 @@ public class TestTimestampDataNumbers implements TestTimestampData {
 
   @Override
   public List<Long> getOutputs() {
-    List<Long> d = new ArrayList<Long>(4);
+    List<Long> d = new ArrayList<>(4);
     d.add(1L << shift);
     d.add(3L << shift);
     d.add(5L << shift);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
index 2186528..3320d66 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
@@ -29,7 +29,7 @@ public class TestTimestampDataRepeats implements TestTimestampData {
 
   @Override
   public List<Long> getInputs() {
-    List<Long> d = new ArrayList<Long>(5);
+    List<Long> d = new ArrayList<>(5);
     d.add(t);
     d.add(t);
     d.add(t);
@@ -45,7 +45,7 @@ public class TestTimestampDataRepeats implements TestTimestampData {
 
   @Override
   public List<Long> getOutputs() {
-    List<Long> d = new ArrayList<Long>();
+    List<Long> d = new ArrayList<>();
     return d;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index b38b96c..0856aa2 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -209,33 +209,28 @@ public class ProcedureExecutor<TEnvironment> {
    * Once a Root-Procedure completes (success or failure), the result will be added to this map.
    * The user of ProcedureExecutor should call getResult(procId) to get the result.
    */
-  private final ConcurrentHashMap<Long, ProcedureInfo> completed =
-    new ConcurrentHashMap<Long, ProcedureInfo>();
+  private final ConcurrentHashMap<Long, ProcedureInfo> completed = new ConcurrentHashMap<>();
 
   /**
   * Map the procId returned by submitProcedure(), the Root-ProcID, to the RootProcedureState.
   * The RootProcedureState contains the execution stack of the Root-Procedure;
   * it is added to the map by submitProcedure() and removed on procedure completion.
    */
-  private final ConcurrentHashMap<Long, RootProcedureState> rollbackStack =
-    new ConcurrentHashMap<Long, RootProcedureState>();
+  private final ConcurrentHashMap<Long, RootProcedureState> rollbackStack = new ConcurrentHashMap<>();
 
   /**
   * Helper map to look up the live procedures by ID.
   * This map contains every procedure: root-procedures and subprocedures.
    */
-  private final ConcurrentHashMap<Long, Procedure> procedures =
-    new ConcurrentHashMap<Long, Procedure>();
+  private final ConcurrentHashMap<Long, Procedure> procedures = new ConcurrentHashMap<>();
 
   /**
   * Helper map to look up whether the procedure was already issued by the same client.
    * This map contains every root procedure.
    */
-  private final ConcurrentHashMap<NonceKey, Long> nonceKeysToProcIdsMap =
-      new ConcurrentHashMap<NonceKey, Long>();
+  private final ConcurrentHashMap<NonceKey, Long> nonceKeysToProcIdsMap = new ConcurrentHashMap<>();
 
-  private final CopyOnWriteArrayList<ProcedureExecutorListener> listeners =
-    new CopyOnWriteArrayList<ProcedureExecutorListener>();
+  private final CopyOnWriteArrayList<ProcedureExecutorListener> listeners = new CopyOnWriteArrayList<>();
 
   private Configuration conf;
   private ThreadGroup threadGroup;
@@ -399,7 +394,7 @@ public class ProcedureExecutor<TEnvironment> {
           break;
         case WAITING_TIMEOUT:
           if (waitingSet == null) {
-            waitingSet = new HashSet<Procedure>();
+            waitingSet = new HashSet<>();
           }
           waitingSet.add(proc);
           break;
@@ -498,7 +493,7 @@ public class ProcedureExecutor<TEnvironment> {
 
     // Create the workers
     workerId.set(0);
-    workerThreads = new CopyOnWriteArrayList<WorkerThread>();
+    workerThreads = new CopyOnWriteArrayList<>();
     for (int i = 0; i < corePoolSize; ++i) {
       workerThreads.add(new WorkerThread(threadGroup));
     }
@@ -979,8 +974,7 @@ public class ProcedureExecutor<TEnvironment> {
    * @return the procedures in a list
    */
   public List<ProcedureInfo> listProcedures() {
-    final List<ProcedureInfo> procedureLists =
-        new ArrayList<ProcedureInfo>(procedures.size() + completed.size());
+    final List<ProcedureInfo> procedureLists = new ArrayList<>(procedures.size() + completed.size());
     for (Map.Entry<Long, Procedure> p: procedures.entrySet()) {
       procedureLists.add(ProcedureUtil.convertToProcedureInfo(p.getValue()));
     }
@@ -1614,7 +1608,7 @@ public class ProcedureExecutor<TEnvironment> {
   //  Timeout Thread
   // ==========================================================================
   private final class TimeoutExecutorThread extends StoppableThread {
-    private final DelayQueue<DelayedWithTimeout> queue = new DelayQueue<DelayedWithTimeout>();
+    private final DelayQueue<DelayedWithTimeout> queue = new DelayQueue<>();
 
     public TimeoutExecutorThread(final ThreadGroup group) {
       super(group, "ProcedureTimeoutExecutor");

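All of the ProcedureExecutor replacements above are plain constructor calls, which is no accident: before Java 9 the diamond cannot be applied to anonymous inner classes, so any `new Foo<T>() { ... }` in the codebase had to keep its explicit type arguments. A sketch of that boundary (assuming a Java 8 compiler, which matches HBase at the time of this commit):

    import java.util.Comparator;

    public class DiamondAnonymousLimit {
      public static void main(String[] args) {
        // Plain instantiation via a factory: inference is fine on Java 8.
        Comparator<String> byLength = Comparator.comparingInt(String::length);

        // Anonymous class: "new Comparator<>() { ... }" is rejected before
        // Java 9, so the explicit <String> must stay.
        Comparator<String> explicit = new Comparator<String>() {
          @Override
          public int compare(String a, String b) {
            return Integer.compare(a.length(), b.length());
          }
        };

        System.out.println(byLength.compare("ab", "abc"));   // negative
        System.out.println(explicit.compare("ab", "abc"));   // negative
      }
    }
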
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
index 2f118b7..4f9b136 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java
@@ -147,7 +147,7 @@ class RootProcedureState {
       state = State.FAILED;
     }
     if (subprocStack == null) {
-      subprocStack = new ArrayList<Procedure>();
+      subprocStack = new ArrayList<>();
     }
     proc.addStackIndex(subprocStack.size());
     subprocStack.add(proc);
@@ -156,7 +156,7 @@ class RootProcedureState {
   protected synchronized void addSubProcedure(final Procedure proc) {
     if (!proc.hasParent()) return;
     if (subprocs == null) {
-      subprocs = new HashSet<Procedure>();
+      subprocs = new HashSet<>();
     }
     subprocs.add(proc);
   }
@@ -173,7 +173,7 @@ class RootProcedureState {
     int[] stackIndexes = proc.getStackIndexes();
     if (stackIndexes != null) {
       if (subprocStack == null) {
-        subprocStack = new ArrayList<Procedure>();
+        subprocStack = new ArrayList<>();
       }
       int diff = (1 + stackIndexes[stackIndexes.length - 1]) - subprocStack.size();
       if (diff > 0) {

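RootProcedureState initializes its collections lazily, so the diamond appears inside a null check rather than at the declaration; inference still comes from the field's declared type. A minimal sketch of the pattern (names are illustrative, and the locking that the real synchronized methods provide is omitted):

    import java.util.ArrayList;
    import java.util.List;

    public class LazyInit {
      private List<String> stack;   // stays null until first use

      void push(String frame) {
        if (stack == null) {
          stack = new ArrayList<>();   // <String> inferred from the field
        }
        stack.add(frame);
      }

      int depth() {
        return stack == null ? 0 : stack.size();
      }
    }
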
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
index 3f9a7b7..5c3a4c7 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
@@ -130,7 +130,7 @@ public abstract class StateMachineProcedure<TEnvironment, TState>
    */
   protected void addChildProcedure(Procedure... subProcedure) {
     if (subProcList == null) {
-      subProcList = new ArrayList<Procedure>(subProcedure.length);
+      subProcList = new ArrayList<>(subProcedure.length);
     }
     for (int i = 0; i < subProcedure.length; ++i) {
       Procedure proc = subProcedure[i];

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreBase.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreBase.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreBase.java
index 0e0e46f..63eff37 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreBase.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/ProcedureStoreBase.java
@@ -25,8 +25,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
  * Base class for {@link ProcedureStore}s.
  */
 public abstract class ProcedureStoreBase implements ProcedureStore {
-  private final CopyOnWriteArrayList<ProcedureStoreListener> listeners =
-      new CopyOnWriteArrayList<ProcedureStoreListener>();
+  private final CopyOnWriteArrayList<ProcedureStoreListener> listeners = new CopyOnWriteArrayList<>();
 
   private final AtomicBoolean running = new AtomicBoolean(false);
 

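ProcedureStoreBase keeps its listeners in a CopyOnWriteArrayList, a good fit for rarely-mutated, frequently-iterated listener lists: iteration works over a snapshot and never throws ConcurrentModificationException. A hedged sketch of the register/notify pattern (the `Listener` interface is invented for the example):

    import java.util.concurrent.CopyOnWriteArrayList;

    public class ListenerSketch {
      interface Listener {
        void onEvent(String what);
      }

      // Element type Listener is inferred from the field declaration.
      private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>();

      void register(Listener l) {
        listeners.add(l);   // copies the backing array; cheap if registration is rare
      }

      void fire(String what) {
        for (Listener l : listeners) {   // iterates a snapshot
          l.onEvent(what);
        }
      }
    }
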
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
index ec59607..5ad96e1 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java
@@ -155,7 +155,7 @@ public class ProcedureWALPrettyPrinter extends Configured implements Tool {
     options.addOption("h", "help", false, "Output help message");
     options.addOption("f", "file", true, "File to print");
 
-    final List<Path> files = new ArrayList<Path>();
+    final List<Path> files = new ArrayList<>();
     try {
       CommandLine cmd = new PosixParser().parse(options, args);