Posted to commits@hbase.apache.org by st...@apache.org on 2017/03/07 19:23:33 UTC

[20/22] hbase git commit: HBASE-17532 Replaced explicit type with diamond operator

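The patch is purely mechanical: wherever a generic type is instantiated with explicit type arguments on the right-hand side, the arguments are dropped in favor of the Java 7 diamond operator and the compiler infers them from the declared type. A minimal standalone sketch of the before/after pattern, with illustrative names that are not taken from the HBase sources:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondOperatorSketch {
      public static void main(String[] args) {
        // Pre-diamond style: the type arguments are repeated on the right-hand side.
        List<String> walsExplicit = new ArrayList<String>();
        walsExplicit.add("old-style");

        // Diamond style (Java 7+): the compiler infers <String> and <String, Long>
        // from the declared types; runtime behavior is unchanged.
        List<String> wals = new ArrayList<>();
        Map<String, Long> metrics = new HashMap<>();

        wals.add("wal-00001");
        metrics.put("sizeOfLogQueue", 1L);
        System.out.println(wals + " " + metrics);
      }
    }
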
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesClientImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesClientImpl.java
index dcbed7a..3507547 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesClientImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesClientImpl.java
@@ -72,7 +72,7 @@ public class TableBasedReplicationQueuesClientImpl extends ReplicationTableBase
 
   @Override
   public Set<String> getAllWALs() {
-    Set<String> allWals = new HashSet<String>();
+    Set<String> allWals = new HashSet<>();
     ResultScanner allQueues = null;
     try (Table replicationTable = getOrBlockOnReplicationTable()) {
       allQueues = replicationTable.getScanner(new Scan());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
index 1023e0d..bf55e8c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
@@ -201,7 +201,7 @@ public class TableBasedReplicationQueuesImpl extends ReplicationTableBase
   public List<String> getLogsInQueue(String queueId) {
     String errMsg = "Failed getting logs in queue queueId=" + queueId;
     byte[] rowKey = queueIdToRowKey(queueId);
-    List<String> logs = new ArrayList<String>();
+    List<String> logs = new ArrayList<>();
     try {
       Get getQueue = new Get(rowKey);
       Result queue = getResultIfOwner(getQueue);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
index e48f81d..0e8a68d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SecurityInfo.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto
 @InterfaceAudience.Private
 public class SecurityInfo {
   /** Maps RPC service names to authentication information */
-  private static ConcurrentMap<String,SecurityInfo> infos = new ConcurrentHashMap<String,SecurityInfo>();
+  private static ConcurrentMap<String,SecurityInfo> infos = new ConcurrentHashMap<>();
   // populate info for known services
   static {
     infos.put(AdminProtos.AdminService.getDescriptor().getName(),

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index eeac9c7..1c4a868 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -266,7 +266,7 @@ public class AccessControlClient {
     HBaseRpcController controller
       = ((ClusterConnection) connection).getRpcControllerFactory().newController();
       */
-    List<UserPermission> permList = new ArrayList<UserPermission>();
+    List<UserPermission> permList = new ArrayList<>();
     try (Table table = connection.getTable(ACL_TABLE_NAME)) {
       try (Admin admin = connection.getAdmin()) {
         CoprocessorRpcChannel service = table.coprocessorService(HConstants.EMPTY_START_ROW);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
index 1d26366..1873ea3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
@@ -367,7 +367,7 @@ public class AccessControlUtil {
    */
   public static List<Permission.Action> toPermissionActions(
       List<AccessControlProtos.Permission.Action> protoActions) {
-    List<Permission.Action> actions = new ArrayList<Permission.Action>(protoActions.size());
+    List<Permission.Action> actions = new ArrayList<>(protoActions.size());
     for (AccessControlProtos.Permission.Action a : protoActions) {
       actions.add(toPermissionAction(a));
     }
@@ -644,7 +644,7 @@ public class AccessControlUtil {
     AccessControlProtos.GetUserPermissionsRequest request = builder.build();
     AccessControlProtos.GetUserPermissionsResponse response =
         protocol.getUserPermissions(controller, request);
-    List<UserPermission> perms = new ArrayList<UserPermission>(response.getUserPermissionCount());
+    List<UserPermission> perms = new ArrayList<>(response.getUserPermissionCount());
     for (AccessControlProtos.UserPermission perm: response.getUserPermissionList()) {
       perms.add(toUserPermission(perm));
     }
@@ -672,7 +672,7 @@ public class AccessControlUtil {
     AccessControlProtos.GetUserPermissionsRequest request = builder.build();
     AccessControlProtos.GetUserPermissionsResponse response =
         protocol.getUserPermissions(controller, request);
-    List<UserPermission> perms = new ArrayList<UserPermission>(response.getUserPermissionCount());
+    List<UserPermission> perms = new ArrayList<>(response.getUserPermissionCount());
     for (AccessControlProtos.UserPermission perm: response.getUserPermissionList()) {
       perms.add(toUserPermission(perm));
     }
@@ -700,7 +700,7 @@ public class AccessControlUtil {
     AccessControlProtos.GetUserPermissionsRequest request = builder.build();
     AccessControlProtos.GetUserPermissionsResponse response =
         protocol.getUserPermissions(controller, request);
-    List<UserPermission> perms = new ArrayList<UserPermission>(response.getUserPermissionCount());
+    List<UserPermission> perms = new ArrayList<>(response.getUserPermissionCount());
     for (AccessControlProtos.UserPermission perm: response.getUserPermissionList()) {
       perms.add(toUserPermission(perm));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
index 4b3ed54..5fdeee9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
@@ -34,7 +34,7 @@ public class Authorizations {
 
   private List<String> labels;
   public Authorizations(String... labels) {
-    this.labels = new ArrayList<String>(labels.length);
+    this.labels = new ArrayList<>(labels.length);
     Collections.addAll(this.labels, labels);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index cd153f1..d87bf14 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -130,7 +130,7 @@ public class VisibilityClient {
           new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() {
             ServerRpcController controller = new ServerRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<VisibilityLabelsResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
 
             public VisibilityLabelsResponse call(VisibilityLabelsService service)
                 throws IOException {
@@ -215,7 +215,7 @@ public class VisibilityClient {
             new Batch.Call<VisibilityLabelsService, GetAuthsResponse>() {
           ServerRpcController controller = new ServerRpcController();
           CoprocessorRpcUtils.BlockingRpcCallback<GetAuthsResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<GetAuthsResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
 
           public GetAuthsResponse call(VisibilityLabelsService service) throws IOException {
             GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder();
@@ -268,7 +268,7 @@ public class VisibilityClient {
           new Batch.Call<VisibilityLabelsService, ListLabelsResponse>() {
             ServerRpcController controller = new ServerRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<ListLabelsResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<ListLabelsResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
 
             public ListLabelsResponse call(VisibilityLabelsService service) throws IOException {
               ListLabelsRequest.Builder listAuthLabelsReqBuilder = ListLabelsRequest.newBuilder();
@@ -340,7 +340,7 @@ public class VisibilityClient {
             new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() {
           ServerRpcController controller = new ServerRpcController();
           CoprocessorRpcUtils.BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<VisibilityLabelsResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
 
           public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException {
             SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder();

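Note the asymmetry in the hunks above: the BlockingRpcCallback fields switch to the diamond operator, while the anonymous Batch.Call instances keep their explicit type arguments. On a Java 7/8 toolchain this is required, because the diamond cannot be combined with an anonymous class body before Java 9 (JEP 213). A small illustrative sketch, with hypothetical names not taken from HBase:

    import java.util.concurrent.Callable;

    public class DiamondAnonymousSketch {
      public static void main(String[] args) throws Exception {
        // Plain instantiation: diamond is fine, <String> is inferred.
        java.util.List<String> results = new java.util.ArrayList<>();

        // Anonymous class: the type argument must stay explicit on Java 7/8.
        // Writing "new Callable<>() { ... }" only compiles from Java 9 onward.
        Callable<String> task = new Callable<String>() {
          @Override
          public String call() {
            return "labels";
          }
        };

        results.add(task.call());
        System.out.println(results);
      }
    }
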
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 24302be..38ae04a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -191,8 +191,7 @@ public final class ProtobufUtil {
   /**
    * Primitive type to class mapping.
    */
-  private final static Map<String, Class<?>>
-    PRIMITIVES = new HashMap<String, Class<?>>();
+  private final static Map<String, Class<?>> PRIMITIVES = new HashMap<>();
 
   /**
    * Many results are simple: no cell, exists true or false. To save on object creations,
@@ -1491,7 +1490,7 @@ public final class ProtobufUtil {
       return proto.getStale() ? EMPTY_RESULT_STALE : EMPTY_RESULT;
     }
 
-    List<Cell> cells = new ArrayList<Cell>(values.size());
+    List<Cell> cells = new ArrayList<>(values.size());
     for (CellProtos.Cell c : values) {
       cells.add(toCell(c));
     }
@@ -1525,7 +1524,7 @@ public final class ProtobufUtil {
     List<Cell> cells = null;
     if (proto.hasAssociatedCellCount()) {
       int count = proto.getAssociatedCellCount();
-      cells = new ArrayList<Cell>(count + values.size());
+      cells = new ArrayList<>(count + values.size());
       for (int i = 0; i < count; i++) {
         if (!scanner.advance()) throw new IOException("Failed get " + i + " of " + count);
         cells.add(scanner.current());
@@ -1533,7 +1532,7 @@ public final class ProtobufUtil {
     }
 
     if (!values.isEmpty()){
-      if (cells == null) cells = new ArrayList<Cell>(values.size());
+      if (cells == null) cells = new ArrayList<>(values.size());
       for (CellProtos.Cell c: values) {
         cells.add(toCell(c));
       }
@@ -1903,7 +1902,7 @@ public final class ProtobufUtil {
    */
   static List<HRegionInfo> getRegionInfos(final GetOnlineRegionResponse proto) {
     if (proto == null) return null;
-    List<HRegionInfo> regionInfos = new ArrayList<HRegionInfo>(proto.getRegionInfoList().size());
+    List<HRegionInfo> regionInfos = new ArrayList<>(proto.getRegionInfoList().size());
     for (RegionInfo regionInfo: proto.getRegionInfoList()) {
       regionInfos.add(HRegionInfo.convert(regionInfo));
     }
@@ -2719,7 +2718,7 @@ public final class ProtobufUtil {
 
   public static List<ReplicationLoadSource> toReplicationLoadSourceList(
       List<ClusterStatusProtos.ReplicationLoadSource> clsList) {
-    ArrayList<ReplicationLoadSource> rlsList = new ArrayList<ReplicationLoadSource>(clsList.size());
+    ArrayList<ReplicationLoadSource> rlsList = new ArrayList<>(clsList.size());
     for (ClusterStatusProtos.ReplicationLoadSource cls : clsList) {
       rlsList.add(toReplicationLoadSource(cls));
     }
@@ -2976,26 +2975,26 @@ public final class ProtobufUtil {
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) {
 
     Map<ServerName, ServerLoad> servers = null;
-    servers = new HashMap<ServerName, ServerLoad>(proto.getLiveServersList().size());
+    servers = new HashMap<>(proto.getLiveServersList().size());
     for (LiveServerInfo lsi : proto.getLiveServersList()) {
       servers.put(ProtobufUtil.toServerName(
           lsi.getServer()), new ServerLoad(lsi.getServerLoad()));
     }
 
     Collection<ServerName> deadServers = null;
-    deadServers = new ArrayList<ServerName>(proto.getDeadServersList().size());
+    deadServers = new ArrayList<>(proto.getDeadServersList().size());
     for (HBaseProtos.ServerName sn : proto.getDeadServersList()) {
       deadServers.add(ProtobufUtil.toServerName(sn));
     }
 
     Collection<ServerName> backupMasters = null;
-    backupMasters = new ArrayList<ServerName>(proto.getBackupMastersList().size());
+    backupMasters = new ArrayList<>(proto.getBackupMastersList().size());
     for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) {
       backupMasters.add(ProtobufUtil.toServerName(sn));
     }
 
     Set<RegionState> rit = null;
-    rit = new HashSet<RegionState>(proto.getRegionsInTransitionList().size());
+    rit = new HashSet<>(proto.getRegionsInTransitionList().size());
     for (RegionInTransition region : proto.getRegionsInTransitionList()) {
       RegionState value = RegionState.convert(region.getRegionState());
       rit.add(value);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index d3ef7b8..998b3c0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -572,8 +572,7 @@ public final class RequestConverter {
             .setService(userToken.getService().toString()).build();
     }
 
-    List<ClientProtos.BulkLoadHFileRequest.FamilyPath> protoFamilyPaths =
-        new ArrayList<ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPaths.size());
+    List<ClientProtos.BulkLoadHFileRequest.FamilyPath> protoFamilyPaths = new ArrayList<>(familyPaths.size());
     if (!familyPaths.isEmpty()) {
       ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder pathBuilder
         = ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
index cbcad80..ecadbbc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java
@@ -233,7 +233,7 @@ public final class ResponseConverter {
   public static List<RegionOpeningState> getRegionOpeningStateList(
       final OpenRegionResponse proto) {
     if (proto == null) return null;
-    List<RegionOpeningState> regionOpeningStates = new ArrayList<RegionOpeningState>(proto.getOpeningStateCount());
+    List<RegionOpeningState> regionOpeningStates = new ArrayList<>(proto.getOpeningStateCount());
     for (int i = 0; i < proto.getOpeningStateCount(); i++) {
       regionOpeningStates.add(RegionOpeningState.valueOf(
           proto.getOpeningState(i).name()));
@@ -394,7 +394,7 @@ public final class ResponseConverter {
         boolean isPartial =
             response.getPartialFlagPerResultCount() > i ?
                 response.getPartialFlagPerResult(i) : false;
-        List<Cell> cells = new ArrayList<Cell>(noOfCells);
+        List<Cell> cells = new ArrayList<>(noOfCells);
         for (int j = 0; j < noOfCells; j++) {
           try {
             if (cellScanner.advance() == false) {
@@ -426,7 +426,7 @@ public final class ResponseConverter {
   }
 
   public static Map<String, Long> getScanMetrics(ScanResponse response) {
-    Map<String, Long> metricMap = new HashMap<String, Long>();
+    Map<String, Long> metricMap = new HashMap<>();
     if (response == null || !response.hasScanMetrics() || response.getScanMetrics() == null) {
       return metricMap;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index b683fcc..2131db3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
@@ -57,7 +57,7 @@ public class PoolMap<K, V> implements Map<K, V> {
 
   private int poolMaxSize;
 
-  private Map<K, Pool<V>> pools = new ConcurrentHashMap<K, Pool<V>>();
+  private Map<K, Pool<V>> pools = new ConcurrentHashMap<>();
 
   public PoolMap(PoolType poolType) {
     this.poolType = poolType;
@@ -107,7 +107,7 @@ public class PoolMap<K, V> implements Map<K, V> {
 
   @Override
   public Collection<V> values() {
-    Collection<V> values = new ArrayList<V>();
+    Collection<V> values = new ArrayList<>();
     for (Pool<V> pool : pools.values()) {
       Collection<V> poolValues = pool.values();
       if (poolValues != null) {
@@ -118,7 +118,7 @@ public class PoolMap<K, V> implements Map<K, V> {
   }
 
   public Collection<V> values(K key) {
-    Collection<V> values = new ArrayList<V>();
+    Collection<V> values = new ArrayList<>();
     Pool<V> pool = pools.get(key);
     if (pool != null) {
       Collection<V> poolValues = pool.values();
@@ -185,7 +185,7 @@ public class PoolMap<K, V> implements Map<K, V> {
 
   @Override
   public Set<Map.Entry<K, V>> entrySet() {
-    Set<Map.Entry<K, V>> entries = new HashSet<Entry<K, V>>();
+    Set<Map.Entry<K, V>> entries = new HashSet<>();
     for (Map.Entry<K, Pool<V>> poolEntry : pools.entrySet()) {
       final K poolKey = poolEntry.getKey();
       final Pool<V> pool = poolEntry.getValue();
@@ -271,11 +271,11 @@ public class PoolMap<K, V> implements Map<K, V> {
   protected Pool<V> createPool() {
     switch (poolType) {
     case Reusable:
-      return new ReusablePool<V>(poolMaxSize);
+      return new ReusablePool<>(poolMaxSize);
     case RoundRobin:
-      return new RoundRobinPool<V>(poolMaxSize);
+      return new RoundRobinPool<>(poolMaxSize);
     case ThreadLocal:
-      return new ThreadLocalPool<V>();
+      return new ThreadLocalPool<>();
     }
     return null;
   }
@@ -389,7 +389,7 @@ public class PoolMap<K, V> implements Map<K, V> {
    *          the type of the resource
    */
   static class ThreadLocalPool<R> extends ThreadLocal<R> implements Pool<R> {
-    private static final Map<ThreadLocalPool<?>, AtomicInteger> poolSizes = new HashMap<ThreadLocalPool<?>, AtomicInteger>();
+    private static final Map<ThreadLocalPool<?>, AtomicInteger> poolSizes = new HashMap<>();
 
     public ThreadLocalPool() {
     }
@@ -441,7 +441,7 @@ public class PoolMap<K, V> implements Map<K, V> {
 
     @Override
     public Collection<R> values() {
-      List<R> values = new ArrayList<R>();
+      List<R> values = new ArrayList<>();
       values.add(get());
       return values;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
index 940d523..abe3079 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
@@ -68,7 +68,7 @@ public class Writables {
    * @throws IOException e
    */
   public static byte [] getBytes(final Writable... ws) throws IOException {
-    List<byte []> bytes = new ArrayList<byte []>(ws.length);
+    List<byte []> bytes = new ArrayList<>(ws.length);
     int size = 0;
     for (Writable w: ws) {
       byte [] b = getBytes(w);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
index 6127997..0090b6f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
@@ -106,7 +106,7 @@ public class HQuorumPeer {
         conf.get("hbase.zookeeper.dns.interface","default"),
         conf.get("hbase.zookeeper.dns.nameserver","default")));
 
-    List<String> ips = new ArrayList<String>();
+    List<String> ips = new ArrayList<>();
 
     // Add what could be the best (configured) match
     ips.add(myAddress.contains(".") ?

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/InstancePending.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/InstancePending.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/InstancePending.java
index 7458ac7..e63bfc5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/InstancePending.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/InstancePending.java
@@ -74,7 +74,7 @@ class InstancePending<T> {
    */
   void prepare(T instance) {
     assert instance != null;
-    instanceHolder = new InstanceHolder<T>(instance);
+    instanceHolder = new InstanceHolder<>(instance);
     pendingLatch.countDown();
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index e8431a2..afab54a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -106,8 +106,8 @@ public class MetaTableLocator {
   public List<Pair<HRegionInfo, ServerName>> getMetaRegionsAndLocations(ZooKeeperWatcher zkw,
       int replicaId) {
     ServerName serverName = getMetaRegionLocation(zkw, replicaId);
-    List<Pair<HRegionInfo, ServerName>> list = new ArrayList<Pair<HRegionInfo, ServerName>>(1);
-    list.add(new Pair<HRegionInfo, ServerName>(RegionReplicaUtil.getRegionInfoForReplica(
+    List<Pair<HRegionInfo, ServerName>> list = new ArrayList<>(1);
+    list.add(new Pair<>(RegionReplicaUtil.getRegionInfoForReplica(
         HRegionInfo.FIRST_META_REGIONINFO, replicaId), serverName));
     return list;
   }
@@ -135,7 +135,7 @@ public class MetaTableLocator {
   private List<HRegionInfo> getListOfHRegionInfos(
       final List<Pair<HRegionInfo, ServerName>> pairs) {
     if (pairs == null || pairs.isEmpty()) return null;
-    List<HRegionInfo> result = new ArrayList<HRegionInfo>(pairs.size());
+    List<HRegionInfo> result = new ArrayList<>(pairs.size());
     for (Pair<HRegionInfo, ServerName> pair: pairs) {
       result.add(pair.getFirst());
     }
@@ -550,7 +550,7 @@ public class MetaTableLocator {
           throws InterruptedException {
     int numReplicasConfigured = 1;
 
-    List<ServerName> servers = new ArrayList<ServerName>();
+    List<ServerName> servers = new ArrayList<>();
     // Make the blocking call first so that we do the wait to know
     // the znodes are all in place or timeout.
     ServerName server = blockUntilAvailable(zkw, timeout);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/PendingWatcher.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/PendingWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/PendingWatcher.java
index 11d0e5d..da7d176 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/PendingWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/PendingWatcher.java
@@ -33,7 +33,7 @@ import org.apache.zookeeper.Watcher;
  * and then call the method {@code PendingWatcher.prepare}.
  */
 class PendingWatcher implements Watcher {
-  private final InstancePending<Watcher> pending = new InstancePending<Watcher>();
+  private final InstancePending<Watcher> pending = new InstancePending<>();
 
   @Override
   public void process(WatchedEvent event) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
index 14532cf..43a5ad9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
@@ -637,7 +637,7 @@ public class RecoverableZooKeeper {
   throws UnsupportedOperationException {
     if(ops == null) return null;
 
-    List<Op> preparedOps = new LinkedList<Op>();
+    List<Op> preparedOps = new LinkedList<>();
     for (Op op : ops) {
       if (op.getType() == ZooDefs.OpCode.create) {
         CreateRequest create = (CreateRequest)op.toRequestRecord();
@@ -777,7 +777,7 @@ public class RecoverableZooKeeper {
    */
   private static List<String> filterByPrefix(List<String> nodes,
       String... prefixes) {
-    List<String> lockChildren = new ArrayList<String>();
+    List<String> lockChildren = new ArrayList<>();
     for (String child : nodes){
       for (String prefix : prefixes){
         if (child.startsWith(prefix)){

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 3e00e04..c678a7c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -235,8 +235,7 @@ public class ZKUtil {
     private static final String CLIENT_KEYTAB_KERBEROS_CONFIG_NAME =
       "zookeeper-client-keytab-kerberos";
 
-    private static final Map<String, String> BASIC_JAAS_OPTIONS =
-      new HashMap<String,String>();
+    private static final Map<String, String> BASIC_JAAS_OPTIONS = new HashMap<>();
     static {
       String jaasEnvVar = System.getenv("HBASE_JAAS_DEBUG");
       if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
@@ -244,8 +243,7 @@ public class ZKUtil {
       }
     }
 
-    private static final Map<String,String> KEYTAB_KERBEROS_OPTIONS =
-      new HashMap<String,String>();
+    private static final Map<String,String> KEYTAB_KERBEROS_OPTIONS = new HashMap<>();
     static {
       KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
       KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
@@ -746,7 +744,7 @@ public class ZKUtil {
     List<String> nodes =
       ZKUtil.listChildrenAndWatchForNewChildren(zkw, baseNode);
     if (nodes != null) {
-      List<NodeAndData> newNodes = new ArrayList<NodeAndData>();
+      List<NodeAndData> newNodes = new ArrayList<>();
       for (String node : nodes) {
         String nodePath = ZKUtil.joinZNode(baseNode, node);
         byte[] data = ZKUtil.getDataAndWatch(zkw, nodePath);
@@ -905,11 +903,11 @@ public class ZKUtil {
       return Ids.OPEN_ACL_UNSAFE;
     }
     if (isSecureZooKeeper) {
-      ArrayList<ACL> acls = new ArrayList<ACL>();
+      ArrayList<ACL> acls = new ArrayList<>();
       // add permission to hbase supper user
       String[] superUsers = zkw.getConfiguration().getStrings(Superusers.SUPERUSER_CONF_KEY);
       if (superUsers != null) {
-        List<String> groups = new ArrayList<String>();
+        List<String> groups = new ArrayList<>();
         for (String user : superUsers) {
           if (AuthUtil.isGroupPrincipal(user)) {
             // TODO: Set node ACL for groups when ZK supports this feature
@@ -1327,7 +1325,7 @@ public class ZKUtil {
       LOG.warn("Given path is not valid!");
       return;
     }
-    List<ZKUtilOp> ops = new ArrayList<ZKUtil.ZKUtilOp>();
+    List<ZKUtilOp> ops = new ArrayList<>();
     for (String eachRoot : pathRoots) {
       List<String> children = listChildrenBFSNoWatch(zkw, eachRoot);
       // Delete the leaves first and eventually get rid of the root
@@ -1377,7 +1375,7 @@ public class ZKUtil {
       LOG.warn("Given path is not valid!");
       return;
     }
-    List<ZKUtilOp> ops = new ArrayList<ZKUtil.ZKUtilOp>();
+    List<ZKUtilOp> ops = new ArrayList<>();
     for (String eachRoot : pathRoots) {
       // ZooKeeper Watches are one time triggers; When children of parent nodes are deleted
       // recursively, must set another watch, get notified of delete node
@@ -1415,8 +1413,8 @@ public class ZKUtil {
    */
   private static List<String> listChildrenBFSNoWatch(ZooKeeperWatcher zkw,
       final String znode) throws KeeperException {
-    Deque<String> queue = new LinkedList<String>();
-    List<String> tree = new ArrayList<String>();
+    Deque<String> queue = new LinkedList<>();
+    List<String> tree = new ArrayList<>();
     queue.add(znode);
     while (true) {
       String node = queue.pollFirst();
@@ -1451,8 +1449,8 @@ public class ZKUtil {
    */
   private static List<String> listChildrenBFSAndWatchThem(ZooKeeperWatcher zkw, final String znode)
       throws KeeperException {
-    Deque<String> queue = new LinkedList<String>();
-    List<String> tree = new ArrayList<String>();
+    Deque<String> queue = new LinkedList<>();
+    List<String> tree = new ArrayList<>();
     queue.add(znode);
     while (true) {
       String node = queue.pollFirst();
@@ -1648,7 +1646,7 @@ public class ZKUtil {
     }
     if (ops == null) return;
 
-    List<Op> zkOps = new LinkedList<Op>();
+    List<Op> zkOps = new LinkedList<>();
     for (ZKUtilOp op : ops) {
       zkOps.add(toZooKeeperOp(zkw, op));
     }
@@ -1816,7 +1814,7 @@ public class ZKUtil {
 
   private static void appendRSZnodes(ZooKeeperWatcher zkw, String znode, StringBuilder sb)
       throws KeeperException {
-    List<String> stack = new LinkedList<String>();
+    List<String> stack = new LinkedList<>();
     stack.add(znode);
     do {
       String znodeToProcess = stack.remove(stack.size() - 1);
@@ -1927,7 +1925,7 @@ public class ZKUtil {
         socket.getInputStream()));
       out.println("stat");
       out.flush();
-      ArrayList<String> res = new ArrayList<String>();
+      ArrayList<String> res = new ArrayList<>();
       while (true) {
         String line = in.readLine();
         if (line != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
index c8462fb..f18b8ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
@@ -83,8 +83,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
   public final ZNodePaths znodePaths;
 
   // listeners to be notified
-  private final List<ZooKeeperListener> listeners =
-    new CopyOnWriteArrayList<ZooKeeperListener>();
+  private final List<ZooKeeperListener> listeners = new CopyOnWriteArrayList<>();
 
   // Used by ZKUtil:waitForZKConnectionIfAuthenticating to wait for SASL
   // negotiation to complete
@@ -374,7 +373,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
    */
   public List<String> getMetaReplicaNodes() throws KeeperException {
     List<String> childrenOfBaseNode = ZKUtil.listChildrenNoWatch(this, znodePaths.baseZNode);
-    List<String> metaReplicaNodes = new ArrayList<String>(2);
+    List<String> metaReplicaNodes = new ArrayList<>(2);
     if (childrenOfBaseNode != null) {
       String pattern = conf.get("zookeeper.znode.metaserver","meta-region-server");
       for (String child : childrenOfBaseNode) {
@@ -416,7 +415,7 @@ public class ZooKeeperWatcher implements Watcher, Abortable, Closeable {
    * Get a copy of current registered listeners
    */
   public List<ZooKeeperListener> getListeners() {
-    return new ArrayList<ZooKeeperListener>(listeners);
+    return new ArrayList<>(listeners);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
index d0b6317..9acbb43 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -342,7 +342,7 @@ public class TestInterfaceAudienceAnnotations {
   @Test
   public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError {
     Set<Class<?>> classes = findPublicClasses();
-    List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<Pair<Class<?>, Method>>();
+    List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<>();
     for (Class<?> clazz : classes) {
       findProtoInReturnType(clazz, protosReturnType);
     }
@@ -374,8 +374,7 @@ public class TestInterfaceAudienceAnnotations {
   @Test
   public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError {
     Set<Class<?>> classes = findPublicClasses();
-    List<Triple<Class<?>, Method, Class<?>>> protosParamType =
-        new ArrayList<Triple<Class<?>, Method, Class<?>>>();
+    List<Triple<Class<?>, Method, Class<?>>> protosParamType = new ArrayList<>();
     for (Class<?> clazz : classes) {
       findProtoInParamType(clazz, protosParamType);
     }
@@ -395,7 +394,7 @@ public class TestInterfaceAudienceAnnotations {
   @Test
   public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError {
     Set<Class<?>> classes = findPublicClasses();
-    List<Class<?>> classList = new ArrayList<Class<?>>();
+    List<Class<?>> classList = new ArrayList<>();
     for (Class<?> clazz : classes) {
       Constructor<?>[] constructors = clazz.getConstructors();
       for (Constructor<?> cons : constructors) {
@@ -424,7 +423,7 @@ public class TestInterfaceAudienceAnnotations {
 
   private void findProtoInReturnType(Class<?> clazz,
       List<Pair<Class<?>, Method>> protosReturnType) {
-    Pair<Class<?>, Method> returnTypePair = new Pair<Class<?>, Method>();
+    Pair<Class<?>, Method> returnTypePair = new Pair<>();
     Method[] methods = clazz.getMethods();
     returnTypePair.setFirst(clazz);
     for (Method method : methods) {
@@ -443,7 +442,7 @@ public class TestInterfaceAudienceAnnotations {
 
   private void findProtoInParamType(Class<?> clazz,
       List<Triple<Class<?>, Method, Class<?>>> protosParamType) {
-    Triple<Class<?>, Method, Class<?>> paramType = new Triple<Class<?>, Method, Class<?>>();
+    Triple<Class<?>, Method, Class<?>> paramType = new Triple<>();
     Method[] methods = clazz.getMethods();
     paramType.setFirst(clazz);
     for (Method method : methods) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index 75199a6..f2f0467 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -147,7 +147,7 @@ public class TestAsyncProcess {
   static class MyAsyncProcess extends AsyncProcess {
     final AtomicInteger nbMultiResponse = new AtomicInteger();
     final AtomicInteger nbActions = new AtomicInteger();
-    public List<AsyncRequestFuture> allReqs = new ArrayList<AsyncRequestFuture>();
+    public List<AsyncRequestFuture> allReqs = new ArrayList<>();
     public AtomicInteger callsCt = new AtomicInteger();
 
     private long previousTimeout = -1;
@@ -162,7 +162,7 @@ public class TestAsyncProcess {
           return DUMMY_TABLE;
         }
       };
-      AsyncRequestFutureImpl<Res> r = new MyAsyncRequestFutureImpl<Res>(
+      AsyncRequestFutureImpl<Res> r = new MyAsyncRequestFutureImpl<>(
           wrap, actions, nonceGroup, this);
       allReqs.add(r);
       return r;
@@ -326,9 +326,9 @@ public class TestAsyncProcess {
     }
   }
   class MyAsyncProcessWithReplicas extends MyAsyncProcess {
-    private Set<byte[]> failures = new TreeSet<byte[]>(new Bytes.ByteArrayComparator());
+    private Set<byte[]> failures = new TreeSet<>(new Bytes.ByteArrayComparator());
     private long primarySleepMs = 0, replicaSleepMs = 0;
-    private Map<ServerName, Long> customPrimarySleepMs = new HashMap<ServerName, Long>();
+    private Map<ServerName, Long> customPrimarySleepMs = new HashMap<>();
     private final AtomicLong replicaCalls = new AtomicLong(0);
 
     public void addFailures(HRegionInfo... hris) {
@@ -683,7 +683,7 @@ public class TestAsyncProcess {
     ClusterConnection hc = createHConnection();
     MyAsyncProcess ap = new MyAsyncProcess(hc, CONF);
 
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     puts.add(createPut(1, true));
 
     ap.submit(null, DUMMY_TABLE, puts, false, null, false);
@@ -702,7 +702,7 @@ public class TestAsyncProcess {
     };
     MyAsyncProcess ap = new MyAsyncProcess(hc, CONF);
 
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     puts.add(createPut(1, true));
 
     final AsyncRequestFuture ars = ap.submit(null, DUMMY_TABLE, puts, false, cb, false);
@@ -719,7 +719,7 @@ public class TestAsyncProcess {
       SimpleRequestController.class.getName());
     MyAsyncProcess ap = new MyAsyncProcess(conn, CONF);
     SimpleRequestController controller = (SimpleRequestController) ap.requestController;
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     puts.add(createPut(1, true));
 
     for (int i = 0; i != controller.maxConcurrentTasksPerRegion; ++i) {
@@ -748,7 +748,7 @@ public class TestAsyncProcess {
     SimpleRequestController controller = (SimpleRequestController) ap.requestController;
     controller.taskCounterPerServer.put(sn2, new AtomicInteger(controller.maxConcurrentTasksPerServer));
 
-    List<Put> puts = new ArrayList<Put>(4);
+    List<Put> puts = new ArrayList<>(4);
     puts.add(createPut(1, true));
     puts.add(createPut(3, true)); // <== this one won't be taken, the rs is busy
     puts.add(createPut(1, true)); // <== this one will make it, the region is already in
@@ -770,7 +770,7 @@ public class TestAsyncProcess {
   public void testFail() throws Exception {
     MyAsyncProcess ap = new MyAsyncProcess(createHConnection(), CONF, false);
 
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     Put p = createPut(1, false);
     puts.add(p);
 
@@ -818,7 +818,7 @@ public class TestAsyncProcess {
       }
     };
 
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     Put p = createPut(1, true);
     puts.add(p);
 
@@ -844,7 +844,7 @@ public class TestAsyncProcess {
   public void testFailAndSuccess() throws Exception {
     MyAsyncProcess ap = new MyAsyncProcess(createHConnection(), CONF, false);
 
-    List<Put> puts = new ArrayList<Put>(3);
+    List<Put> puts = new ArrayList<>(3);
     puts.add(createPut(1, false));
     puts.add(createPut(1, true));
     puts.add(createPut(1, true));
@@ -871,7 +871,7 @@ public class TestAsyncProcess {
   public void testFlush() throws Exception {
     MyAsyncProcess ap = new MyAsyncProcess(createHConnection(), CONF, false);
 
-    List<Put> puts = new ArrayList<Put>(3);
+    List<Put> puts = new ArrayList<>(3);
     puts.add(createPut(1, false));
     puts.add(createPut(1, true));
     puts.add(createPut(1, true));
@@ -956,7 +956,7 @@ public class TestAsyncProcess {
       }
     };
 
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     puts.add(createPut(1, true));
 
     t.start();
@@ -981,7 +981,7 @@ public class TestAsyncProcess {
     t2.start();
 
     long start = System.currentTimeMillis();
-    ap.submit(null, DUMMY_TABLE, new ArrayList<Row>(), false, null, false);
+    ap.submit(null, DUMMY_TABLE, new ArrayList<>(), false, null, false);
     long end = System.currentTimeMillis();
 
     //Adds 100 to secure us against approximate timing.
@@ -1008,7 +1008,7 @@ public class TestAsyncProcess {
     setMockLocation(hc, DUMMY_BYTES_1, hrls1);
     setMockLocation(hc, DUMMY_BYTES_2, hrls2);
     setMockLocation(hc, DUMMY_BYTES_3, hrls3);
-    List<HRegionLocation> locations = new ArrayList<HRegionLocation>();
+    List<HRegionLocation> locations = new ArrayList<>();
     for (HRegionLocation loc : hrls1.getRegionLocations()) {
       locations.add(loc);
     }
@@ -1172,7 +1172,7 @@ public class TestAsyncProcess {
     HTable ht = new HTable(conn, mutator);
     ht.multiAp = new MyAsyncProcess(conn, CONF, false);
 
-    List<Put> puts = new ArrayList<Put>(7);
+    List<Put> puts = new ArrayList<>(7);
     puts.add(createPut(1, true));
     puts.add(createPut(1, true));
     puts.add(createPut(1, true));
@@ -1309,8 +1309,8 @@ public class TestAsyncProcess {
   @Test
   public void testThreadCreation() throws Exception {
     final int NB_REGS = 100;
-    List<HRegionLocation> hrls = new ArrayList<HRegionLocation>(NB_REGS);
-    List<Get> gets = new ArrayList<Get>(NB_REGS);
+    List<HRegionLocation> hrls = new ArrayList<>(NB_REGS);
+    List<Get> gets = new ArrayList<>(NB_REGS);
     for (int i = 0; i < NB_REGS; i++) {
       HRegionInfo hri = new HRegionInfo(
           DUMMY_TABLE, Bytes.toBytes(i * 10L), Bytes.toBytes(i * 10L + 9L), false, i);
@@ -1518,7 +1518,7 @@ public class TestAsyncProcess {
   }
 
   private static List<Get> makeTimelineGets(byte[]... rows) {
-    List<Get> result = new ArrayList<Get>(rows.length);
+    List<Get> result = new ArrayList<>(rows.length);
     for (byte[] row : rows) {
       Get get = new Get(row);
       get.setConsistency(Consistency.TIMELINE);
@@ -1609,10 +1609,10 @@ public class TestAsyncProcess {
     ClusterConnection hc = createHConnection();
     MyThreadPoolExecutor myPool =
         new MyThreadPoolExecutor(1, 20, 60, TimeUnit.SECONDS,
-            new LinkedBlockingQueue<Runnable>(200));
+            new LinkedBlockingQueue<>(200));
     AsyncProcess ap = new AsyncProcessForThrowableCheck(hc, CONF);
 
-    List<Put> puts = new ArrayList<Put>(1);
+    List<Put> puts = new ArrayList<>(1);
     puts.add(createPut(1, true));
     AsyncProcessTask task = AsyncProcessTask.newBuilder()
             .setPool(myPool)

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index a4be9a2..d20c7c8 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -368,8 +368,7 @@ public class TestClientNoCluster extends Configured implements Tool {
     throws IOException {
       super(conf, pool, user);
       int serverCount = conf.getInt("hbase.test.servers", 10);
-      this.serversByClient =
-        new HashMap<ServerName, ClientService.BlockingInterface>(serverCount);
+      this.serversByClient = new HashMap<>(serverCount);
       this.meta = makeMeta(Bytes.toBytes(
         conf.get("hbase.test.tablename", Bytes.toString(BIG_USER_TABLE))),
         conf.getInt("hbase.test.regions", 100),
@@ -694,14 +693,13 @@ public class TestClientNoCluster extends Configured implements Tool {
       final int regionCount, final long namespaceSpan, final int serverCount) {
     // I need a comparator for meta rows so we sort properly.
     SortedMap<byte [], Pair<HRegionInfo, ServerName>> meta =
-      new ConcurrentSkipListMap<byte[], Pair<HRegionInfo,ServerName>>(new MetaRowsComparator());
+      new ConcurrentSkipListMap<>(new MetaRowsComparator());
     HRegionInfo [] hris = makeHRegionInfos(tableName, regionCount, namespaceSpan);
     ServerName [] serverNames = makeServerNames(serverCount);
     int per = regionCount / serverCount;
     int count = 0;
     for (HRegionInfo hri: hris) {
-      Pair<HRegionInfo, ServerName> p =
-        new Pair<HRegionInfo, ServerName>(hri, serverNames[count++ / per]);
+      Pair<HRegionInfo, ServerName> p = new Pair<>(hri, serverNames[count++ / per]);
       meta.put(hri.getRegionName(), p);
     }
     return meta;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
index e93319a..f22e5d4 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
@@ -48,7 +48,7 @@ public class TestKeyOnlyFilter {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> paramList = new ArrayList<Object[]>(2);
+    List<Object[]> paramList = new ArrayList<>(2);
     {
       paramList.add(new Object[] { false });
       paramList.add(new Object[] { true });

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
index 0659f30..0ec78ad 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
@@ -40,7 +40,7 @@ public class TestHBaseRpcControllerImpl {
   @Test
   public void testListOfCellScannerables() throws IOException {
     final int count = 10;
-    List<CellScannable> cells = new ArrayList<CellScannable>(count);
+    List<CellScannable> cells = new ArrayList<>(count);
 
     for (int i = 0; i < count; i++) {
       cells.add(createCell(i));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java
index d2d0a53..771cf52 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/BuilderStyleTest.java
@@ -80,7 +80,7 @@ public class BuilderStyleTest {
           }
           Set<Method> sigMethods = methodsBySignature.get(sig);
           if (sigMethods == null) {
-            sigMethods = new HashSet<Method>();
+            sigMethods = new HashSet<>();
             methodsBySignature.put(sig, sigMethods);
           }
           sigMethods.add(method);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java
index 667fed8..e67c9fd 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestInstancePending.java
@@ -29,8 +29,8 @@ import org.junit.experimental.categories.Category;
 public class TestInstancePending {
   @Test(timeout = 1000)
   public void test() throws Exception {
-    final InstancePending<String> pending = new InstancePending<String>();
-    final AtomicReference<String> getResultRef = new AtomicReference<String>();
+    final InstancePending<String> pending = new InstancePending<>();
+    final AtomicReference<String> getResultRef = new AtomicReference<>();
 
     new Thread() {
       @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 28c1d88..5930928 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -1597,7 +1597,7 @@ public final class CellUtil {
    * @return Tags in the given Cell as a List
    */
   public static List<Tag> getTags(Cell cell) {
-    List<Tag> tags = new ArrayList<Tag>();
+    List<Tag> tags = new ArrayList<>();
     Iterator<Tag> tagsItr = tagsIterator(cell);
     while (tagsItr.hasNext()) {
       tags.add(tagsItr.next());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
index 99dc163..d4ec48e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
@@ -134,8 +134,8 @@ public class ChoreService implements ChoreServicer {
     }
 
     scheduler.setRemoveOnCancelPolicy(true);
-    scheduledChores = new HashMap<ScheduledChore, ScheduledFuture<?>>();
-    choresMissingStartTime = new HashMap<ScheduledChore, Boolean>();
+    scheduledChores = new HashMap<>();
+    choresMissingStartTime = new HashMap<>();
   }
 
   /**
@@ -348,7 +348,7 @@ public class ChoreService implements ChoreServicer {
   }
 
   private void cancelAllChores(final boolean mayInterruptIfRunning) {
-    ArrayList<ScheduledChore> choresToCancel = new ArrayList<ScheduledChore>(scheduledChores.keySet().size());
+    ArrayList<ScheduledChore> choresToCancel = new ArrayList<>(scheduledChores.keySet().size());
     // Build list of chores to cancel so we can iterate through a set that won't change
     // as chores are cancelled. If we tried to cancel each chore while iterating through
     // keySet the results would be undefined because the keySet would be changing
@@ -365,7 +365,7 @@ public class ChoreService implements ChoreServicer {
    * Prints a summary of important details about the chore. Used for debugging purposes
    */
   private void printChoreDetails(final String header, ScheduledChore chore) {
-    LinkedHashMap<String, String> output = new LinkedHashMap<String, String>();
+    LinkedHashMap<String, String> output = new LinkedHashMap<>();
     output.put(header, "");
     output.put("Chore name: ", chore.getName());
     output.put("Chore period: ", Integer.toString(chore.getPeriod()));
@@ -380,7 +380,7 @@ public class ChoreService implements ChoreServicer {
    * Prints a summary of important details about the service. Used for debugging purposes
    */
   private void printChoreServiceDetails(final String header) {
-    LinkedHashMap<String, String> output = new LinkedHashMap<String, String>();
+    LinkedHashMap<String, String> output = new LinkedHashMap<>();
     output.put(header, "");
     output.put("ChoreService corePoolSize: ", Integer.toString(getCorePoolSize()));
     output.put("ChoreService scheduledChores: ", Integer.toString(getNumberOfScheduledChores()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
index 0eda1e5..a7fcba6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CompoundConfiguration.java
@@ -72,8 +72,7 @@ public class CompoundConfiguration extends Configuration {
     int size();
   }
 
-  private final List<ImmutableConfigMap> configs
-    = new ArrayList<ImmutableConfigMap>();
+  private final List<ImmutableConfigMap> configs = new ArrayList<>();
 
   static class ImmutableConfWrapper implements  ImmutableConfigMap {
    private final Configuration c;
@@ -167,7 +166,7 @@ public class CompoundConfiguration extends Configuration {
 
       @Override
       public Iterator<Map.Entry<String,String>> iterator() {
-        Map<String, String> ret = new HashMap<String, String>();
+        Map<String, String> ret = new HashMap<>();
         for (Map.Entry<Bytes, Bytes> entry : map.entrySet()) {
           String key = Bytes.toString(entry.getKey().get());
           String val = entry.getValue() == null ? null : Bytes.toString(entry.getValue().get());
@@ -366,7 +365,7 @@ public class CompoundConfiguration extends Configuration {
 
   @Override
   public Iterator<Map.Entry<String, String>> iterator() {
-    Map<String, String> ret = new HashMap<String, String>();
+    Map<String, String> ret = new HashMap<>();
 
     // add in reverse order so that oldest get overridden.
     if (!configs.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 0434820..96fc30b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -81,7 +81,7 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 public class KeyValue implements ExtendedCell {
-  private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<Tag>();
+  private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<>();
 
   private static final Log LOG = LogFactory.getLog(KeyValue.class);
 
@@ -1174,7 +1174,7 @@ public class KeyValue implements ExtendedCell {
    * @return the Map&lt;String,?&gt; containing data from this key
    */
   public Map<String, Object> toStringMap() {
-    Map<String, Object> stringMap = new HashMap<String, Object>();
+    Map<String, Object> stringMap = new HashMap<>();
     stringMap.put("row", Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()));
     stringMap.put("family",
       Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength()));
@@ -1184,7 +1184,7 @@ public class KeyValue implements ExtendedCell {
     stringMap.put("vlen", getValueLength());
     List<Tag> tags = getTags();
     if (tags != null) {
-      List<String> tagsString = new ArrayList<String>(tags.size());
+      List<String> tagsString = new ArrayList<>(tags.size());
       for (Tag t : tags) {
         tagsString.add(t.toString());
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
index ca990cf..807749a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -496,7 +496,7 @@ public class KeyValueUtil {
         return KeyValueUtil.ensureKeyValue(arg0);
       }
     });
-    return new ArrayList<KeyValue>(lazyList);
+    return new ArrayList<>(lazyList);
   }
   /**
    * Write out a KeyValue in the manner in which we used to when KeyValue was a
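
One caveat suggested by this file (hedged, since only part of the method is visible in the hunk): the diamond operator applies to ordinary constructor calls such as new ArrayList<>(lazyList), but on Java 7 and 8 it cannot be used for anonymous class instantiations like the one whose closing lines appear just above, which keeps its explicit type arguments. A minimal standalone sketch with hypothetical names:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    public class AnonymousClassSketch {  // hypothetical example class
      public static void main(String[] args) {
        // Diamond is fine on an ordinary (copy) constructor call.
        List<String> copy = new ArrayList<>(Arrays.asList("bb", "a"));

        // Anonymous classes still need explicit type arguments here;
        // "new Comparator<>() { ... }" only compiles from Java 9 onward.
        Comparator<String> byLength = new Comparator<String>() {
          @Override
          public int compare(String x, String y) {
            return Integer.compare(x.length(), y.length());
          }
        };
        Collections.sort(copy, byLength);
        System.out.println(copy);  // prints [a, bb]
      }
    }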

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
index e1ceace..23876ab 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
@@ -57,14 +57,14 @@ public class NamespaceDescriptor {
 
   public final static Set<String> RESERVED_NAMESPACES;
   static {
-    Set<String> set = new HashSet<String>();
+    Set<String> set = new HashSet<>();
     set.add(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR);
     set.add(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR);
     RESERVED_NAMESPACES = Collections.unmodifiableSet(set);
   }
   public final static Set<byte[]> RESERVED_NAMESPACES_BYTES;
   static {
-    Set<byte[]> set = new TreeSet<byte[]>(Bytes.BYTES_RAWCOMPARATOR);
+    Set<byte[]> set = new TreeSet<>(Bytes.BYTES_RAWCOMPARATOR);
     for(String name: RESERVED_NAMESPACES) {
       set.add(Bytes.toBytes(name));
     }
@@ -165,7 +165,7 @@ public class NamespaceDescriptor {
   @InterfaceStability.Evolving
   public static class Builder {
     private String bName;
-    private Map<String, String> bConfiguration = new TreeMap<String, String>();
+    private Map<String, String> bConfiguration = new TreeMap<>();
 
     private Builder(NamespaceDescriptor ns) {
       this.bName = ns.name;
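
As an aside on the TreeSet change above (a sketch, not part of the patch): when the constructor takes arguments, such as a comparator, only the type arguments are elided and the arguments themselves are unchanged. The example below uses a standard JDK comparator rather than Bytes.BYTES_RAWCOMPARATOR; the names are hypothetical.

    import java.util.Set;
    import java.util.TreeSet;

    public class ComparatorDiamondSketch {  // hypothetical example class
      public static void main(String[] args) {
        // Element type String is inferred from the declaration; the
        // comparator argument is unaffected by the diamond conversion.
        Set<String> names = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        names.add("Default");
        names.add("DEFAULT");  // equal under the comparator, so not added again
        System.out.println(names);  // prints [Default]
      }
    }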

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
index 499ffd9..0c0a7ff 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -99,7 +99,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
    * @see #getVersionedBytes()
    */
   private byte [] bytes;
-  public static final List<ServerName> EMPTY_SERVER_LIST = new ArrayList<ServerName>(0);
+  public static final List<ServerName> EMPTY_SERVER_LIST = new ArrayList<>(0);
 
   protected ServerName(final String hostname, final int port, final long startcode) {
     this(Address.fromParts(hostname, port), startcode);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
index 63066b3..9b9755b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
 public final class TableName implements Comparable<TableName> {
 
   /** See {@link #createTableNameIfNecessary(ByteBuffer, ByteBuffer)} */
-  private static final Set<TableName> tableCache = new CopyOnWriteArraySet<TableName>();
+  private static final Set<TableName> tableCache = new CopyOnWriteArraySet<>();
 
   /** Namespace delimiter */
   //this should always be only 1 byte long

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
index f9668dd..936d8c2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
@@ -72,7 +72,7 @@ public final class TagUtil {
    * @return List of tags
    */
   public static List<Tag> asList(byte[] b, int offset, int length) {
-    List<Tag> tags = new ArrayList<Tag>();
+    List<Tag> tags = new ArrayList<>();
     int pos = offset;
     while (pos < offset + length) {
       int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
@@ -91,7 +91,7 @@ public final class TagUtil {
    * @return List of tags
    */
   public static List<Tag> asList(ByteBuffer b, int offset, int length) {
-    List<Tag> tags = new ArrayList<Tag>();
+    List<Tag> tags = new ArrayList<>();
     int pos = offset;
     while (pos < offset + length) {
       int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE);
@@ -239,7 +239,7 @@ public final class TagUtil {
     }
     List<Tag> tags = tagsOrNull;
     if (tags == null) {
-      tags = new ArrayList<Tag>();
+      tags = new ArrayList<>();
     }
     while (itr.hasNext()) {
       tags.add(itr.next());
@@ -276,7 +276,7 @@ public final class TagUtil {
     // in the array so set its size to '1' (I saw this being done in earlier version of
     // tag-handling).
     if (tags == null) {
-      tags = new ArrayList<Tag>(1);
+      tags = new ArrayList<>(1);
     }
     tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
     return tags;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
index 939d12d..079a277 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
@@ -51,7 +51,7 @@ import com.google.common.annotations.VisibleForTesting;
 public class BoundedByteBufferPool {
   private static final Log LOG = LogFactory.getLog(BoundedByteBufferPool.class);
 
-  private final Queue<ByteBuffer> buffers = new ConcurrentLinkedQueue<ByteBuffer>();
+  private final Queue<ByteBuffer> buffers = new ConcurrentLinkedQueue<>();
 
   @VisibleForTesting
   int getQueueSize() {
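
A standalone sketch (not part of the patch) of the field pattern in BoundedByteBufferPool above: the field is declared against the Queue interface, the concrete class appears only on the right-hand side, and the element type is inferred by the diamond. Names below are hypothetical.

    import java.nio.ByteBuffer;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;

    public class PoolFieldSketch {  // hypothetical example class
      // Declared against the interface; ByteBuffer is inferred by the diamond.
      private final Queue<ByteBuffer> buffers = new ConcurrentLinkedQueue<>();

      public static void main(String[] args) {
        PoolFieldSketch pool = new PoolFieldSketch();
        pool.buffers.offer(ByteBuffer.allocate(64));
        System.out.println(pool.buffers.size());  // prints 1
      }
    }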

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
index c334a5a..9c6796e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java
@@ -44,8 +44,8 @@ public class ByteBufferListOutputStream extends ByteBufferOutputStream {
   // it is not available will make a new one our own and keep writing to that. We keep track of all
   // the BBs that we got from pool, separately so that on closeAndPutbackBuffers, we can make sure
   // to return back all of them to pool
-  protected List<ByteBuffer> allBufs = new ArrayList<ByteBuffer>();
-  protected List<ByteBuffer> bufsFromPool = new ArrayList<ByteBuffer>();
+  protected List<ByteBuffer> allBufs = new ArrayList<>();
+  protected List<ByteBuffer> bufsFromPool = new ArrayList<>();
 
   private boolean lastBufFlipped = false;// Indicate whether the curBuf/lastBuf is flipped already
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
index 115671d..07ba3db 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java
@@ -54,7 +54,7 @@ public class ByteBufferPool {
   public static final int DEFAULT_BUFFER_SIZE = 64 * 1024;// 64 KB. Making it same as the chunk size
                                                           // what we will write/read to/from the
                                                           // socket channel.
-  private final Queue<ByteBuffer> buffers = new ConcurrentLinkedQueue<ByteBuffer>();
+  private final Queue<ByteBuffer> buffers = new ConcurrentLinkedQueue<>();
 
   private final int bufferSize;
   private final int maxPoolSize;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index ad89ca0..b6c2e97 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -533,15 +533,14 @@ public final class Encryption {
     }
   }
 
-  static final Map<Pair<String,String>,KeyProvider> keyProviderCache =
-      new ConcurrentHashMap<Pair<String,String>,KeyProvider>();
+  static final Map<Pair<String,String>,KeyProvider> keyProviderCache = new ConcurrentHashMap<>();
 
   public static KeyProvider getKeyProvider(Configuration conf) {
     String providerClassName = conf.get(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY,
       KeyStoreKeyProvider.class.getName());
     String providerParameters = conf.get(HConstants.CRYPTO_KEYPROVIDER_PARAMETERS_KEY, "");
     try {
-      Pair<String,String> providerCacheKey = new Pair<String,String>(providerClassName,
+      Pair<String,String> providerCacheKey = new Pair<>(providerClassName,
         providerParameters);
       KeyProvider provider = keyProviderCache.get(providerCacheKey);
       if (provider != null) {
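
For context (a sketch, not from the patch): the savings from the diamond operator are largest with nested type arguments like the Pair-keyed provider cache above. The example below uses AbstractMap.SimpleEntry as a stand-in for the HBase Pair class; all names are hypothetical.

    import java.util.AbstractMap.SimpleEntry;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class NestedGenericsSketch {  // hypothetical example class
      public static void main(String[] args) {
        // Without the diamond this would repeat the whole nested type:
        // new ConcurrentHashMap<SimpleEntry<String, String>, Integer>()
        Map<SimpleEntry<String, String>, Integer> cache = new ConcurrentHashMap<>();
        cache.put(new SimpleEntry<>("providerClass", "providerParams"), 1);
        System.out.println(cache.size());  // prints 1
      }
    }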

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
index cef51d8..22d7e3e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
@@ -738,7 +738,7 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
     protected  KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();
     // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too
     // many object creations.
-    protected final ObjectIntPair<ByteBuffer> tmpPair = new ObjectIntPair<ByteBuffer>();
+    protected final ObjectIntPair<ByteBuffer> tmpPair = new ObjectIntPair<>();
     protected STATE current, previous;
 
     public BufferedEncodedSeeker(CellComparator comparator,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java
index d81bb4a..0f8ea01 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java
@@ -110,7 +110,7 @@ public class RowIndexCodecV1 extends AbstractDataBlockEncoder {
       RowIndexSeekerV1 seeker = new RowIndexSeekerV1(CellComparator.COMPARATOR,
           decodingCtx);
       seeker.setCurrentBuffer(new SingleByteBuff(sourceAsBuffer));
-      List<Cell> kvs = new ArrayList<Cell>();
+      List<Cell> kvs = new ArrayList<>();
       kvs.add(seeker.getCell());
       while (seeker.next()) {
         kvs.add(seeker.getCell());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java
index 4e14acb..6ac5645 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexSeekerV1.java
@@ -40,7 +40,7 @@ public class RowIndexSeekerV1 extends AbstractEncodedSeeker {
 
   // A temp pair object which will be reused by ByteBuff#asSubByteBuffer calls. This avoids too
   // many object creations.
-  protected final ObjectIntPair<ByteBuffer> tmpPair = new ObjectIntPair<ByteBuffer>();
+  protected final ObjectIntPair<ByteBuffer> tmpPair = new ObjectIntPair<>();
 
   private ByteBuff currentBuffer;
   private SeekerState current = new SeekerState(); // always valid

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
index 99780ba..2456961 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
@@ -86,7 +86,7 @@ public class LRUDictionary implements Dictionary {
     private Node head;
     private Node tail;
 
-    private HashMap<Node, Short> nodeToIndex = new HashMap<Node, Short>();
+    private HashMap<Node, Short> nodeToIndex = new HashMap<>();
     private Node[] indexToNode;
     private int initSize = 0;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
index 0e1c3ae..9325284 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
@@ -135,7 +135,7 @@ public class StreamUtils {
     int newOffset = offset;
     byte tmp = input[newOffset++];
     if (tmp >= 0) {
-      return new Pair<Integer, Integer>((int) tmp, newOffset - offset);
+      return new Pair<>((int) tmp, newOffset - offset);
     }
     int result = tmp & 0x7f;
     tmp = input[newOffset++];
@@ -160,7 +160,7 @@ public class StreamUtils {
             for (int i = 0; i < 5; i++) {
               tmp = input[newOffset++];
               if (tmp >= 0) {
-                return new Pair<Integer, Integer>(result, newOffset - offset);
+                return new Pair<>(result, newOffset - offset);
               }
             }
             throw new IOException("Malformed varint");
@@ -168,7 +168,7 @@ public class StreamUtils {
         }
       }
     }
-    return new Pair<Integer, Integer>(result, newOffset - offset);
+    return new Pair<>(result, newOffset - offset);
   }
 
   public static Pair<Integer, Integer> readRawVarint32(ByteBuffer input, int offset)
@@ -176,7 +176,7 @@ public class StreamUtils {
     int newOffset = offset;
     byte tmp = input.get(newOffset++);
     if (tmp >= 0) {
-      return new Pair<Integer, Integer>((int) tmp, newOffset - offset);
+      return new Pair<>((int) tmp, newOffset - offset);
     }
     int result = tmp & 0x7f;
     tmp = input.get(newOffset++);
@@ -201,7 +201,7 @@ public class StreamUtils {
             for (int i = 0; i < 5; i++) {
               tmp = input.get(newOffset++);
               if (tmp >= 0) {
-                return new Pair<Integer, Integer>(result, newOffset - offset);
+                return new Pair<>(result, newOffset - offset);
               }
             }
             throw new IOException("Malformed varint");
@@ -209,7 +209,7 @@ public class StreamUtils {
         }
       }
     }
-    return new Pair<Integer, Integer>(result, newOffset - offset);
+    return new Pair<>(result, newOffset - offset);
   }
 
   public static short toShort(byte hi, byte lo) {
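
A sketch (not from the patch) of the return-statement form used repeatedly in StreamUtils above: the compiler infers Pair<Integer, Integer> here from the target return type and the boxed int arguments. The Pair class below is a hypothetical stand-in for org.apache.hadoop.hbase.util.Pair.

    public class ReturnDiamondSketch {  // hypothetical example class
      // Minimal stand-in for the HBase Pair class.
      static class Pair<A, B> {
        final A first;
        final B second;
        Pair(A first, B second) {
          this.first = first;
          this.second = second;
        }
      }

      static Pair<Integer, Integer> decode(int value, int bytesRead) {
        // Inferred as Pair<Integer, Integer>; the ints are autoboxed.
        return new Pair<>(value, bytesRead);
      }

      public static void main(String[] args) {
        Pair<Integer, Integer> p = decode(127, 1);
        System.out.println(p.first + "," + p.second);  // prints 127,1
      }
    }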

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
index 0e45410..9f6b7b5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
@@ -202,7 +202,7 @@ public class SingleByteBuff extends ByteBuff {
     } else {
       // TODO we can do some optimization here? Call to asSubByteBuffer might
       // create a copy.
-      ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
+      ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<>();
       src.asSubByteBuffer(srcOffset, length, pair);
       if (pair.getFirst() != null) {
         ByteBufferUtils.copyFromBufferToBuffer(pair.getFirst(), this.buf, pair.getSecond(), offset,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
index c480dad..be2a0d3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
@@ -354,8 +354,7 @@ public abstract class User {
   }
 
   static class TestingGroups extends Groups {
-    private final Map<String, List<String>> userToGroupsMapping =
-        new HashMap<String,List<String>>();
+    private final Map<String, List<String>> userToGroupsMapping = new HashMap<>();
     private Groups underlyingImplementation;
 
     TestingGroups(Groups underlyingImplementation) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 43b1c89..955abfc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -90,7 +90,7 @@ public class UserProvider extends BaseConfigurable {
 
           private String[] getGroupStrings(String ugi) {
             try {
-              Set<String> result = new LinkedHashSet<String>(groups.getGroups(ugi));
+              Set<String> result = new LinkedHashSet<>(groups.getGroups(ugi));
               return result.toArray(new String[result.size()]);
             } catch (Exception e) {
               return new String[0];

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index f632ae0..1b6a67d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -63,7 +63,7 @@ public class SpanReceiverHost {
   }
 
   SpanReceiverHost(Configuration conf) {
-    receivers = new HashSet<SpanReceiver>();
+    receivers = new HashSet<>();
     this.conf = conf;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java
index c9c3b64..d73a17d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceStability.Evolving
 public class StructBuilder {
 
-  protected final List<DataType<?>> fields = new ArrayList<DataType<?>>();
+  protected final List<DataType<?>> fields = new ArrayList<>();
 
   /**
    * Create an empty {@code StructBuilder}.
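
A brief sketch (not from the patch) showing that the diamond operator also works when the element type is itself wildcard-parameterized, as in the List<DataType<?>> field above. Class<?> stands in for DataType<?> here; the names are hypothetical.

    import java.util.ArrayList;
    import java.util.List;

    public class WildcardDiamondSketch {  // hypothetical example class
      // Element type Class<?> is inferred, wildcard included.
      private static final List<Class<?>> FIELD_TYPES = new ArrayList<>();

      public static void main(String[] args) {
        FIELD_TYPES.add(String.class);
        FIELD_TYPES.add(byte[].class);
        System.out.println(FIELD_TYPES);
      }
    }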

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
index 4e3374e..51b87f7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ArrayUtils.java
@@ -102,7 +102,7 @@ public class ArrayUtils {
 
   public static ArrayList<Long> toList(long[] array){
     int length = length(array);
-    ArrayList<Long> list = new ArrayList<Long>(length);
+    ArrayList<Long> list = new ArrayList<>(length);
     for(int i=0; i < length; ++i){
       list.add(array[i]);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
index 260a8b2..58c50a8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AvlUtil.java
@@ -261,7 +261,7 @@ public final class AvlUtil {
         final AvlNodeVisitor<TNode> visitor) {
       if (root == null) return;
 
-      final AvlTreeIterator<TNode> iterator = new AvlTreeIterator<TNode>(root);
+      final AvlTreeIterator<TNode> iterator = new AvlTreeIterator<>(root);
       boolean visitNext = true;
       while (visitNext && iterator.hasNext()) {
         visitNext = visitor.visitNode(iterator.next());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/BoundedCompletionService.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/BoundedCompletionService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/BoundedCompletionService.java
index c3fa547..ba38097 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/BoundedCompletionService.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/BoundedCompletionService.java
@@ -58,8 +58,8 @@ public class BoundedCompletionService<V> {
 
   public BoundedCompletionService(Executor executor, int maxTasks) {
     this.executor = executor;
-    this.tasks = new ArrayList<Future<V>>(maxTasks);
-    this.completed = new ArrayBlockingQueue<Future<V>>(maxTasks);
+    this.tasks = new ArrayList<>(maxTasks);
+    this.completed = new ArrayBlockingQueue<>(maxTasks);
   }
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
index 7de1b13..9248b41 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
@@ -49,7 +49,7 @@ public class ByteRangeUtils {
 
   public static ArrayList<byte[]> copyToNewArrays(Collection<ByteRange> ranges) {
     if (ranges == null) {
-      return new ArrayList<byte[]>(0);
+      return new ArrayList<>(0);
     }
     ArrayList<byte[]> arrays = Lists.newArrayListWithCapacity(ranges.size());
     for (ByteRange range : ranges) {
@@ -60,7 +60,7 @@ public class ByteRangeUtils {
 
   public static ArrayList<ByteRange> fromArrays(Collection<byte[]> arrays) {
     if (arrays == null) {
-      return new ArrayList<ByteRange>(0);
+      return new ArrayList<>(0);
     }
     ArrayList<ByteRange> ranges = Lists.newArrayListWithCapacity(arrays.size());
     for (byte[] array : arrays) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
index 8cc71a3..1470d5c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
@@ -34,8 +34,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 @InterfaceAudience.Private
 public class CollectionUtils {
 
-  private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(
-    new ArrayList<Object>(0));
+  private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(new ArrayList<>(0));
 
   
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
index 0f00132..ba54f9d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ConcatenatedLists.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public class ConcatenatedLists<T> implements Collection<T> {
-  protected final ArrayList<List<T>> components = new ArrayList<List<T>>();
+  protected final ArrayList<List<T>> components = new ArrayList<>();
   protected int size = 0;
 
   public void addAllSublists(List<? extends List<T>> items) {