Posted to commits@hbase.apache.org by sy...@apache.org on 2017/03/10 22:09:59 UTC

[19/50] [abbrv] hbase git commit: HBASE-17532 Replaced explicit type with diamond operator
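
For context, the change in this patch is purely mechanical: wherever a constructor repeated the full generic type arguments, the Java 7 diamond operator (<>) lets the compiler infer them from the declared type. A minimal standalone sketch of the before/after pattern follows; the class name and the main() usage are illustrative only and not part of the patch, though the field name is modeled on ReplicationSourceManager.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.SortedSet;
    import java.util.TreeSet;

    public class DiamondOperatorExample {
      // Before: the type arguments are spelled out on both sides of the assignment.
      private final Map<String, SortedSet<String>> walsByIdOld =
          new HashMap<String, SortedSet<String>>();

      // After: the diamond operator lets the compiler infer <String, SortedSet<String>>
      // from the declared field type, which is the substitution made throughout this commit.
      private final Map<String, SortedSet<String>> walsById = new HashMap<>();

      public static void main(String[] args) {
        // Inference also works for local variables and constructor arguments.
        List<String> logs = new ArrayList<>();
        logs.add("wal.1");
        SortedSet<String> sorted = new TreeSet<>(logs);
        System.out.println(sorted);
      }
    }

The generated bytecode and runtime behavior are identical before and after; only the source text is shortened.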

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 5cb7d75..a38e264 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -147,14 +147,14 @@ public class ReplicationSourceManager implements ReplicationListener {
       final Path oldLogDir, final UUID clusterId) throws IOException {
     //CopyOnWriteArrayList is thread-safe.
     //Generally, reading is more than modifying.
-    this.sources = new CopyOnWriteArrayList<ReplicationSourceInterface>();
+    this.sources = new CopyOnWriteArrayList<>();
     this.replicationQueues = replicationQueues;
     this.replicationPeers = replicationPeers;
     this.replicationTracker = replicationTracker;
     this.server = server;
-    this.walsById = new HashMap<String, Map<String, SortedSet<String>>>();
-    this.walsByIdRecoveredQueues = new ConcurrentHashMap<String, Map<String, SortedSet<String>>>();
-    this.oldsources = new CopyOnWriteArrayList<ReplicationSourceInterface>();
+    this.walsById = new HashMap<>();
+    this.walsByIdRecoveredQueues = new ConcurrentHashMap<>();
+    this.oldsources = new CopyOnWriteArrayList<>();
     this.conf = conf;
     this.fs = fs;
     this.logDir = logDir;
@@ -170,8 +170,7 @@ public class ReplicationSourceManager implements ReplicationListener {
     // use a short 100ms sleep since this could be done inline with a RS startup
     // even if we fail, other region servers can take care of it
     this.executor = new ThreadPoolExecutor(nbWorkers, nbWorkers,
-        100, TimeUnit.MILLISECONDS,
-        new LinkedBlockingQueue<Runnable>());
+        100, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>());
     ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();
     tfb.setNameFormat("ReplicationExecutor-%d");
     tfb.setDaemon(true);
@@ -277,7 +276,7 @@ public class ReplicationSourceManager implements ReplicationListener {
           this.replicationPeers, server, id, this.clusterId, peerConfig, peer);
     synchronized (this.walsById) {
       this.sources.add(src);
-      Map<String, SortedSet<String>> walsByGroup = new HashMap<String, SortedSet<String>>();
+      Map<String, SortedSet<String>> walsByGroup = new HashMap<>();
       this.walsById.put(id, walsByGroup);
       // Add the latest wal to that source's queue
       synchronized (latestPaths) {
@@ -285,7 +284,7 @@ public class ReplicationSourceManager implements ReplicationListener {
           for (Path logPath : latestPaths) {
             String name = logPath.getName();
             String walPrefix = AbstractFSWALProvider.getWALPrefixFromWALName(name);
-            SortedSet<String> logs = new TreeSet<String>();
+            SortedSet<String> logs = new TreeSet<>();
             logs.add(name);
             walsByGroup.put(walPrefix, logs);
             try {
@@ -423,7 +422,7 @@ public class ReplicationSourceManager implements ReplicationListener {
         if (!existingPrefix) {
           // The new log belongs to a new group, add it into this peer
           LOG.debug("Start tracking logs for wal group " + logPrefix + " for peer " + peerId);
-          SortedSet<String> wals = new TreeSet<String>();
+          SortedSet<String> wals = new TreeSet<>();
           wals.add(logName);
           walsByPrefix.put(logPrefix, wals);
         }
@@ -570,8 +569,7 @@ public class ReplicationSourceManager implements ReplicationListener {
         + sources.size() + " and another "
         + oldsources.size() + " that were recovered");
     String terminateMessage = "Replication stream was removed by a user";
-    List<ReplicationSourceInterface> oldSourcesToDelete =
-        new ArrayList<ReplicationSourceInterface>();
+    List<ReplicationSourceInterface> oldSourcesToDelete = new ArrayList<>();
     // synchronized on oldsources to avoid adding recovered source for the to-be-removed peer
     // see NodeFailoverWorker.run
     synchronized (oldsources) {
@@ -589,7 +587,7 @@ public class ReplicationSourceManager implements ReplicationListener {
     LOG.info("Number of deleted recovered sources for " + id + ": "
         + oldSourcesToDelete.size());
     // Now look for the one on this cluster
-    List<ReplicationSourceInterface> srcToRemove = new ArrayList<ReplicationSourceInterface>();
+    List<ReplicationSourceInterface> srcToRemove = new ArrayList<>();
     // synchronize on replicationPeers to avoid adding source for the to-be-removed peer
     synchronized (this.replicationPeers) {
       for (ReplicationSourceInterface src : this.sources) {
@@ -735,13 +733,13 @@ public class ReplicationSourceManager implements ReplicationListener {
             continue;
           }
           // track sources in walsByIdRecoveredQueues
-          Map<String, SortedSet<String>> walsByGroup = new HashMap<String, SortedSet<String>>();
+          Map<String, SortedSet<String>> walsByGroup = new HashMap<>();
           walsByIdRecoveredQueues.put(peerId, walsByGroup);
           for (String wal : walsSet) {
             String walPrefix = AbstractFSWALProvider.getWALPrefixFromWALName(wal);
             SortedSet<String> wals = walsByGroup.get(walPrefix);
             if (wals == null) {
-              wals = new TreeSet<String>();
+              wals = new TreeSet<>();
               walsByGroup.put(walPrefix, wals);
             }
             wals.add(wal);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index f06330c..c1aad93 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -286,7 +286,7 @@ public class AccessControlLists {
             ACL_KEY_DELIMITER, columnName, ACL_KEY_DELIMITER,
             ACL_KEY_DELIMITER, columnName))));
 
-    Set<byte[]> qualifierSet = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    Set<byte[]> qualifierSet = new TreeSet<>(Bytes.BYTES_COMPARATOR);
     ResultScanner scanner = null;
     try {
       scanner = table.getScanner(scan);
@@ -384,8 +384,7 @@ public class AccessControlLists {
       throw new IOException("Can only load permissions from "+ACL_TABLE_NAME);
     }
 
-    Map<byte[], ListMultimap<String, TablePermission>> allPerms =
-        new TreeMap<byte[], ListMultimap<String, TablePermission>>(Bytes.BYTES_RAWCOMPARATOR);
+    Map<byte[], ListMultimap<String, TablePermission>> allPerms = new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
 
     // do a full scan of _acl_ table
 
@@ -397,7 +396,7 @@ public class AccessControlLists {
       iScanner = aclRegion.getScanner(scan);
 
       while (true) {
-        List<Cell> row = new ArrayList<Cell>();
+        List<Cell> row = new ArrayList<>();
 
         boolean hasNext = iScanner.next(row);
         ListMultimap<String,TablePermission> perms = ArrayListMultimap.create();
@@ -436,8 +435,7 @@ public class AccessControlLists {
    */
   static Map<byte[], ListMultimap<String,TablePermission>> loadAll(
       Configuration conf) throws IOException {
-    Map<byte[], ListMultimap<String,TablePermission>> allPerms =
-        new TreeMap<byte[], ListMultimap<String,TablePermission>>(Bytes.BYTES_RAWCOMPARATOR);
+    Map<byte[], ListMultimap<String,TablePermission>> allPerms = new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
 
     // do a full scan of _acl_, filtering on only first table region rows
 
@@ -530,7 +528,7 @@ public class AccessControlLists {
     ListMultimap<String,TablePermission> allPerms = getPermissions(
         conf, entryName, null);
 
-    List<UserPermission> perms = new ArrayList<UserPermission>();
+    List<UserPermission> perms = new ArrayList<>();
 
     if(isNamespaceEntry(entryName)) {  // Namespace
       for (Map.Entry<String, TablePermission> entry : allPerms.entries()) {
@@ -591,8 +589,7 @@ public class AccessControlLists {
 
     //Handle namespace entry
     if(isNamespaceEntry(entryName)) {
-      return new Pair<String, TablePermission>(username,
-          new TablePermission(Bytes.toString(fromNamespaceEntry(entryName)), value));
+      return new Pair<>(username, new TablePermission(Bytes.toString(fromNamespaceEntry(entryName)), value));
     }
 
     //Handle table and global entry
@@ -612,8 +609,7 @@ public class AccessControlLists {
       }
     }
 
-    return new Pair<String,TablePermission>(username,
-        new TablePermission(TableName.valueOf(entryName), permFamily, permQualifier, value));
+    return new Pair<>(username, new TablePermission(TableName.valueOf(entryName), permFamily, permQualifier, value));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 814f209..64ac900 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -259,8 +259,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
    */
   void updateACL(RegionCoprocessorEnvironment e,
       final Map<byte[], List<Cell>> familyMap) {
-    Set<byte[]> entries =
-        new TreeSet<byte[]>(Bytes.BYTES_RAWCOMPARATOR);
+    Set<byte[]> entries = new TreeSet<>(Bytes.BYTES_RAWCOMPARATOR);
     for (Map.Entry<byte[], List<Cell>> f : familyMap.entrySet()) {
       List<Cell> cells = f.getValue();
       for (Cell cell: cells) {
@@ -793,7 +792,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
     // This Map is identical to familyMap. The key is a BR rather than byte[].
     // It will be easy to do gets over this new Map as we can create get keys over the Cell cf by
     // new SimpleByteRange(cell.familyArray, cell.familyOffset, cell.familyLen)
-    Map<ByteRange, List<Cell>> familyMap1 = new HashMap<ByteRange, List<Cell>>();
+    Map<ByteRange, List<Cell>> familyMap1 = new HashMap<>();
     for (Entry<byte[], ? extends Collection<?>> entry : familyMap.entrySet()) {
       if (entry.getValue() instanceof List) {
         familyMap1.put(new SimpleMutableByteRange(entry.getKey()), (List<Cell>) entry.getValue());
@@ -882,7 +881,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
       List<Cell> newCells = Lists.newArrayList();
       for (Cell cell: e.getValue()) {
         // Prepend the supplied perms in a new ACL tag to an update list of tags for the cell
-        List<Tag> tags = new ArrayList<Tag>();
+        List<Tag> tags = new ArrayList<>();
         tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, perms));
         Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell);
         while (tagIterator.hasNext()) {
@@ -990,7 +989,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
   public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> c,
       HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
     Set<byte[]> families = desc.getFamiliesKeys();
-    Map<byte[], Set<byte[]>> familyMap = new TreeMap<byte[], Set<byte[]>>(Bytes.BYTES_COMPARATOR);
+    Map<byte[], Set<byte[]>> familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for (byte[] family: families) {
       familyMap.put(family, null);
     }
@@ -2407,8 +2406,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
                   tperm.getTableName()));
             }
 
-            Map<byte[], Set<byte[]>> familyMap =
-                new TreeMap<byte[], Set<byte[]>>(Bytes.BYTES_COMPARATOR);
+            Map<byte[], Set<byte[]>> familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
             if (tperm.getFamily() != null) {
               if (tperm.getQualifier() != null) {
                 Set<byte[]> qualifiers = Sets.newTreeSet(Bytes.BYTES_COMPARATOR);
@@ -2515,7 +2513,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
       return null;
     }
 
-    Map<byte[], Collection<byte[]>> familyMap = new TreeMap<byte[], Collection<byte[]>>(Bytes.BYTES_COMPARATOR);
+    Map<byte[], Collection<byte[]>> familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     familyMap.put(family, qualifier != null ? ImmutableSet.of(qualifier) : null);
     return familyMap;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index eae9e4e..0d539ce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ -103,10 +103,10 @@ public class TableAuthManager implements Closeable {
   private volatile PermissionCache<Permission> globalCache;
 
   private ConcurrentSkipListMap<TableName, PermissionCache<TablePermission>> tableCache =
-      new ConcurrentSkipListMap<TableName, PermissionCache<TablePermission>>();
+      new ConcurrentSkipListMap<>();
 
   private ConcurrentSkipListMap<String, PermissionCache<TablePermission>> nsCache =
-    new ConcurrentSkipListMap<String, PermissionCache<TablePermission>>();
+    new ConcurrentSkipListMap<>();
 
   private Configuration conf;
   private ZKPermissionWatcher zkperms;
@@ -143,7 +143,7 @@ public class TableAuthManager implements Closeable {
       throw new IOException("Unable to obtain the current user, " +
           "authorization checks for internal operations will not work correctly!");
     }
-    PermissionCache<Permission> newCache = new PermissionCache<Permission>();
+    PermissionCache<Permission> newCache = new PermissionCache<>();
     String currentUser = user.getShortName();
 
     // the system user is always included
@@ -239,7 +239,7 @@ public class TableAuthManager implements Closeable {
    */
   private void updateTableCache(TableName table,
                                 ListMultimap<String,TablePermission> tablePerms) {
-    PermissionCache<TablePermission> newTablePerms = new PermissionCache<TablePermission>();
+    PermissionCache<TablePermission> newTablePerms = new PermissionCache<>();
 
     for (Map.Entry<String,TablePermission> entry : tablePerms.entries()) {
       if (AuthUtil.isGroupPrincipal(entry.getKey())) {
@@ -263,7 +263,7 @@ public class TableAuthManager implements Closeable {
    */
   private void updateNsCache(String namespace,
                              ListMultimap<String, TablePermission> tablePerms) {
-    PermissionCache<TablePermission> newTablePerms = new PermissionCache<TablePermission>();
+    PermissionCache<TablePermission> newTablePerms = new PermissionCache<>();
 
     for (Map.Entry<String, TablePermission> entry : tablePerms.entries()) {
       if (AuthUtil.isGroupPrincipal(entry.getKey())) {
@@ -734,8 +734,7 @@ public class TableAuthManager implements Closeable {
     return mtime.get();
   }
 
-  private static Map<ZooKeeperWatcher,TableAuthManager> managerMap =
-    new HashMap<ZooKeeperWatcher,TableAuthManager>();
+  private static Map<ZooKeeperWatcher,TableAuthManager> managerMap = new HashMap<>();
 
   private static Map<TableAuthManager, Integer> refCount = new HashMap<>();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
index f21e877..3324b90 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
@@ -58,8 +58,7 @@ public class ZKPermissionWatcher extends ZooKeeperListener implements Closeable
   TableAuthManager authManager;
   String aclZNode;
   CountDownLatch initialized = new CountDownLatch(1);
-  AtomicReference<List<ZKUtil.NodeAndData>> nodes =
-      new AtomicReference<List<ZKUtil.NodeAndData>>(null);
+  AtomicReference<List<ZKUtil.NodeAndData>> nodes = new AtomicReference<>(null);
   ExecutorService executor;
 
   public ZKPermissionWatcher(ZooKeeperWatcher watcher,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
index 26448b1..a569cf3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
@@ -70,8 +70,7 @@ public class AuthenticationTokenSecretManager
   private LeaderElector leaderElector;
   private ZKClusterId clusterId;
 
-  private Map<Integer,AuthenticationKey> allKeys =
-      new ConcurrentHashMap<Integer, AuthenticationKey>();
+  private Map<Integer,AuthenticationKey> allKeys = new ConcurrentHashMap<>();
   private AuthenticationKey currentKey;
 
   private int idSeq;
@@ -181,8 +180,7 @@ public class AuthenticationTokenSecretManager
   public Token<AuthenticationTokenIdentifier> generateToken(String username) {
     AuthenticationTokenIdentifier ident =
         new AuthenticationTokenIdentifier(username);
-    Token<AuthenticationTokenIdentifier> token =
-        new Token<AuthenticationTokenIdentifier>(ident, this);
+    Token<AuthenticationTokenIdentifier> token = new Token<>(ident, this);
     if (clusterId.hasId()) {
       token.setService(new Text(clusterId.getId()));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
index f767ed3..1d42450 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
@@ -314,7 +314,7 @@ public class TokenUtil {
    * @return the Token instance
    */
   public static Token<AuthenticationTokenIdentifier> toToken(AuthenticationProtos.Token proto) {
-    return new Token<AuthenticationTokenIdentifier>(
+    return new Token<>(
         proto.hasIdentifier() ? proto.getIdentifier().toByteArray() : null,
         proto.hasPassword() ? proto.getPassword().toByteArray() : null,
         AuthenticationTokenIdentifier.AUTH_TOKEN_TYPE,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
index 9abb3a2..d4a5627 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
@@ -151,10 +151,10 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
   protected List<List<Cell>> getExistingLabelsWithAuths() throws IOException {
     Scan scan = new Scan();
     RegionScanner scanner = labelsRegion.getScanner(scan);
-    List<List<Cell>> existingLabels = new ArrayList<List<Cell>>();
+    List<List<Cell>> existingLabels = new ArrayList<>();
     try {
       while (true) {
-        List<Cell> cells = new ArrayList<Cell>();
+        List<Cell> cells = new ArrayList<>();
         scanner.next(cells);
         if (cells.isEmpty()) {
           break;
@@ -169,8 +169,8 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
 
   protected Pair<Map<String, Integer>, Map<String, List<Integer>>> extractLabelsAndAuths(
       List<List<Cell>> labelDetails) {
-    Map<String, Integer> labels = new HashMap<String, Integer>();
-    Map<String, List<Integer>> userAuths = new HashMap<String, List<Integer>>();
+    Map<String, Integer> labels = new HashMap<>();
+    Map<String, List<Integer>> userAuths = new HashMap<>();
     for (List<Cell> cells : labelDetails) {
       for (Cell cell : cells) {
         if (CellUtil.matchingQualifier(cell, LABEL_QUALIFIER)) {
@@ -183,14 +183,14 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
               cell.getQualifierLength());
           List<Integer> auths = userAuths.get(user);
           if (auths == null) {
-            auths = new ArrayList<Integer>();
+            auths = new ArrayList<>();
             userAuths.put(user, auths);
           }
           auths.add(CellUtil.getRowAsInt(cell));
         }
       }
     }
-    return new Pair<Map<String, Integer>, Map<String, List<Integer>>>(labels, userAuths);
+    return new Pair<>(labels, userAuths);
   }
 
   protected void addSystemLabel(Region region, Map<String, Integer> labels,
@@ -207,7 +207,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
   public OperationStatus[] addLabels(List<byte[]> labels) throws IOException {
     assert labelsRegion != null;
     OperationStatus[] finalOpStatus = new OperationStatus[labels.size()];
-    List<Mutation> puts = new ArrayList<Mutation>(labels.size());
+    List<Mutation> puts = new ArrayList<>(labels.size());
     int i = 0;
     for (byte[] label : labels) {
       String labelStr = Bytes.toString(label);
@@ -235,7 +235,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
   public OperationStatus[] setAuths(byte[] user, List<byte[]> authLabels) throws IOException {
     assert labelsRegion != null;
     OperationStatus[] finalOpStatus = new OperationStatus[authLabels.size()];
-    List<Mutation> puts = new ArrayList<Mutation>(authLabels.size());
+    List<Mutation> puts = new ArrayList<>(authLabels.size());
     int i = 0;
     for (byte[] auth : authLabels) {
       String authStr = Bytes.toString(auth);
@@ -269,7 +269,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     else {
       currentAuths = this.getUserAuths(user, true);
     }
-    List<Mutation> deletes = new ArrayList<Mutation>(authLabels.size());
+    List<Mutation> deletes = new ArrayList<>(authLabels.size());
     int i = 0;
     for (byte[] authLabel : authLabels) {
       String authLabelStr = Bytes.toString(authLabel);
@@ -334,10 +334,10 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     Filter filter = VisibilityUtils.createVisibilityLabelFilter(this.labelsRegion,
         new Authorizations(SYSTEM_LABEL));
     s.setFilter(filter);
-    ArrayList<String> auths = new ArrayList<String>();
+    ArrayList<String> auths = new ArrayList<>();
     RegionScanner scanner = this.labelsRegion.getScanner(s);
     try {
-      List<Cell> results = new ArrayList<Cell>(1);
+      List<Cell> results = new ArrayList<>(1);
       while (true) {
         scanner.next(results);
         if (results.isEmpty()) break;
@@ -371,10 +371,10 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     Filter filter = VisibilityUtils.createVisibilityLabelFilter(this.labelsRegion,
         new Authorizations(SYSTEM_LABEL));
     s.setFilter(filter);
-    Set<String> auths = new HashSet<String>();
+    Set<String> auths = new HashSet<>();
     RegionScanner scanner = this.labelsRegion.getScanner(s);
     try {
-      List<Cell> results = new ArrayList<Cell>(1);
+      List<Cell> results = new ArrayList<>(1);
       while (true) {
         scanner.next(results);
         if (results.isEmpty()) break;
@@ -389,7 +389,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     } finally {
       scanner.close();
     }
-    return new ArrayList<String>(auths);
+    return new ArrayList<>(auths);
   }
 
   @Override
@@ -401,7 +401,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     labels.remove(SYSTEM_LABEL);
     if (regex != null) {
       Pattern pattern = Pattern.compile(regex);
-      ArrayList<String> matchedLabels = new ArrayList<String>();
+      ArrayList<String> matchedLabels = new ArrayList<>();
       for (String label : labels.keySet()) {
         if (pattern.matcher(label).matches()) {
           matchedLabels.add(label);
@@ -409,13 +409,13 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
       }
       return matchedLabels;
     }
-    return new ArrayList<String>(labels.keySet());
+    return new ArrayList<>(labels.keySet());
   }
 
   @Override
   public List<Tag> createVisibilityExpTags(String visExpression, boolean withSerializationFormat,
       boolean checkAuths) throws IOException {
-    Set<Integer> auths = new HashSet<Integer>();
+    Set<Integer> auths = new HashSet<>();
     if (checkAuths) {
       User user = VisibilityUtils.getActiveUser();
       auths.addAll(this.labelsCache.getUserAuthsAsOrdinals(user.getShortName()));
@@ -461,7 +461,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
       try {
         // null authorizations to be handled inside SLG impl.
         authLabels = scanLabelGenerator.getLabels(VisibilityUtils.getActiveUser(), authorizations);
-        authLabels = (authLabels == null) ? new ArrayList<String>() : authLabels;
+        authLabels = (authLabels == null) ? new ArrayList<>() : authLabels;
         authorizations = new Authorizations(authLabels);
       } catch (Throwable t) {
         LOG.error(t);
@@ -605,7 +605,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
   }
 
   private static List<List<Integer>> sortTagsBasedOnOrdinal(List<Tag> tags) throws IOException {
-    List<List<Integer>> fullTagsList = new ArrayList<List<Integer>>();
+    List<List<Integer>> fullTagsList = new ArrayList<>();
     for (Tag tag : tags) {
       if (tag.getType() == VISIBILITY_TAG_TYPE) {
         getSortedTagOrdinals(fullTagsList, tag);
@@ -616,7 +616,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
 
   private static void getSortedTagOrdinals(List<List<Integer>> fullTagsList, Tag tag)
       throws IOException {
-    List<Integer> tagsOrdinalInSortedOrder = new ArrayList<Integer>();
+    List<Integer> tagsOrdinalInSortedOrder = new ArrayList<>();
     int offset = tag.getValueOffset();
     int endOffset = offset + tag.getValueLength();
     while (offset < endOffset) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
index 2c7d253..2126ee7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
@@ -62,18 +62,18 @@ public class DefinedSetFilterScanLabelGenerator implements ScanLabelGenerator {
     if (authorizations != null) {
       List<String> labels = authorizations.getLabels();
       String userName = user.getShortName();
-      Set<String> auths = new HashSet<String>();
+      Set<String> auths = new HashSet<>();
       auths.addAll(this.labelsCache.getUserAuths(userName));
       auths.addAll(this.labelsCache.getGroupAuths(user.getGroupNames()));
-      return dropLabelsNotInUserAuths(labels, new ArrayList<String>(auths), userName);
+      return dropLabelsNotInUserAuths(labels, new ArrayList<>(auths), userName);
     }
     return null;
   }
 
   private List<String> dropLabelsNotInUserAuths(List<String> labels, List<String> auths,
       String userName) {
-    List<String> droppedLabels = new ArrayList<String>();
-    List<String> passedLabels = new ArrayList<String>(labels.size());
+    List<String> droppedLabels = new ArrayList<>();
+    List<String> passedLabels = new ArrayList<>(labels.size());
     for (String label : labels) {
       if (auths.contains(label)) {
         passedLabels.add(label);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
index dd0497c..177f4d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
@@ -62,10 +62,10 @@ public class EnforcingScanLabelGenerator implements ScanLabelGenerator {
     if (authorizations != null) {
       LOG.warn("Dropping authorizations requested by user " + userName + ": " + authorizations);
     }
-    Set<String> auths = new HashSet<String>();
+    Set<String> auths = new HashSet<>();
     auths.addAll(this.labelsCache.getUserAuths(userName));
     auths.addAll(this.labelsCache.getGroupAuths(user.getGroupNames()));
-    return new ArrayList<String>(auths);
+    return new ArrayList<>(auths);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
index db3caff..2b9a56e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
@@ -40,7 +40,7 @@ public class ExpressionParser {
   private static final char DOUBLE_QUOTES = '"';
   public ExpressionNode parse(String expS) throws ParseException {
     expS = expS.trim();
-    Stack<ExpressionNode> expStack = new Stack<ExpressionNode>();
+    Stack<ExpressionNode> expStack = new Stack<>();
     int index = 0;
     byte[] exp = Bytes.toBytes(expS);
     int endPos = exp.length;
@@ -68,7 +68,7 @@ public class ExpressionParser {
           // We have to rewrite the expression within double quotes as incase of expressions 
           // with escape characters we may have to avoid them as the original expression did
           // not have them
-          List<Byte> list = new ArrayList<Byte>();
+          List<Byte> list = new ArrayList<>();
           while (index < endPos && !endDoubleQuotesFound(exp[index])) {
             if (exp[index] == '\\') {
               index++;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
index 1f90682..f4cf762 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
@@ -65,10 +65,10 @@ public class FeedUserAuthScanLabelGenerator implements ScanLabelGenerator {
     if (authorizations == null || authorizations.getLabels() == null
         || authorizations.getLabels().isEmpty()) {
       String userName = user.getShortName();
-      Set<String> auths = new HashSet<String>();
+      Set<String> auths = new HashSet<>();
       auths.addAll(this.labelsCache.getUserAuths(userName));
       auths.addAll(this.labelsCache.getGroupAuths(user.getGroupNames()));
-      return new ArrayList<String>(auths);
+      return new ArrayList<>(auths);
     }
     return authorizations.getLabels();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index fb685bc..476921b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -145,7 +145,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
   boolean authorizationEnabled;
 
   // Add to this list if there are any reserved tag types
-  private static ArrayList<Byte> RESERVED_VIS_TAG_TYPES = new ArrayList<Byte>();
+  private static ArrayList<Byte> RESERVED_VIS_TAG_TYPES = new ArrayList<>();
   static {
     RESERVED_VIS_TAG_TYPES.add(TagType.VISIBILITY_TAG_TYPE);
     RESERVED_VIS_TAG_TYPES.add(TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE);
@@ -328,7 +328,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
       return;
     }
     // TODO this can be made as a global LRU cache at HRS level?
-    Map<String, List<Tag>> labelCache = new HashMap<String, List<Tag>>();
+    Map<String, List<Tag>> labelCache = new HashMap<>();
     for (int i = 0; i < miniBatchOp.size(); i++) {
       Mutation m = miniBatchOp.getOperation(i);
       CellVisibility cellVisibility = null;
@@ -341,7 +341,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
       }
       boolean sanityFailure = false;
       boolean modifiedTagFound = false;
-      Pair<Boolean, Tag> pair = new Pair<Boolean, Tag>(false, null);
+      Pair<Boolean, Tag> pair = new Pair<>(false, null);
       for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
         pair = checkForReservedVisibilityTagPresence(cellScanner.current(), pair);
         if (!pair.getFirst()) {
@@ -381,7 +381,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
             }
           }
           if (visibilityTags != null) {
-            List<Cell> updatedCells = new ArrayList<Cell>();
+            List<Cell> updatedCells = new ArrayList<>();
             for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
               Cell cell = cellScanner.current();
               List<Tag> tags = CellUtil.getTags(cell);
@@ -427,7 +427,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
     }
     // The check for checkForReservedVisibilityTagPresence happens in preBatchMutate happens.
     // It happens for every mutation and that would be enough.
-    List<Tag> visibilityTags = new ArrayList<Tag>();
+    List<Tag> visibilityTags = new ArrayList<>();
     if (cellVisibility != null) {
       String labelsExp = cellVisibility.getExpression();
       try {
@@ -474,7 +474,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
   private Pair<Boolean, Tag> checkForReservedVisibilityTagPresence(Cell cell,
       Pair<Boolean, Tag> pair) throws IOException {
     if (pair == null) {
-      pair = new Pair<Boolean, Tag>(false, null);
+      pair = new Pair<>(false, null);
     } else {
       pair.setFirst(false);
       pair.setSecond(null);
@@ -782,7 +782,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
         new VisibilityControllerNotReadyException("VisibilityController not yet initialized!"),
         response);
     } else {
-      List<byte[]> labels = new ArrayList<byte[]>(visLabels.size());
+      List<byte[]> labels = new ArrayList<>(visLabels.size());
       try {
         if (authorizationEnabled) {
           checkCallingUserAuth();
@@ -844,7 +844,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
         response);
     } else {
       byte[] user = request.getUser().toByteArray();
-      List<byte[]> labelAuths = new ArrayList<byte[]>(auths.size());
+      List<byte[]> labelAuths = new ArrayList<>(auths.size());
       try {
         if (authorizationEnabled) {
           checkCallingUserAuth();
@@ -959,7 +959,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
           "VisibilityController not yet initialized"), response);
     } else {
       byte[] requestUser = request.getUser().toByteArray();
-      List<byte[]> labelAuths = new ArrayList<byte[]>(auths.size());
+      List<byte[]> labelAuths = new ArrayList<>(auths.size());
       try {
         // When AC is ON, do AC based user auth check
         if (authorizationEnabled && accessControllerAvailable && !isSystemOrSuperUser()) {
@@ -1071,7 +1071,7 @@ public class VisibilityController implements MasterObserver, RegionObserver,
 
     @Override
     public ReturnCode filterKeyValue(Cell cell) throws IOException {
-      List<Tag> putVisTags = new ArrayList<Tag>();
+      List<Tag> putVisTags = new ArrayList<>();
       Byte putCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(cell, putVisTags);
       boolean matchFound = VisibilityLabelServiceManager
           .getInstance().getVisibilityLabelService()

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
index 0948520..e27a4f8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
@@ -54,10 +54,10 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
   private static VisibilityLabelsCache instance;
 
   private ZKVisibilityLabelWatcher zkVisibilityWatcher;
-  private Map<String, Integer> labels = new HashMap<String, Integer>();
-  private Map<Integer, String> ordinalVsLabels = new HashMap<Integer, String>();
-  private Map<String, Set<Integer>> userAuths = new HashMap<String, Set<Integer>>();
-  private Map<String, Set<Integer>> groupAuths = new HashMap<String, Set<Integer>>();
+  private Map<String, Integer> labels = new HashMap<>();
+  private Map<Integer, String> ordinalVsLabels = new HashMap<>();
+  private Map<String, Set<Integer>> userAuths = new HashMap<>();
+  private Map<String, Set<Integer>> groupAuths = new HashMap<>();
 
   /**
    * This covers the members labels, ordinalVsLabels and userAuths
@@ -145,10 +145,9 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
       for (UserAuthorizations userAuths : multiUserAuths.getUserAuthsList()) {
         String user = Bytes.toString(userAuths.getUser().toByteArray());
         if (AuthUtil.isGroupPrincipal(user)) {
-          this.groupAuths.put(AuthUtil.getGroupName(user),
-            new HashSet<Integer>(userAuths.getAuthList()));
+          this.groupAuths.put(AuthUtil.getGroupName(user), new HashSet<>(userAuths.getAuthList()));
         } else {
-          this.userAuths.put(user, new HashSet<Integer>(userAuths.getAuthList()));
+          this.userAuths.put(user, new HashSet<>(userAuths.getAuthList()));
         }
       }
     } finally {
@@ -210,7 +209,7 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
       List<String> auths = EMPTY_LIST;
       Set<Integer> authOrdinals = getUserAuthsAsOrdinals(user);
       if (!authOrdinals.equals(EMPTY_SET)) {
-        auths = new ArrayList<String>(authOrdinals.size());
+        auths = new ArrayList<>(authOrdinals.size());
         for (Integer authOrdinal : authOrdinals) {
           auths.add(ordinalVsLabels.get(authOrdinal));
         }
@@ -227,7 +226,7 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
       List<String> auths = EMPTY_LIST;
       Set<Integer> authOrdinals = getGroupAuthsAsOrdinals(groups);
       if (!authOrdinals.equals(EMPTY_SET)) {
-        auths = new ArrayList<String>(authOrdinals.size());
+        auths = new ArrayList<>(authOrdinals.size());
         for (Integer authOrdinal : authOrdinals) {
           auths.add(ordinalVsLabels.get(authOrdinal));
         }
@@ -263,7 +262,7 @@ public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider {
   public Set<Integer> getGroupAuthsAsOrdinals(String[] groups) {
     this.lock.readLock().lock();
     try {
-      Set<Integer> authOrdinals = new HashSet<Integer>();
+      Set<Integer> authOrdinals = new HashSet<>();
       if (groups != null && groups.length > 0) {
         Set<Integer> groupAuthOrdinals = null;
         for (String group : groups) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
index c1c3852..c77b776 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
@@ -69,9 +69,9 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint {
       // string based tags. But for intra cluster replication like region
       // replicas it is not needed.
       List<Entry> entries = replicateContext.getEntries();
-      List<Tag> visTags = new ArrayList<Tag>();
-      List<Tag> nonVisTags = new ArrayList<Tag>();
-      List<Entry> newEntries = new ArrayList<Entry>(entries.size());
+      List<Tag> visTags = new ArrayList<>();
+      List<Tag> nonVisTags = new ArrayList<>();
+      List<Entry> newEntries = new ArrayList<>(entries.size());
       for (Entry entry : entries) {
         WALEdit newEdit = new WALEdit();
         ArrayList<Cell> cells = entry.getEdit().getCells();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
index 2595fe0..67181e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
@@ -53,17 +53,14 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
   // type would solve this problem and also ensure that the combination of different type
   // of deletes with diff ts would also work fine
   // Track per TS
-  private List<Triple<List<Tag>, Byte, Long>> visibilityTagsDeleteFamily =
-      new ArrayList<Triple<List<Tag>, Byte, Long>>();
+  private List<Triple<List<Tag>, Byte, Long>> visibilityTagsDeleteFamily = new ArrayList<>();
   // Delete family version with different ts and different visibility expression could come.
   // Need to track it per ts.
-  private List<Triple<List<Tag>, Byte, Long>> visibilityTagsDeleteFamilyVersion =
-      new ArrayList<Triple<List<Tag>, Byte, Long>>();
+  private List<Triple<List<Tag>, Byte, Long>> visibilityTagsDeleteFamilyVersion = new ArrayList<>();
   private List<Pair<List<Tag>, Byte>> visibilityTagsDeleteColumns;
   // Tracking as List<List> is to handle same ts cell but different visibility tag. 
   // TODO : Need to handle puts with same ts but different vis tags.
-  private List<Pair<List<Tag>, Byte>> visiblityTagsDeleteColumnVersion =
-      new ArrayList<Pair<List<Tag>, Byte>>();
+  private List<Pair<List<Tag>, Byte>> visiblityTagsDeleteColumnVersion = new ArrayList<>();
 
   public VisibilityScanDeleteTracker() {
     super();
@@ -117,50 +114,46 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
       Byte deleteCellVisTagsFormat = null;
       switch (type) {
       case DeleteFamily:
-        List<Tag> delTags = new ArrayList<Tag>();
+        List<Tag> delTags = new ArrayList<>();
         if (visibilityTagsDeleteFamily == null) {
-          visibilityTagsDeleteFamily = new ArrayList<Triple<List<Tag>, Byte, Long>>();
+          visibilityTagsDeleteFamily = new ArrayList<>();
         }
         deleteCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(delCell, delTags);
         if (!delTags.isEmpty()) {
-          visibilityTagsDeleteFamily.add(new Triple<List<Tag>, Byte, Long>(delTags,
-              deleteCellVisTagsFormat, delCell.getTimestamp()));
+          visibilityTagsDeleteFamily.add(new Triple<>(delTags, deleteCellVisTagsFormat, delCell.getTimestamp()));
           hasVisTag = true;
         }
         break;
       case DeleteFamilyVersion:
         if(visibilityTagsDeleteFamilyVersion == null) {
-          visibilityTagsDeleteFamilyVersion = new ArrayList<Triple<List<Tag>, Byte, Long>>();
+          visibilityTagsDeleteFamilyVersion = new ArrayList<>();
         }
-        delTags = new ArrayList<Tag>();
+        delTags = new ArrayList<>();
         deleteCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(delCell, delTags);
         if (!delTags.isEmpty()) {
-          visibilityTagsDeleteFamilyVersion.add(new Triple<List<Tag>, Byte, Long>(delTags,
-              deleteCellVisTagsFormat, delCell.getTimestamp()));
+          visibilityTagsDeleteFamilyVersion.add(new Triple<>(delTags, deleteCellVisTagsFormat, delCell.getTimestamp()));
           hasVisTag = true;
         }
         break;
       case DeleteColumn:
         if (visibilityTagsDeleteColumns == null) {
-          visibilityTagsDeleteColumns = new ArrayList<Pair<List<Tag>, Byte>>();
+          visibilityTagsDeleteColumns = new ArrayList<>();
         }
-        delTags = new ArrayList<Tag>();
+        delTags = new ArrayList<>();
         deleteCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(delCell, delTags);
         if (!delTags.isEmpty()) {
-          visibilityTagsDeleteColumns.add(new Pair<List<Tag>, Byte>(delTags,
-              deleteCellVisTagsFormat));
+          visibilityTagsDeleteColumns.add(new Pair<>(delTags, deleteCellVisTagsFormat));
           hasVisTag = true;
         }
         break;
       case Delete:
         if (visiblityTagsDeleteColumnVersion == null) {
-          visiblityTagsDeleteColumnVersion = new ArrayList<Pair<List<Tag>, Byte>>();
+          visiblityTagsDeleteColumnVersion = new ArrayList<>();
         }
-        delTags = new ArrayList<Tag>();
+        delTags = new ArrayList<>();
         deleteCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(delCell, delTags);
         if (!delTags.isEmpty()) {
-          visiblityTagsDeleteColumnVersion.add(new Pair<List<Tag>, Byte>(delTags,
-              deleteCellVisTagsFormat));
+          visiblityTagsDeleteColumnVersion.add(new Pair<>(delTags, deleteCellVisTagsFormat));
           hasVisTag = true;
         }
         break;
@@ -182,7 +175,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
               // visibilityTagsDeleteFamily is ArrayList
               Triple<List<Tag>, Byte, Long> triple = visibilityTagsDeleteFamily.get(i);
               if (timestamp <= triple.getThird()) {
-                List<Tag> putVisTags = new ArrayList<Tag>();
+                List<Tag> putVisTags = new ArrayList<>();
                 Byte putCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(cell, putVisTags);
                 boolean matchFound = VisibilityLabelServiceManager.getInstance()
                     .getVisibilityLabelService().matchVisibility(putVisTags, putCellVisTagsFormat,
@@ -218,7 +211,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
               // visibilityTagsDeleteFamilyVersion is ArrayList
               Triple<List<Tag>, Byte, Long> triple = visibilityTagsDeleteFamilyVersion.get(i);
               if (timestamp == triple.getThird()) {
-                List<Tag> putVisTags = new ArrayList<Tag>();
+                List<Tag> putVisTags = new ArrayList<>();
                 Byte putCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(cell, putVisTags);
                 boolean matchFound = VisibilityLabelServiceManager.getInstance()
                     .getVisibilityLabelService().matchVisibility(putVisTags, putCellVisTagsFormat,
@@ -248,7 +241,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
             if (visibilityTagsDeleteColumns != null) {
               if (!visibilityTagsDeleteColumns.isEmpty()) {
                 for (Pair<List<Tag>, Byte> tags : visibilityTagsDeleteColumns) {
-                  List<Tag> putVisTags = new ArrayList<Tag>();
+                  List<Tag> putVisTags = new ArrayList<>();
                   Byte putCellVisTagsFormat =
                       VisibilityUtils.extractVisibilityTags(cell, putVisTags);
                   boolean matchFound = VisibilityLabelServiceManager.getInstance()
@@ -277,7 +270,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
             if (visiblityTagsDeleteColumnVersion != null) {
               if (!visiblityTagsDeleteColumnVersion.isEmpty()) {
                 for (Pair<List<Tag>, Byte> tags : visiblityTagsDeleteColumnVersion) {
-                  List<Tag> putVisTags = new ArrayList<Tag>();
+                  List<Tag> putVisTags = new ArrayList<>();
                   Byte putCellVisTagsFormat =
                       VisibilityUtils.extractVisibilityTags(cell, putVisTags);
                   boolean matchFound = VisibilityLabelServiceManager.getInstance()

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 1db506d..4441c08 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -175,7 +175,7 @@ public class VisibilityUtils {
     String slgClassesCommaSeparated = conf.get(VISIBILITY_LABEL_GENERATOR_CLASS);
     // We have only System level SLGs now. The order of execution will be same as the order in the
     // comma separated config value
-    List<ScanLabelGenerator> slgs = new ArrayList<ScanLabelGenerator>();
+    List<ScanLabelGenerator> slgs = new ArrayList<>();
     if (StringUtils.isNotEmpty(slgClassesCommaSeparated)) {
       String[] slgClasses = slgClassesCommaSeparated.split(COMMA);
       for (String slgClass : slgClasses) {
@@ -266,7 +266,7 @@ public class VisibilityUtils {
 
   public static Filter createVisibilityLabelFilter(Region region, Authorizations authorizations)
       throws IOException {
-    Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<ByteRange, Integer>();
+    Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<>();
     for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
       cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), hcd.getMaxVersions());
     }
@@ -302,10 +302,10 @@ public class VisibilityUtils {
       throw new IOException(e);
     }
     node = EXP_EXPANDER.expand(node);
-    List<Tag> tags = new ArrayList<Tag>();
+    List<Tag> tags = new ArrayList<>();
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     DataOutputStream dos = new DataOutputStream(baos);
-    List<Integer> labelOrdinals = new ArrayList<Integer>();
+    List<Integer> labelOrdinals = new ArrayList<>();
     // We will be adding this tag before the visibility tags and the presence of this
     // tag indicates we are supporting deletes with cell visibility
     if (withSerializationFormat) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
index 4399ecc..9903b9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 @InterfaceAudience.Private
 public class NonLeafExpressionNode implements ExpressionNode {
   private Operator op;
-  private List<ExpressionNode> childExps = new ArrayList<ExpressionNode>(2);
+  private List<ExpressionNode> childExps = new ArrayList<>(2);
 
   public NonLeafExpressionNode() {
 
@@ -46,7 +46,7 @@ public class NonLeafExpressionNode implements ExpressionNode {
 
   public NonLeafExpressionNode(Operator op, ExpressionNode... exps) {
     this.op = op;
-    List<ExpressionNode> expLst = new ArrayList<ExpressionNode>();
+    List<ExpressionNode> expLst = new ArrayList<>();
     Collections.addAll(expLst, exps);
     this.childExps = expLst;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index a5507fc..efae7e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -564,7 +564,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       final FileSystem fs, final Path snapshotDir) throws IOException {
     SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
 
-    final List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<Pair<SnapshotFileInfo, Long>>();
+    final List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<>();
     final TableName table = TableName.valueOf(snapshotDesc.getTable());
 
     // Get snapshot files
@@ -591,7 +591,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
             } else {
               size = HFileLink.buildFromHFileLinkPattern(conf, path).getFileStatus(fs).getLen();
             }
-            files.add(new Pair<SnapshotFileInfo, Long>(fileInfo, size));
+            files.add(new Pair<>(fileInfo, size));
           }
         }
     });
@@ -618,8 +618,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
     });
 
     // create balanced groups
-    List<List<Pair<SnapshotFileInfo, Long>>> fileGroups =
-      new LinkedList<List<Pair<SnapshotFileInfo, Long>>>();
+    List<List<Pair<SnapshotFileInfo, Long>>> fileGroups = new LinkedList<>();
     long[] sizeGroups = new long[ngroups];
     int hi = files.size() - 1;
     int lo = 0;
@@ -630,7 +629,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
     while (hi >= lo) {
       if (g == fileGroups.size()) {
-        group = new LinkedList<Pair<SnapshotFileInfo, Long>>();
+        group = new LinkedList<>();
         fileGroups.add(group);
       } else {
         group = fileGroups.get(g);
@@ -703,7 +702,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       public ExportSnapshotInputSplit(final List<Pair<SnapshotFileInfo, Long>> snapshotFiles) {
         this.files = new ArrayList(snapshotFiles.size());
         for (Pair<SnapshotFileInfo, Long> fileInfo: snapshotFiles) {
-          this.files.add(new Pair<BytesWritable, Long>(
+          this.files.add(new Pair<>(
             new BytesWritable(fileInfo.getFirst().toByteArray()), fileInfo.getSecond()));
           this.length += fileInfo.getSecond();
         }
@@ -726,13 +725,13 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       @Override
       public void readFields(DataInput in) throws IOException {
         int count = in.readInt();
-        files = new ArrayList<Pair<BytesWritable, Long>>(count);
+        files = new ArrayList<>(count);
         length = 0;
         for (int i = 0; i < count; ++i) {
           BytesWritable fileInfo = new BytesWritable();
           fileInfo.readFields(in);
           long size = in.readLong();
-          files.add(new Pair<BytesWritable, Long>(fileInfo, size));
+          files.add(new Pair<>(fileInfo, size));
           length += size;
         }
       }
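
The payoff is largest with nested generics such as the List<List<Pair<SnapshotFileInfo, Long>>> above, where the diamond removes the longest repetition without changing what the compiler checks. The sketch below shows the same shape with only JDK types (Map.Entry standing in for HBase's Pair), so it compiles on its own; all names are invented for illustration:

  import java.util.AbstractMap;
  import java.util.ArrayList;
  import java.util.LinkedList;
  import java.util.List;
  import java.util.Map;

  public class NestedDiamondSketch {
    public static void main(String[] args) {
      // With deeply nested generics the diamond drops the longest repetition;
      // the full element type is still checked, only the spelling is inferred.
      List<List<Map.Entry<String, Long>>> groups = new LinkedList<>();
      List<Map.Entry<String, Long>> group = new ArrayList<>();
      // Diamond also applies to the entry itself; its type arguments are
      // inferred from the constructor arguments (String, Long).
      group.add(new AbstractMap.SimpleEntry<>("file-1", 1024L));
      groups.add(group);
      System.out.println(groups);
    }
  }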

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 98afe8b..63839c4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -108,11 +108,9 @@ import org.apache.hadoop.io.IOUtils;
 public class RestoreSnapshotHelper {
   private static final Log LOG = LogFactory.getLog(RestoreSnapshotHelper.class);
 
-  private final Map<byte[], byte[]> regionsMap =
-        new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
+  private final Map<byte[], byte[]> regionsMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
-  private final Map<String, Pair<String, String> > parentsMap =
-      new HashMap<String, Pair<String, String> >();
+  private final Map<String, Pair<String, String> > parentsMap = new HashMap<>();
 
   private final ForeignExceptionDispatcher monitor;
   private final MonitoredTask status;
@@ -187,7 +185,7 @@ public class RestoreSnapshotHelper {
 
     // Take a copy of the manifest.keySet() since we are going to modify
     // this instance, by removing the regions already present in the restore dir.
-    Set<String> regionNames = new HashSet<String>(regionManifests.keySet());
+    Set<String> regionNames = new HashSet<>(regionManifests.keySet());
 
     HRegionInfo mobRegion = MobUtils.getMobRegionInfo(snapshotManifest.getTableDescriptor()
         .getTableName());
@@ -213,7 +211,7 @@ public class RestoreSnapshotHelper {
       status.setStatus("Restoring table regions...");
       if (regionNames.contains(mobRegion.getEncodedName())) {
         // restore the mob region in case
-        List<HRegionInfo> mobRegions = new ArrayList<HRegionInfo>(1);
+        List<HRegionInfo> mobRegions = new ArrayList<>(1);
         mobRegions.add(mobRegion);
         restoreHdfsMobRegions(exec, regionManifests, mobRegions);
         regionNames.remove(mobRegion.getEncodedName());
@@ -230,7 +228,7 @@ public class RestoreSnapshotHelper {
 
     // Regions to Add: present in the snapshot but not in the current table
     if (regionNames.size() > 0) {
-      List<HRegionInfo> regionsToAdd = new ArrayList<HRegionInfo>(regionNames.size());
+      List<HRegionInfo> regionsToAdd = new ArrayList<>(regionNames.size());
 
       monitor.rethrowException();
       // add the mob region
@@ -344,14 +342,14 @@ public class RestoreSnapshotHelper {
 
     void addRegionToRemove(final HRegionInfo hri) {
       if (regionsToRemove == null) {
-        regionsToRemove = new LinkedList<HRegionInfo>();
+        regionsToRemove = new LinkedList<>();
       }
       regionsToRemove.add(hri);
     }
 
     void addRegionToRestore(final HRegionInfo hri) {
       if (regionsToRestore == null) {
-        regionsToRestore = new LinkedList<HRegionInfo>();
+        regionsToRestore = new LinkedList<>();
       }
       regionsToRestore.add(hri);
     }
@@ -361,7 +359,7 @@ public class RestoreSnapshotHelper {
       if (regionInfos == null || parentsMap.isEmpty()) return;
 
       // Extract region names and offlined regions
-      Map<String, HRegionInfo> regionsByName = new HashMap<String, HRegionInfo>(regionInfos.size());
+      Map<String, HRegionInfo> regionsByName = new HashMap<>(regionInfos.size());
       List<HRegionInfo> parentRegions = new LinkedList<>();
       for (HRegionInfo regionInfo: regionInfos) {
         if (regionInfo.isSplitParent()) {
@@ -441,10 +439,10 @@ public class RestoreSnapshotHelper {
   private Map<String, List<SnapshotRegionManifest.StoreFile>> getRegionHFileReferences(
       final SnapshotRegionManifest manifest) {
     Map<String, List<SnapshotRegionManifest.StoreFile>> familyMap =
-      new HashMap<String, List<SnapshotRegionManifest.StoreFile>>(manifest.getFamilyFilesCount());
+      new HashMap<>(manifest.getFamilyFilesCount());
     for (SnapshotRegionManifest.FamilyFiles familyFiles: manifest.getFamilyFilesList()) {
       familyMap.put(familyFiles.getFamilyName().toStringUtf8(),
-        new ArrayList<SnapshotRegionManifest.StoreFile>(familyFiles.getStoreFilesList()));
+        new ArrayList<>(familyFiles.getStoreFilesList()));
     }
     return familyMap;
   }
@@ -489,8 +487,7 @@ public class RestoreSnapshotHelper {
       List<SnapshotRegionManifest.StoreFile> snapshotFamilyFiles =
           snapshotFiles.remove(familyDir.getName());
       if (snapshotFamilyFiles != null) {
-        List<SnapshotRegionManifest.StoreFile> hfilesToAdd =
-            new ArrayList<SnapshotRegionManifest.StoreFile>();
+        List<SnapshotRegionManifest.StoreFile> hfilesToAdd = new ArrayList<>();
         for (SnapshotRegionManifest.StoreFile storeFile: snapshotFamilyFiles) {
           if (familyFiles.contains(storeFile.getName())) {
             // HFile already present
@@ -546,7 +543,7 @@ public class RestoreSnapshotHelper {
     FileStatus[] hfiles = FSUtils.listStatus(fs, familyDir);
     if (hfiles == null) return Collections.emptySet();
 
-    Set<String> familyFiles = new HashSet<String>(hfiles.length);
+    Set<String> familyFiles = new HashSet<>(hfiles.length);
     for (int i = 0; i < hfiles.length; ++i) {
       String hfileName = hfiles[i].getPath().getName();
       familyFiles.add(hfileName);
@@ -564,8 +561,7 @@ public class RestoreSnapshotHelper {
       final List<HRegionInfo> regions) throws IOException {
     if (regions == null || regions.isEmpty()) return null;
 
-    final Map<String, HRegionInfo> snapshotRegions =
-      new HashMap<String, HRegionInfo>(regions.size());
+    final Map<String, HRegionInfo> snapshotRegions = new HashMap<>(regions.size());
 
     // clone region info (change embedded tableName with the new one)
     HRegionInfo[] clonedRegionsInfo = new HRegionInfo[regions.size()];
@@ -742,7 +738,7 @@ public class RestoreSnapshotHelper {
     synchronized (parentsMap) {
       Pair<String, String> daughters = parentsMap.get(clonedRegionName);
       if (daughters == null) {
-        daughters = new Pair<String, String>(regionName, null);
+        daughters = new Pair<>(regionName, null);
         parentsMap.put(clonedRegionName, daughters);
       } else if (!regionName.equals(daughters.getFirst())) {
         daughters.setSecond(regionName);
@@ -778,7 +774,7 @@ public class RestoreSnapshotHelper {
     FileStatus[] regionDirs = FSUtils.listStatus(fs, tableDir, new FSUtils.RegionDirFilter(fs));
     if (regionDirs == null) return null;
 
-    List<HRegionInfo> regions = new ArrayList<HRegionInfo>(regionDirs.length);
+    List<HRegionInfo> regions = new ArrayList<>(regionDirs.length);
     for (int i = 0; i < regionDirs.length; ++i) {
       HRegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDirs[i].getPath());
       regions.add(hri);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index 85d3af3..6dbd3f0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -598,8 +598,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
     Path snapshotDir = SnapshotDescriptionUtils.getSnapshotsDir(rootDir);
     FileStatus[] snapshots = fs.listStatus(snapshotDir,
         new SnapshotDescriptionUtils.CompletedSnaphotDirectoriesFilter(fs));
-    List<SnapshotDescription> snapshotLists =
-      new ArrayList<SnapshotDescription>(snapshots.length);
+    List<SnapshotDescription> snapshotLists = new ArrayList<>(snapshots.length);
     for (FileStatus snapshotDirStat: snapshots) {
       HBaseProtos.SnapshotDescription snapshotDesc =
           SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDirStat.getPath());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 47e3073..4e838ad 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -220,7 +220,7 @@ public final class SnapshotManifest {
       Object familyData = visitor.familyOpen(regionData, store.getFamily().getName());
       monitor.rethrowException();
 
-      List<StoreFile> storeFiles = new ArrayList<StoreFile>(store.getStorefiles());
+      List<StoreFile> storeFiles = new ArrayList<>(store.getStorefiles());
       if (LOG.isDebugEnabled()) {
         LOG.debug("Adding snapshot references for " + storeFiles  + " hfiles");
       }
@@ -305,7 +305,7 @@ public final class SnapshotManifest {
     FileStatus[] stats = FSUtils.listStatus(fs, storeDir);
     if (stats == null) return null;
 
-    ArrayList<StoreFileInfo> storeFiles = new ArrayList<StoreFileInfo>(stats.length);
+    ArrayList<StoreFileInfo> storeFiles = new ArrayList<>(stats.length);
     for (int i = 0; i < stats.length; ++i) {
       storeFiles.add(new StoreFileInfo(conf, fs, stats[i]));
     }
@@ -374,8 +374,7 @@ public final class SnapshotManifest {
             tpool.shutdown();
           }
           if (v1Regions != null && v2Regions != null) {
-            regionManifests =
-              new ArrayList<SnapshotRegionManifest>(v1Regions.size() + v2Regions.size());
+            regionManifests = new ArrayList<>(v1Regions.size() + v2Regions.size());
             regionManifests.addAll(v1Regions);
             regionManifests.addAll(v2Regions);
           } else if (v1Regions != null) {
@@ -427,8 +426,7 @@ public final class SnapshotManifest {
   public Map<String, SnapshotRegionManifest> getRegionManifestsMap() {
     if (regionManifests == null || regionManifests.isEmpty()) return null;
 
-    HashMap<String, SnapshotRegionManifest> regionsMap =
-        new HashMap<String, SnapshotRegionManifest>(regionManifests.size());
+    HashMap<String, SnapshotRegionManifest> regionsMap = new HashMap<>(regionManifests.size());
     for (SnapshotRegionManifest manifest: regionManifests) {
       String regionName = getRegionNameFromManifest(manifest);
       regionsMap.put(regionName, manifest);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index cceeebc..46893f9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -121,7 +121,7 @@ public final class SnapshotManifestV1 {
     }
 
     final ExecutorCompletionService<SnapshotRegionManifest> completionService =
-      new ExecutorCompletionService<SnapshotRegionManifest>(executor);
+      new ExecutorCompletionService<>(executor);
     for (final FileStatus region: regions) {
       completionService.submit(new Callable<SnapshotRegionManifest>() {
         @Override
@@ -132,8 +132,7 @@ public final class SnapshotManifestV1 {
       });
     }
 
-    ArrayList<SnapshotRegionManifest> regionsManifest =
-        new ArrayList<SnapshotRegionManifest>(regions.length);
+    ArrayList<SnapshotRegionManifest> regionsManifest = new ArrayList<>(regions.length);
     try {
       for (int i = 0; i < regions.length; ++i) {
         regionsManifest.add(completionService.take().get());
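
Note that the anonymous new Callable<SnapshotRegionManifest>() { ... } instances in this and the following hunks keep their explicit type argument. Diamond on an anonymous class is only accepted from Java 9 onward (JEP 213), so under the pre-9 source level this code presumably targets, those type arguments have to stay spelled out. A compilable sketch of both cases, using plain JDK types instead of the HBase classes above:

  import java.util.concurrent.Callable;
  import java.util.concurrent.ExecutionException;
  import java.util.concurrent.ExecutorCompletionService;
  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;

  public class AnonymousDiamondSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
      ExecutorService pool = Executors.newFixedThreadPool(2);
      // Diamond is fine here: the type argument is inferred from the variable type.
      ExecutorCompletionService<String> completion = new ExecutorCompletionService<>(pool);
      // The anonymous Callable must keep <String> under a Java 7/8 source level;
      // "new Callable<>() { ... }" only compiles from Java 9 on.
      completion.submit(new Callable<String>() {
        @Override
        public String call() {
          return "region-manifest";
        }
      });
      System.out.println(completion.take().get());
      pool.shutdown();
    }
  }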

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index a1341fb..567f42d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -139,7 +139,7 @@ public final class SnapshotManifestV2 {
     if (manifestFiles == null || manifestFiles.length == 0) return null;
 
     final ExecutorCompletionService<SnapshotRegionManifest> completionService =
-      new ExecutorCompletionService<SnapshotRegionManifest>(executor);
+      new ExecutorCompletionService<>(executor);
     for (final FileStatus st: manifestFiles) {
       completionService.submit(new Callable<SnapshotRegionManifest>() {
         @Override
@@ -157,8 +157,7 @@ public final class SnapshotManifestV2 {
       });
     }
 
-    ArrayList<SnapshotRegionManifest> regionsManifest =
-        new ArrayList<SnapshotRegionManifest>(manifestFiles.length);
+    ArrayList<SnapshotRegionManifest> regionsManifest = new ArrayList<>(manifestFiles.length);
     try {
       for (int i = 0; i < manifestFiles.length; ++i) {
         regionsManifest.add(completionService.take().get());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
index 8cd438e..7a2bfe6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
@@ -210,8 +210,7 @@ public final class SnapshotReferenceUtil {
       return;
     }
 
-    final ExecutorCompletionService<Void> completionService =
-      new ExecutorCompletionService<Void>(exec);
+    final ExecutorCompletionService<Void> completionService = new ExecutorCompletionService<>(exec);
 
     for (final SnapshotRegionManifest regionManifest : regionManifests) {
       completionService.submit(new Callable<Void>() {
@@ -345,7 +344,7 @@ public final class SnapshotReferenceUtil {
   private static Set<String> getHFileNames(final Configuration conf, final FileSystem fs,
       final Path snapshotDir, final SnapshotDescription snapshotDesc)
       throws IOException {
-    final Set<String> names = new HashSet<String>();
+    final Set<String> names = new HashSet<>();
     visitTableStoreFiles(conf, fs, snapshotDir, snapshotDesc, new StoreFileVisitor() {
       @Override
       public void storeFile(final HRegionInfo regionInfo, final String family,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 73160bc..ee93cdb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -140,8 +140,8 @@ public final class Canary implements Tool {
     private AtomicLong readFailureCount = new AtomicLong(0),
         writeFailureCount = new AtomicLong(0);
 
-    private Map<String, String> readFailures = new ConcurrentHashMap<String, String>();
-    private Map<String, String> writeFailures = new ConcurrentHashMap<String, String>();
+    private Map<String, String> readFailures = new ConcurrentHashMap<>();
+    private Map<String, String> writeFailures = new ConcurrentHashMap<>();
 
     @Override
     public long getReadFailureCount() {
@@ -949,7 +949,7 @@ public final class Canary implements Tool {
     public void run() {
       if (this.initAdmin()) {
         try {
-          List<Future<Void>> taskFutures = new LinkedList<Future<Void>>();
+          List<Future<Void>> taskFutures = new LinkedList<>();
           if (this.targets != null && this.targets.length > 0) {
             String[] tables = generateMonitorTables(this.targets);
             this.initialized = true;
@@ -996,7 +996,7 @@ public final class Canary implements Tool {
       if (this.useRegExp) {
         Pattern pattern = null;
         HTableDescriptor[] tds = null;
-        Set<String> tmpTables = new TreeSet<String>();
+        Set<String> tmpTables = new TreeSet<>();
         try {
           if (LOG.isDebugEnabled()) {
             LOG.debug(String.format("reading list of tables"));
@@ -1040,7 +1040,7 @@ public final class Canary implements Tool {
       if (LOG.isDebugEnabled()) {
         LOG.debug(String.format("reading list of tables"));
       }
-      List<Future<Void>> taskFutures = new LinkedList<Future<Void>>();
+      List<Future<Void>> taskFutures = new LinkedList<>();
       for (HTableDescriptor table : admin.listTables()) {
         if (admin.isTableEnabled(table.getTableName())
             && (!table.getTableName().equals(writeTableName))) {
@@ -1078,7 +1078,7 @@ public final class Canary implements Tool {
         admin.deleteTable(writeTableName);
         createWriteTable(numberOfServers);
       }
-      HashSet<ServerName> serverSet = new HashSet<ServerName>();
+      HashSet<ServerName> serverSet = new HashSet<>();
       for (Pair<HRegionInfo, ServerName> pair : pairs) {
         serverSet.add(pair.getSecond());
       }
@@ -1165,7 +1165,7 @@ public final class Canary implements Tool {
     } else {
       LOG.warn(String.format("Table %s is not enabled", tableName));
     }
-    return new LinkedList<Future<Void>>();
+    return new LinkedList<>();
   }
 
   /*
@@ -1183,7 +1183,7 @@ public final class Canary implements Tool {
     try {
       table = admin.getConnection().getTable(tableDesc.getTableName());
     } catch (TableNotFoundException e) {
-      return new ArrayList<Future<Void>>();
+      return new ArrayList<>();
     }
     finally {
       if (table !=null) {
@@ -1191,7 +1191,7 @@ public final class Canary implements Tool {
       }
     }
 
-    List<RegionTask> tasks = new ArrayList<RegionTask>();
+    List<RegionTask> tasks = new ArrayList<>();
     RegionLocator regionLocator = null;
     try {
       regionLocator = admin.getConnection().getRegionLocator(tableDesc.getTableName());
@@ -1290,7 +1290,7 @@ public final class Canary implements Tool {
     }
 
     private boolean checkNoTableNames() {
-      List<String> foundTableNames = new ArrayList<String>();
+      List<String> foundTableNames = new ArrayList<>();
       TableName[] tableNames = null;
 
       if (LOG.isDebugEnabled()) {
@@ -1323,8 +1323,8 @@ public final class Canary implements Tool {
     }
 
     private void monitorRegionServers(Map<String, List<HRegionInfo>> rsAndRMap) {
-      List<RegionServerTask> tasks = new ArrayList<RegionServerTask>();
-      Map<String, AtomicLong> successMap = new HashMap<String, AtomicLong>();
+      List<RegionServerTask> tasks = new ArrayList<>();
+      Map<String, AtomicLong> successMap = new HashMap<>();
       Random rand = new Random();
       for (Map.Entry<String, List<HRegionInfo>> entry : rsAndRMap.entrySet()) {
         String serverName = entry.getKey();
@@ -1379,7 +1379,7 @@ public final class Canary implements Tool {
     }
 
     private Map<String, List<HRegionInfo>> getAllRegionServerByName() {
-      Map<String, List<HRegionInfo>> rsAndRMap = new HashMap<String, List<HRegionInfo>>();
+      Map<String, List<HRegionInfo>> rsAndRMap = new HashMap<>();
       Table table = null;
       RegionLocator regionLocator = null;
       try {
@@ -1400,7 +1400,7 @@ public final class Canary implements Tool {
             if (rsAndRMap.containsKey(rsName)) {
               regions = rsAndRMap.get(rsName);
             } else {
-              regions = new ArrayList<HRegionInfo>();
+              regions = new ArrayList<>();
               rsAndRMap.put(rsName, regions);
             }
             regions.add(r);
@@ -1438,7 +1438,7 @@ public final class Canary implements Tool {
       Map<String, List<HRegionInfo>> filteredRsAndRMap = null;
 
       if (this.targets != null && this.targets.length > 0) {
-        filteredRsAndRMap = new HashMap<String, List<HRegionInfo>>();
+        filteredRsAndRMap = new HashMap<>();
         Pattern pattern = null;
         Matcher matcher = null;
         boolean regExpFound = false;
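
The Canary hunks also use the diamond in return statements, e.g. return new LinkedList<>(); there the type arguments are inferred from the method's declared return type rather than from a variable. A small sketch of that case, with made-up method names:

  import java.util.ArrayList;
  import java.util.LinkedList;
  import java.util.List;
  import java.util.concurrent.Future;

  public class ReturnDiamondSketch {
    // The diamond in a return statement is resolved against the declared
    // return type, which is why "return new LinkedList<>();" works above.
    static List<Future<Void>> noTasks() {
      return new LinkedList<>();
    }

    static List<String> emptyNames() {
      return new ArrayList<>();
    }

    public static void main(String[] args) {
      System.out.println(noTasks().size() + " tasks, " + emptyNames().size() + " names");
    }
  }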

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
index 4a93151..354382c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
@@ -152,7 +152,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
    */
   public BoundedPriorityBlockingQueue(int capacity,
       Comparator<? super E> comparator) {
-    this.queue = new PriorityQueue<E>(capacity, comparator);
+    this.queue = new PriorityQueue<>(capacity, comparator);
   }
 
   public boolean offer(E e) {
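
Constructor arguments such as the capacity and comparator here do not influence what the diamond infers; the element type still comes from the declared type of the field or variable being assigned. A short illustrative sketch with invented names (the comparator is written as an explicitly typed anonymous class, matching the pre-Java 9 restriction noted earlier):

  import java.util.Comparator;
  import java.util.PriorityQueue;

  public class ConstructorArgDiamondSketch {
    public static void main(String[] args) {
      Comparator<String> byLength = new Comparator<String>() {
        @Override
        public int compare(String a, String b) {
          return Integer.compare(a.length(), b.length());
        }
      };
      // The diamond infers <String> from the declared type on the left;
      // the capacity and comparator arguments play no part in the inference.
      PriorityQueue<String> queue = new PriorityQueue<>(16, byLength);
      queue.add("snapshot");
      queue.add("wal");
      System.out.println(queue.poll()); // prints "wal", the shortest entry
    }
  }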

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
index 3f05969..9e36290 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
@@ -66,7 +66,7 @@ public class CollectionBackedScanner extends NonReversedNonLazyKeyValueScanner {
       Cell... array) {
     this.comparator = comparator;
 
-    List<Cell> tmp = new ArrayList<Cell>(array.length);
+    List<Cell> tmp = new ArrayList<>(array.length);
     Collections.addAll(tmp, array);
     Collections.sort(tmp, comparator);
     data = tmp;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
index 0659a0d..87e867f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java
@@ -50,9 +50,8 @@ import org.apache.commons.logging.LogFactory;
 public class ConnectionCache {
   private static final Log LOG = LogFactory.getLog(ConnectionCache.class);
 
-  private final Map<String, ConnectionInfo>
-   connections = new ConcurrentHashMap<String, ConnectionInfo>();
-  private final KeyLocker<String> locker = new KeyLocker<String>();
+  private final Map<String, ConnectionInfo> connections = new ConcurrentHashMap<>();
+  private final KeyLocker<String> locker = new KeyLocker<>();
   private final String realUserName;
   private final UserGroupInformation realUser;
   private final UserProvider userProvider;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
index f45ecff..6692ee8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java
@@ -40,10 +40,9 @@ import org.apache.hadoop.hbase.security.EncryptionUtil;
 public class EncryptionTest {
   private static final Log LOG = LogFactory.getLog(EncryptionTest.class);
 
-  static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<String, Boolean>();
-  static final Map<String, Boolean> cipherProviderResults =
-    new ConcurrentHashMap<String, Boolean>();
-  static final Map<String, Boolean> cipherResults = new ConcurrentHashMap<String, Boolean>();
+  static final Map<String, Boolean> keyProviderResults = new ConcurrentHashMap<>();
+  static final Map<String, Boolean> cipherProviderResults = new ConcurrentHashMap<>();
+  static final Map<String, Boolean> cipherResults = new ConcurrentHashMap<>();
 
   private EncryptionTest() {
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
index 0d880d0..de49d38 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
@@ -58,7 +58,7 @@ public class FSHDFSUtils extends FSUtils {
    */
   private static Set<InetSocketAddress> getNNAddresses(DistributedFileSystem fs,
                                                       Configuration conf) {
-    Set<InetSocketAddress> addresses = new HashSet<InetSocketAddress>();
+    Set<InetSocketAddress> addresses = new HashSet<>();
     String serviceName = fs.getCanonicalServiceName();
 
     if (serviceName.startsWith("ha-hdfs")) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
index b0af52b..0bc8783 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
@@ -72,7 +72,7 @@ class FSRegionScanner implements Runnable {
   public void run() {
     try {
       // empty the map for each region
-      Map<String, AtomicInteger> blockCountMap = new HashMap<String, AtomicInteger>();
+      Map<String, AtomicInteger> blockCountMap = new HashMap<>();
 
       //get table name
       String tableName = regionPath.getParent().getName();
@@ -145,7 +145,7 @@ class FSRegionScanner implements Runnable {
       }
 
       if (regionDegreeLocalityMapping != null && totalBlkCount > 0) {
-        Map<String, Float> hostLocalityMap = new HashMap<String, Float>();
+        Map<String, Float> hostLocalityMap = new HashMap<>();
         for (Map.Entry<String, AtomicInteger> entry : blockCountMap.entrySet()) {
           String host = entry.getKey();
           if (host.endsWith(".")) {