Posted to commits@hbase.apache.org by st...@apache.org on 2017/03/07 19:23:32 UTC

[19/22] hbase git commit: HBASE-17532 Replaced explicit type with diamond operator
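
The change is mechanical: wherever a constructor call repeats the type arguments already written on the left-hand side, the Java 7 diamond operator ("<>") lets the compiler infer them. A minimal sketch of the pattern, with illustrative names not taken from this patch:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class DiamondExample {
      // Before: the type arguments are spelled out twice.
      Map<String, List<Integer>> before = new HashMap<String, List<Integer>>();
      // After: the compiler infers <String, List<Integer>> from the declared type.
      Map<String, List<Integer>> after = new HashMap<>();
    }

The one place the patch keeps explicit type arguments is anonymous inner classes, where the diamond does not compile until Java 9; the KeyLocker and WeakObjectPool hunks below show the distinction.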

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
index c3635cb..bc51440 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
@@ -130,13 +130,13 @@ public class CoprocessorClassLoader extends ClassLoaderBase {
   /**
    * A locker used to synchronize class loader initialization per coprocessor jar file
    */
-  private static final KeyLocker<String> locker = new KeyLocker<String>();
+  private static final KeyLocker<String> locker = new KeyLocker<>();
 
   /**
    * A set used to synchronized parent path clean up.  Generally, there
    * should be only one parent path, but using a set so that we can support more.
    */
-  static final HashSet<String> parentDirLockSet = new HashSet<String>();
+  static final HashSet<String> parentDirLockSet = new HashSet<>();
 
   /**
    * Creates a JarClassLoader that loads classes from the given paths.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java
index 0d3a5c6..36ca7ad 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java
@@ -94,7 +94,7 @@ public class Counter {
   }
 
   private Counter(Cell initCell) {
-    containerRef = new AtomicReference<Container>(new Container(initCell));
+    containerRef = new AtomicReference<>(new Container(initCell));
   }
 
   private static int hash() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index 595cc5b..1a73069 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -102,7 +102,7 @@ public class DynamicClassLoader extends ClassLoaderBase {
   // FindBugs: Making synchronized to avoid IS2_INCONSISTENT_SYNC complaints about
   // remoteDirFs and jarModifiedTime being part synchronized protected.
   private synchronized void initTempDir(final Configuration conf) {
-    jarModifiedTime = new HashMap<String, Long>();
+    jarModifiedTime = new HashMap<>();
     String localDirPath = conf.get(
       LOCAL_DIR_KEY, DEFAULT_LOCAL_DIR) + DYNAMIC_JARS_DIR;
     localDir = new File(localDirPath);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IterableUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IterableUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IterableUtils.java
index 41e837d..862da43 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IterableUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IterableUtils.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 public class IterableUtils {
 
   private static final List<Object> EMPTY_LIST = Collections
-      .unmodifiableList(new ArrayList<Object>(0));
+      .unmodifiableList(new ArrayList<>(0));
 
   @SuppressWarnings("unchecked")
   public static <T> Iterable<T> nullSafe(Iterable<T> in) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
index 05bd66d..6acf584 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/KeyLocker.java
@@ -49,7 +49,7 @@ public class KeyLocker<K> {
   private static final int NB_CONCURRENT_LOCKS = 1000;
 
   private final WeakObjectPool<K, ReentrantLock> lockPool =
-      new WeakObjectPool<K, ReentrantLock>(
+      new WeakObjectPool<>(
           new WeakObjectPool.ObjectFactory<K, ReentrantLock>() {
             @Override
             public ReentrantLock createObject(K key) {
@@ -85,7 +85,7 @@ public class KeyLocker<K> {
     Arrays.sort(keyArray);
 
     lockPool.purge();
-    Map<K, Lock> locks = new LinkedHashMap<K, Lock>(keyArray.length);
+    Map<K, Lock> locks = new LinkedHashMap<>(keyArray.length);
     for (Object o : keyArray) {
       @SuppressWarnings("unchecked")
       K key = (K)o;
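
Note that in the KeyLocker hunk above only the outer WeakObjectPool allocation takes the diamond; the anonymous ObjectFactory keeps its explicit <K, ReentrantLock> because a diamond on an anonymous class is rejected by javac before Java 9. A small sketch of the restriction, using java.util.concurrent.Callable as a stand-in type:

    import java.util.concurrent.Callable;

    class AnonymousDiamond {
      // "new Callable<>() { ... }" only compiles on Java 9 and later,
      // so pre-Java-9 code must spell the arguments out on anonymous classes:
      Callable<String> task = new Callable<String>() {
        @Override
        public String call() {
          return "value";
        }
      };
    }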

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java
index 159924f..719d1ee 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java
@@ -64,7 +64,7 @@ public class Pair<T1, T2> implements Serializable
    * @return a new pair containing the passed arguments
    */
   public static <T1,T2> Pair<T1,T2> newPair(T1 a, T2 b) {
-    return new Pair<T1,T2>(a, b);
+    return new Pair<>(a, b);
   }
   
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 21b376c..279ce95 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -191,7 +191,7 @@ public class Threads {
       ThreadFactory threadFactory) {
     ThreadPoolExecutor boundedCachedThreadPool =
       new ThreadPoolExecutor(maxCachedThread, maxCachedThread, timeout,
-        unit, new LinkedBlockingQueue<Runnable>(), threadFactory);
+        unit, new LinkedBlockingQueue<>(), threadFactory);
     // allow the core pool threads timeout and terminate
     boundedCachedThreadPool.allowCoreThreadTimeOut(true);
     return boundedCachedThreadPool;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
index 1de6bee..9ee0ab5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
@@ -41,7 +41,7 @@ public class Triple<A, B, C> {
 
   // ctor cannot infer types w/o warning but a method can.
   public static <A, B, C> Triple<A, B, C> create(A first, B second, C third) {
-    return new Triple<A, B, C>(first, second, third);
+    return new Triple<>(first, second, third);
   }
 
   public int hashCode() {
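
The comment in the hunk above ("ctor cannot infer types w/o warning but a method can") describes the pre-Java-7 idiom: a generic method such as Triple.create infers its type arguments from the call site, while a constructor call could not without a warning, which is why these static factories exist. Since Java 7 the diamond gives the constructor the same inference, so the factory body shrinks to new Triple<>(...). An illustrative reconstruction, not HBase code:

    class Box<A> {
      final A value;

      Box(A value) {
        this.value = value;
      }

      // A generic method infers <A> from its argument, e.g. Box.of("x") is a
      // Box<String>; since Java 7 the diamond lets the constructor infer too.
      static <A> Box<A> of(A value) {
        return new Box<>(value);
      }
    }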

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java
index 7757c6c..478864b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/WeakObjectPool.java
@@ -50,7 +50,7 @@ public class WeakObjectPool<K, V> {
     V createObject(K key);
   }
 
-  private final ReferenceQueue<V> staleRefQueue = new ReferenceQueue<V>();
+  private final ReferenceQueue<V> staleRefQueue = new ReferenceQueue<>();
 
   private class ObjectReference extends WeakReference<V> {
     final K key;
@@ -126,8 +126,7 @@ public class WeakObjectPool<K, V> {
     }
     this.objectFactory = objectFactory;
 
-    this.referenceCache = new ConcurrentHashMap<K, ObjectReference>(
-        initialCapacity, 0.75f, concurrencyLevel);
+    this.referenceCache = new ConcurrentHashMap<>(initialCapacity, 0.75f, concurrencyLevel);
     // 0.75f is the default load factor threshold of ConcurrentHashMap.
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
index c73705a..87d56a9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
@@ -171,7 +171,7 @@ public class RedundantKVGenerator {
 
   private List<byte[]> generateRows() {
     // generate prefixes
-    List<byte[]> prefixes = new ArrayList<byte[]>();
+    List<byte[]> prefixes = new ArrayList<>();
     prefixes.add(new byte[0]);
     for (int i = 1; i < numberOfRowPrefixes; ++i) {
       int prefixLength = averagePrefixLength;
@@ -184,7 +184,7 @@ public class RedundantKVGenerator {
     }
 
     // generate rest of the row
-    List<byte[]> rows = new ArrayList<byte[]>();
+    List<byte[]> rows = new ArrayList<>();
     for (int i = 0; i < numberOfRows; ++i) {
       int suffixLength = averageSuffixLength;
       suffixLength += randomizer.nextInt(2 * suffixLengthVariance + 1) -
@@ -213,10 +213,10 @@ public class RedundantKVGenerator {
    * @return sorted list of key values
    */
   public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
-    List<KeyValue> result = new ArrayList<KeyValue>();
+    List<KeyValue> result = new ArrayList<>();
 
     List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<Integer, List<byte[]>>();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
 
     if(family==null){
       family = new byte[columnFamilyLength];
@@ -249,7 +249,7 @@ public class RedundantKVGenerator {
 
         // add it to map
         if (!rowsToQualifier.containsKey(rowId)) {
-          rowsToQualifier.put(rowId, new ArrayList<byte[]>());
+          rowsToQualifier.put(rowId, new ArrayList<>());
         }
         rowsToQualifier.get(rowId).add(qualifier);
       } else if (qualifierChance > chanceForSameQualifier) {
@@ -299,9 +299,9 @@ public class RedundantKVGenerator {
    * @return sorted list of key values
    */
   public List<Cell> generateTestExtendedOffheapKeyValues(int howMany, boolean useTags) {
-    List<Cell> result = new ArrayList<Cell>();
+    List<Cell> result = new ArrayList<>();
     List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<Integer, List<byte[]>>();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
 
     if (family == null) {
       family = new byte[columnFamilyLength];
@@ -334,7 +334,7 @@ public class RedundantKVGenerator {
 
         // add it to map
         if (!rowsToQualifier.containsKey(rowId)) {
-          rowsToQualifier.put(rowId, new ArrayList<byte[]>());
+          rowsToQualifier.put(rowId, new ArrayList<>());
         }
         rowsToQualifier.get(rowId).add(qualifier);
       } else if (qualifierChance > chanceForSameQualifier) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index 01d387c..0aa30ee 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -148,8 +148,8 @@ public class ClassFinder {
     final Pattern jarResourceRe = Pattern.compile("^file:(.+\\.jar)!/" + path + "$");
 
     Enumeration<URL> resources = ClassLoader.getSystemClassLoader().getResources(path);
-    List<File> dirs = new ArrayList<File>();
-    List<String> jars = new ArrayList<String>();
+    List<File> dirs = new ArrayList<>();
+    List<String> jars = new ArrayList<>();
 
     while (resources.hasMoreElements()) {
       URL resource = resources.nextElement();
@@ -168,7 +168,7 @@ public class ClassFinder {
       }
     }
 
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     for (File directory : dirs) {
       classes.addAll(findClassesFromFiles(directory, packageName, proceedOnExceptions));
     }
@@ -189,7 +189,7 @@ public class ClassFinder {
       throw ioEx;
     }
 
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     JarEntry entry = null;
     try {
       while (true) {
@@ -236,7 +236,7 @@ public class ClassFinder {
 
   private Set<Class<?>> findClassesFromFiles(File baseDirectory, String packageName,
       boolean proceedOnExceptions) throws ClassNotFoundException, LinkageError {
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     if (!baseDirectory.exists()) {
       LOG.warn(baseDirectory.getAbsolutePath() + " does not exist");
       return classes;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
index 310a2fb..c0b9836 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
@@ -94,7 +94,7 @@ public class ResourceChecker {
     public List<String> getStringsToLog() { return null; }
   }
 
-  private List<ResourceAnalyzer> ras = new ArrayList<ResourceAnalyzer>();
+  private List<ResourceAnalyzer> ras = new ArrayList<>();
   private int[] initialValues;
   private int[] endingValues;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
index 6264a5e..751b9e3 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
@@ -41,10 +41,10 @@ import org.junit.runner.notification.RunListener;
  * When surefire forkMode=once/always/perthread, this code is executed on the forked process.
  */
 public class ResourceCheckerJUnitListener extends RunListener {
-  private Map<String, ResourceChecker> rcs = new ConcurrentHashMap<String, ResourceChecker>();
+  private Map<String, ResourceChecker> rcs = new ConcurrentHashMap<>();
 
   static class ThreadResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
-    private static Set<String> initialThreadNames = new HashSet<String>();
+    private static Set<String> initialThreadNames = new HashSet<>();
     private static List<String> stringsToLog = null;
 
     @Override
@@ -57,7 +57,7 @@ public class ResourceCheckerJUnitListener extends RunListener {
         }
       } else if (phase == Phase.END) {
         if (stackTraces.size() > initialThreadNames.size()) {
-          stringsToLog = new ArrayList<String>();
+          stringsToLog = new ArrayList<>();
           for (Thread t : stackTraces.keySet()) {
             if (!initialThreadNames.contains(t.getName())) {
               stringsToLog.add("\nPotentially hanging thread: " + t.getName() + "\n");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index 41a011d..441d1b5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -215,13 +215,13 @@ public class TestCellUtil {
     consume(doCreateCellArray(1), 1);
     consume(doCreateCellArray(0), 0);
     consume(doCreateCellArray(3), 3);
-    List<CellScannable> cells = new ArrayList<CellScannable>(hundredK);
+    List<CellScannable> cells = new ArrayList<>(hundredK);
     for (int i = 0; i < hundredK; i++) {
       cells.add(new TestCellScannable(1));
     }
     consume(CellUtil.createCellScanner(cells), hundredK * 1);
-    NavigableMap<byte [], List<Cell>> m = new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
-    List<Cell> cellArray = new ArrayList<Cell>(hundredK);
+    NavigableMap<byte [], List<Cell>> m = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+    List<Cell> cellArray = new ArrayList<>(hundredK);
     for (int i = 0; i < hundredK; i++) cellArray.add(new TestCell(i));
     m.put(new byte [] {'f'}, cellArray);
     consume(CellUtil.createCellScanner(m), hundredK * 1);
@@ -237,7 +237,7 @@ public class TestCellUtil {
 
   private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList)
   throws IOException {
-    List<CellScannable> cells = new ArrayList<CellScannable>(listsCount);
+    List<CellScannable> cells = new ArrayList<>(listsCount);
     for (int i = 0; i < listsCount; i++) {
       CellScannable cs = new CellScannable() {
         @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
index 5154810..244c267 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
@@ -381,7 +381,7 @@ public class TestClassFinder {
     // Directory entries for all packages have to be added explicitly for
     // resources to be findable via ClassLoader. Directory entries must end
     // with "/"; the initial one is expected to, also.
-    Set<String> pathsInJar = new HashSet<String>();
+    Set<String> pathsInJar = new HashSet<>();
     for (FileAndPath fileAndPath : filesInJar) {
       String pathToAdd = fileAndPath.path;
       while (pathsInJar.add(pathToAdd)) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java
index 57409b6..0a0a1d2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCompoundConfiguration.java
@@ -121,8 +121,7 @@ public class TestCompoundConfiguration extends TestCase {
 
   @Test
   public void testWithIbwMap() {
-    Map<Bytes, Bytes> map =
-      new HashMap<Bytes, Bytes>();
+    Map<Bytes, Bytes> map = new HashMap<>();
     map.put(strToIb("B"), strToIb("2b"));
     map.put(strToIb("C"), strToIb("33"));
     map.put(strToIb("D"), strToIb("4"));
@@ -162,7 +161,7 @@ public class TestCompoundConfiguration extends TestCase {
 
   @Test
   public void testWithStringMap() {
-    Map<String, String> map = new HashMap<String, String>();
+    Map<String, String> map = new HashMap<>();
     map.put("B", "2b");
     map.put("C", "33");
     map.put("D", "4");
@@ -199,10 +198,10 @@ public class TestCompoundConfiguration extends TestCase {
 
   @Test
   public void testLaterConfigsOverrideEarlier() {
-    Map<String, String> map1 = new HashMap<String, String>();
+    Map<String, String> map1 = new HashMap<>();
     map1.put("A", "2");
     map1.put("D", "5");
-    Map<String, String> map2 = new HashMap<String, String>();
+    Map<String, String> map2 = new HashMap<>();
     String newValueForA = "3", newValueForB = "4";
     map2.put("A", newValueForA);
     map2.put("B", newValueForB);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index 4e0090d..562c008 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -222,7 +222,7 @@ public class TestKeyValue extends TestCase {
   }
 
   public void testBinaryKeys() throws Exception {
-    Set<KeyValue> set = new TreeSet<KeyValue>(CellComparator.COMPARATOR);
+    Set<KeyValue> set = new TreeSet<>(CellComparator.COMPARATOR);
     final byte [] fam = Bytes.toBytes("col");
     final byte [] qf = Bytes.toBytes("umn");
     final byte [] nb = new byte[0];
@@ -248,7 +248,7 @@ public class TestKeyValue extends TestCase {
     }
     assertTrue(assertion);
     // Make set with good comparator
-    set = new TreeSet<KeyValue>(CellComparator.META_COMPARATOR);
+    set = new TreeSet<>(CellComparator.META_COMPARATOR);
     Collections.addAll(set, keys);
     count = 0;
     for (KeyValue k: set) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestBoundedByteBufferPool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestBoundedByteBufferPool.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestBoundedByteBufferPool.java
index 8775d7f..eca7712 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestBoundedByteBufferPool.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestBoundedByteBufferPool.java
@@ -93,7 +93,7 @@ public class TestBoundedByteBufferPool {
 
   @Test
   public void testBufferSizeGrowWithMultiThread() throws Exception {
-    final ConcurrentLinkedDeque<ByteBuffer> bufferQueue = new ConcurrentLinkedDeque<ByteBuffer>();
+    final ConcurrentLinkedDeque<ByteBuffer> bufferQueue = new ConcurrentLinkedDeque<>();
     int takeBufferThreadsCount = 30;
     int putBufferThreadsCount = 1;
     Thread takeBufferThreads[] = new Thread[takeBufferThreadsCount];

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
index 5e609ad..dddd9e7 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
@@ -150,7 +150,7 @@ public class TestTagCompressionContext {
   }
 
   private KeyValue createKVWithTags(int noOfTags) {
-    List<Tag> tags = new ArrayList<Tag>();
+    List<Tag> tags = new ArrayList<>();
     for (int i = 0; i < noOfTags; i++) {
       tags.add(new ArrayBackedTag((byte) i, "tagValue" + i));
     }
@@ -159,7 +159,7 @@ public class TestTagCompressionContext {
   }
 
   private Cell createOffheapKVWithTags(int noOfTags) {
-    List<Tag> tags = new ArrayList<Tag>();
+    List<Tag> tags = new ArrayList<>();
     for (int i = 0; i < noOfTags; i++) {
       tags.add(new ArrayBackedTag((byte) i, "tagValue" + i));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
index af4c464..48922d9 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/nio/TestMultiByteBuff.java
@@ -243,7 +243,7 @@ public class TestMultiByteBuff {
     assertFalse(bb2 == sub);
     assertEquals(l2, ByteBufferUtils.toLong(sub, sub.position()));
     multi.rewind();
-    ObjectIntPair<ByteBuffer> p = new ObjectIntPair<ByteBuffer>();
+    ObjectIntPair<ByteBuffer> p = new ObjectIntPair<>();
     multi.asSubByteBuffer(8, Bytes.SIZEOF_LONG, p);
     assertFalse(bb1 == p.getFirst());
     assertFalse(bb2 == p.getFirst());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestFixedLengthWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestFixedLengthWrapper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestFixedLengthWrapper.java
index b259429..c2c5a6d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestFixedLengthWrapper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestFixedLengthWrapper.java
@@ -51,7 +51,7 @@ public class TestFixedLengthWrapper {
       for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) {
         for (byte[] val : VALUES) {
           buff.setPosition(0);
-          DataType<byte[]> type = new FixedLengthWrapper<byte[]>(new RawBytes(ord), limit);
+          DataType<byte[]> type = new FixedLengthWrapper<>(new RawBytes(ord), limit);
           assertEquals(limit, type.encode(buff, val));
           buff.setPosition(0);
           byte[] actual = type.decode(buff);
@@ -67,21 +67,21 @@ public class TestFixedLengthWrapper {
   @Test(expected = IllegalArgumentException.class)
   public void testInsufficientRemainingRead() {
     PositionedByteRange buff = new SimplePositionedMutableByteRange(0);
-    DataType<byte[]> type = new FixedLengthWrapper<byte[]>(new RawBytes(), 3);
+    DataType<byte[]> type = new FixedLengthWrapper<>(new RawBytes(), 3);
     type.decode(buff);
   }
 
   @Test(expected = IllegalArgumentException.class)
   public void testInsufficientRemainingWrite() {
     PositionedByteRange buff = new SimplePositionedMutableByteRange(0);
-    DataType<byte[]> type = new FixedLengthWrapper<byte[]>(new RawBytes(), 3);
+    DataType<byte[]> type = new FixedLengthWrapper<>(new RawBytes(), 3);
     type.encode(buff, Bytes.toBytes(""));
   }
 
   @Test(expected = IllegalArgumentException.class)
   public void testOverflowPassthrough() {
     PositionedByteRange buff = new SimplePositionedMutableByteRange(3);
-    DataType<byte[]> type = new FixedLengthWrapper<byte[]>(new RawBytes(), 0);
+    DataType<byte[]> type = new FixedLengthWrapper<>(new RawBytes(), 0);
     type.encode(buff, Bytes.toBytes("foo"));
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStructNullExtension.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStructNullExtension.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStructNullExtension.java
index e87438d..2b2efe6 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStructNullExtension.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStructNullExtension.java
@@ -58,7 +58,7 @@ public class TestStructNullExtension {
     Struct shorter = builder.toStruct();
     Struct longer = builder
         // intentionally include a wrapped instance to test wrapper behavior.
-        .add(new TerminatedWrapper<String>(OrderedString.ASCENDING, "/"))
+        .add(new TerminatedWrapper<>(OrderedString.ASCENDING, "/"))
         .add(OrderedNumeric.ASCENDING)
         .toStruct();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestTerminatedWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestTerminatedWrapper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestTerminatedWrapper.java
index e36a141..310067b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestTerminatedWrapper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestTerminatedWrapper.java
@@ -47,7 +47,7 @@ public class TestTerminatedWrapper {
 
   @Test(expected = IllegalArgumentException.class)
   public void testEmptyDelimiter() {
-    new TerminatedWrapper<byte[]>(new RawBytes(), "");
+    new TerminatedWrapper<>(new RawBytes(), "");
   }
 
   @Test(expected = IllegalArgumentException.class)
@@ -58,7 +58,7 @@ public class TestTerminatedWrapper {
 
   @Test(expected = IllegalArgumentException.class)
   public void testEncodedValueContainsTerm() {
-    DataType<byte[]> type = new TerminatedWrapper<byte[]>(new RawBytes(), "foo");
+    DataType<byte[]> type = new TerminatedWrapper<>(new RawBytes(), "foo");
     PositionedByteRange buff = new SimplePositionedMutableByteRange(16);
     type.encode(buff, Bytes.toBytes("hello foobar!"));
   }
@@ -72,7 +72,7 @@ public class TestTerminatedWrapper {
       for (byte[] term : TERMINATORS) {
         for (String val : VALUES_STRINGS) {
           buff.setPosition(0);
-          DataType<String> type = new TerminatedWrapper<String>(t, term);
+          DataType<String> type = new TerminatedWrapper<>(t, term);
           assertEquals(val.length() + 2 + term.length, type.encode(buff, val));
           buff.setPosition(0);
           assertEquals(val, type.decode(buff));
@@ -89,7 +89,7 @@ public class TestTerminatedWrapper {
       for (byte[] term : TERMINATORS) {
         for (byte[] val : VALUES_BYTES) {
           buff.setPosition(0);
-          DataType<byte[]> type = new TerminatedWrapper<byte[]>(new RawBytes(ord), term);
+          DataType<byte[]> type = new TerminatedWrapper<>(new RawBytes(ord), term);
           assertEquals(val.length + term.length, type.encode(buff, val));
           buff.setPosition(0);
           assertArrayEquals(val, type.decode(buff));
@@ -108,7 +108,7 @@ public class TestTerminatedWrapper {
       for (byte[] term : TERMINATORS) {
         for (String val : VALUES_STRINGS) {
           buff.setPosition(0);
-          DataType<String> type = new TerminatedWrapper<String>(t, term);
+          DataType<String> type = new TerminatedWrapper<>(t, term);
           int expected = val.length() + 2 + term.length;
           assertEquals(expected, type.encode(buff, val));
           buff.setPosition(0);
@@ -126,7 +126,7 @@ public class TestTerminatedWrapper {
       for (byte[] term : TERMINATORS) {
         for (byte[] val : VALUES_BYTES) {
           buff.setPosition(0);
-          DataType<byte[]> type = new TerminatedWrapper<byte[]>(new RawBytes(ord), term);
+          DataType<byte[]> type = new TerminatedWrapper<>(new RawBytes(ord), term);
           int expected = type.encode(buff, val);
           buff.setPosition(0);
           assertEquals(expected, type.skip(buff));
@@ -139,7 +139,7 @@ public class TestTerminatedWrapper {
   @Test(expected = IllegalArgumentException.class)
   public void testInvalidSkip() {
     PositionedByteRange buff = new SimplePositionedMutableByteRange(Bytes.toBytes("foo"));
-    DataType<byte[]> type = new TerminatedWrapper<byte[]>(new RawBytes(), new byte[] { 0x00 });
+    DataType<byte[]> type = new TerminatedWrapper<>(new RawBytes(), new byte[] { 0x00 });
     type.skip(buff);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
index 30e33d9..ba6cea0 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
@@ -133,13 +133,13 @@ public class ClassLoaderTestHelper {
 
     // compile it by JavaCompiler
     JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
-    ArrayList<String> srcFileNames = new ArrayList<String>(1);
+    ArrayList<String> srcFileNames = new ArrayList<>(1);
     srcFileNames.add(sourceCodeFile.toString());
     StandardJavaFileManager fm = compiler.getStandardFileManager(null, null,
       null);
     Iterable<? extends JavaFileObject> cu =
       fm.getJavaFileObjects(sourceCodeFile);
-    List<String> options = new ArrayList<String>(2);
+    List<String> options = new ArrayList<>(2);
     options.add("-classpath");
     // only add hbase classes to classpath. This is a little bit tricky: assume
     // the classpath is {hbaseSrc}/target/classes.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
index 3c7b680..554e108 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestAvlUtil.java
@@ -48,7 +48,7 @@ public class TestAvlUtil {
     final int MAX_KEY = 99999999;
     final int NELEM = 10000;
 
-    final TreeMap<Integer, Object> treeMap = new TreeMap<Integer, Object>();
+    final TreeMap<Integer, Object> treeMap = new TreeMap<>();
     TestAvlNode root = null;
 
     final Random rand = new Random();
@@ -117,7 +117,7 @@ public class TestAvlUtil {
       root = AvlTree.insert(root, new TestAvlNode(i));
     }
 
-    AvlTreeIterator<TestAvlNode> iter = new AvlTreeIterator<TestAvlNode>(root);
+    AvlTreeIterator<TestAvlNode> iter = new AvlTreeIterator<>(root);
     assertTrue(iter.hasNext());
     long prevKey = 0;
     while (iter.hasNext()) {
@@ -139,7 +139,7 @@ public class TestAvlUtil {
     }
 
     for (int i = MIN_KEY - 1; i < MAX_KEY + 1; ++i) {
-      AvlTreeIterator<TestAvlNode> iter = new AvlTreeIterator<TestAvlNode>(root, i, KEY_COMPARATOR);
+      AvlTreeIterator<TestAvlNode> iter = new AvlTreeIterator<>(root, i, KEY_COMPARATOR);
       if (i < MAX_KEY) {
         assertTrue(iter.hasNext());
       } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
index 09ef707..7c74bca 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
@@ -55,7 +55,7 @@ public class TestBase64 extends TestCase {
    * @throws UnsupportedEncodingException
    */
   public void testBase64() throws UnsupportedEncodingException {
-    TreeMap<String, String> sorted = new TreeMap<String, String>();
+    TreeMap<String, String> sorted = new TreeMap<>();
 
     for (int i = 0; i < uris.length; i++) {
       byte[] bytes = uris[i].getBytes("UTF-8");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBoundedArrayQueue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBoundedArrayQueue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBoundedArrayQueue.java
index 2cc3751..6d9c496 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBoundedArrayQueue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBoundedArrayQueue.java
@@ -31,7 +31,7 @@ import org.junit.experimental.categories.Category;
 public class TestBoundedArrayQueue {
 
   private int qMaxElements = 5;
-  private BoundedArrayQueue<Integer> queue = new BoundedArrayQueue<Integer>(qMaxElements);
+  private BoundedArrayQueue<Integer> queue = new BoundedArrayQueue<>(qMaxElements);
 
   @Test
   public void testBoundedArrayQueueOperations() throws Exception {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
index e94293c..b78574a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
@@ -77,7 +77,7 @@ public class TestByteBufferUtils {
   }
 
   static {
-    SortedSet<Long> a = new TreeSet<Long>();
+    SortedSet<Long> a = new TreeSet<>();
     for (int i = 0; i <= 63; ++i) {
       long v = (-1L) << i;
       assertTrue(v < 0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
index 717e24c..8ae2a29 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
@@ -64,7 +64,7 @@ public class TestByteRangeWithKVSerialization {
     final byte[] QUALIFIER = Bytes.toBytes("q1");
     final byte[] VALUE = Bytes.toBytes("v");
     int kvCount = 1000000;
-    List<KeyValue> kvs = new ArrayList<KeyValue>(kvCount);
+    List<KeyValue> kvs = new ArrayList<>(kvCount);
     int totalSize = 0;
     Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "tag1") };
     for (int i = 0; i < kvCount; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index e145642..38b01b8 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -498,7 +498,7 @@ public class TestBytes extends TestCase {
   }
   
   public void testToFromHex() {
-    List<String> testStrings = new ArrayList<String>(8);
+    List<String> testStrings = new ArrayList<>(8);
     testStrings.addAll(Arrays.asList(new String[] {
         "",
         "00",
@@ -517,7 +517,7 @@ public class TestBytes extends TestCase {
       Assert.assertTrue(testString.equalsIgnoreCase(result));
     }
     
-    List<byte[]> testByteData = new ArrayList<byte[]>(5);
+    List<byte[]> testByteData = new ArrayList<>(5);
     testByteData.addAll(Arrays.asList(new byte[][] {
       new byte[0],
       new byte[1],

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
index fd4baf5..cfd288d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java
@@ -39,7 +39,7 @@ public class TestConcatenatedLists {
   @Test
   public void testUnsupportedOps() {
     // If adding support, add tests.
-    ConcatenatedLists<Long> c = new ConcatenatedLists<Long>();
+    ConcatenatedLists<Long> c = new ConcatenatedLists<>();
     c.addSublist(Arrays.asList(0L, 1L));
     try {
       c.add(2L);
@@ -82,19 +82,19 @@ public class TestConcatenatedLists {
 
   @Test
   public void testEmpty() {
-    verify(new ConcatenatedLists<Long>(), -1);
+    verify(new ConcatenatedLists<>(), -1);
   }
 
   @Test
   public void testOneOne() {
-    ConcatenatedLists<Long> c = new ConcatenatedLists<Long>();
+    ConcatenatedLists<Long> c = new ConcatenatedLists<>();
     c.addSublist(Arrays.asList(0L));
     verify(c, 0);
   }
 
   @Test
   public void testOneMany() {
-    ConcatenatedLists<Long> c = new ConcatenatedLists<Long>();
+    ConcatenatedLists<Long> c = new ConcatenatedLists<>();
     c.addSublist(Arrays.asList(0L, 1L, 2L));
     verify(c, 2);
   }
@@ -102,7 +102,7 @@ public class TestConcatenatedLists {
   @Test
   @SuppressWarnings("unchecked")
   public void testManyOne() {
-    ConcatenatedLists<Long> c = new ConcatenatedLists<Long>();
+    ConcatenatedLists<Long> c = new ConcatenatedLists<>();
     c.addSublist(Arrays.asList(0L));
     c.addAllSublists(Arrays.asList(Arrays.asList(1L), Arrays.asList(2L)));
     verify(c, 2);
@@ -111,7 +111,7 @@ public class TestConcatenatedLists {
   @Test
   @SuppressWarnings("unchecked")
   public void testManyMany() {
-    ConcatenatedLists<Long> c = new ConcatenatedLists<Long>();
+    ConcatenatedLists<Long> c = new ConcatenatedLists<>();
     c.addAllSublists(Arrays.asList(Arrays.asList(0L, 1L)));
     c.addSublist(Arrays.asList(2L, 3L, 4L));
     c.addAllSublists(Arrays.asList(Arrays.asList(5L), Arrays.asList(6L, 7L)));
@@ -123,7 +123,7 @@ public class TestConcatenatedLists {
     assertEquals(last + 1, c.size());
     assertTrue(c.containsAll(c));
     Long[] array = c.toArray(new Long[c.size()]);
-    List<Long> all = new ArrayList<Long>();
+    List<Long> all = new ArrayList<>();
     Iterator<Long> iter = c.iterator();
     for (Long i = 0L; i <= last; ++i) {
       assertTrue(iter.hasNext());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestKeyLocker.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestKeyLocker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestKeyLocker.java
index 40b918c..edf2f78 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestKeyLocker.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestKeyLocker.java
@@ -30,7 +30,7 @@ import org.junit.experimental.categories.Category;
 public class TestKeyLocker {
   @Test
   public void testLocker(){
-    KeyLocker<String> locker = new KeyLocker<String>();
+    KeyLocker<String> locker = new KeyLocker<>();
     ReentrantLock lock1 = locker.acquireLock("l1");
     Assert.assertTrue(lock1.isHeldByCurrentThread());
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
index 120f2b6..cf74a3e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
@@ -64,7 +64,7 @@ public class TestLoadTestKVGenerator {
 
   @Test
   public void testCorrectAndUniqueKeys() {
-    Set<String> keys = new HashSet<String>();
+    Set<String> keys = new HashSet<>();
     for (int i = 0; i < 1000; ++i) {
       String k = LoadTestKVGenerator.md5PrefixedKey(i);
       assertFalse(keys.contains(k));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestWeakObjectPool.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestWeakObjectPool.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestWeakObjectPool.java
index bf1b4eb..d9fefa2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestWeakObjectPool.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestWeakObjectPool.java
@@ -35,7 +35,7 @@ public class TestWeakObjectPool {
 
   @Before
   public void setUp() {
-    pool = new WeakObjectPool<String, Object>(
+    pool = new WeakObjectPool<>(
         new WeakObjectPool.ObjectFactory<String, Object>() {
           @Override
           public Object createObject(String key) {
@@ -94,7 +94,7 @@ public class TestWeakObjectPool {
     final int THREAD_COUNT = 100;
 
     final AtomicBoolean assertionFailed = new AtomicBoolean();
-    final AtomicReference<Object> expectedObjRef = new AtomicReference<Object>();
+    final AtomicReference<Object> expectedObjRef = new AtomicReference<>();
     final CountDownLatch prepareLatch = new CountDownLatch(THREAD_COUNT);
     final CountDownLatch startLatch = new CountDownLatch(1);
     final CountDownLatch endLatch = new CountDownLatch(THREAD_COUNT);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 304722e..810778b 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -208,7 +208,7 @@ public class AggregationClient implements Closeable {
           public R call(AggregateService instance) throws IOException {
             RpcController controller = new AggregationClientRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getMax(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
@@ -280,7 +280,7 @@ public class AggregationClient implements Closeable {
           public R call(AggregateService instance) throws IOException {
             RpcController controller = new AggregationClientRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getMin(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
@@ -355,7 +355,7 @@ public class AggregationClient implements Closeable {
           public Long call(AggregateService instance) throws IOException {
             RpcController controller = new AggregationClientRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getRowNum(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
@@ -421,7 +421,7 @@ public class AggregationClient implements Closeable {
             RpcController controller = new AggregationClientRpcController();
             // Not sure what is going on here why I have to do these casts. TODO.
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getSum(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
@@ -472,7 +472,7 @@ public class AggregationClient implements Closeable {
       Long rowCount = 0l;
 
       public synchronized Pair<S, Long> getAvgArgs() {
-        return new Pair<S, Long>(sum, rowCount);
+        return new Pair<>(sum, rowCount);
       }
 
       @Override
@@ -488,13 +488,13 @@ public class AggregationClient implements Closeable {
           public Pair<S, Long> call(AggregateService instance) throws IOException {
             RpcController controller = new AggregationClientRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getAvg(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
               throw new IOException(controller.errorText());
             }
-            Pair<S, Long> pair = new Pair<S, Long>(null, 0L);
+            Pair<S, Long> pair = new Pair<>(null, 0L);
             if (response.getFirstPartCount() == 0) {
               return pair;
             }
@@ -569,10 +569,10 @@ public class AggregationClient implements Closeable {
       S sumVal = null, sumSqVal = null;
 
       public synchronized Pair<List<S>, Long> getStdParams() {
-        List<S> l = new ArrayList<S>(2);
+        List<S> l = new ArrayList<>(2);
         l.add(sumVal);
         l.add(sumSqVal);
-        Pair<List<S>, Long> p = new Pair<List<S>, Long>(l, rowCountVal);
+        Pair<List<S>, Long> p = new Pair<>(l, rowCountVal);
         return p;
       }
 
@@ -592,17 +592,17 @@ public class AggregationClient implements Closeable {
           public Pair<List<S>, Long> call(AggregateService instance) throws IOException {
             RpcController controller = new AggregationClientRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getStd(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
               throw new IOException(controller.errorText());
             }
-            Pair<List<S>, Long> pair = new Pair<List<S>, Long>(new ArrayList<S>(), 0L);
+            Pair<List<S>, Long> pair = new Pair<>(new ArrayList<>(), 0L);
             if (response.getFirstPartCount() == 0) {
               return pair;
             }
-            List<S> list = new ArrayList<S>();
+            List<S> list = new ArrayList<>();
             for (int i = 0; i < response.getFirstPartCount(); i++) {
               ByteString b = response.getFirstPart(i);
               T t = getParsedGenericInstance(ci.getClass(), 4, b);
@@ -680,17 +680,15 @@ public class AggregationClient implements Closeable {
   getMedianArgs(final Table table,
       final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
-    final NavigableMap<byte[], List<S>> map =
-      new TreeMap<byte[], List<S>>(Bytes.BYTES_COMPARATOR);
+    final NavigableMap<byte[], List<S>> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     class StdCallback implements Batch.Callback<List<S>> {
       S sumVal = null, sumWeights = null;
 
       public synchronized Pair<NavigableMap<byte[], List<S>>, List<S>> getMedianParams() {
-        List<S> l = new ArrayList<S>(2);
+        List<S> l = new ArrayList<>(2);
         l.add(sumVal);
         l.add(sumWeights);
-        Pair<NavigableMap<byte[], List<S>>, List<S>> p =
-          new Pair<NavigableMap<byte[], List<S>>, List<S>>(map, l);
+        Pair<NavigableMap<byte[], List<S>>, List<S>> p = new Pair<>(map, l);
         return p;
       }
 
@@ -708,14 +706,14 @@ public class AggregationClient implements Closeable {
           public List<S> call(AggregateService instance) throws IOException {
             RpcController controller = new AggregationClientRpcController();
             CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse> rpcCallback =
-                new CoprocessorRpcUtils.BlockingRpcCallback<AggregateResponse>();
+                new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.getMedian(controller, requestArg, rpcCallback);
             AggregateResponse response = rpcCallback.get();
             if (controller.failed()) {
               throw new IOException(controller.errorText());
             }
 
-            List<S> list = new ArrayList<S>();
+            List<S> list = new ArrayList<>();
             for (int i = 0; i < response.getFirstPartCount(); i++) {
               ByteString b = response.getFirstPart(i);
               T t = getParsedGenericInstance(ci.getClass(), 4, b);
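
The new Pair<>(new ArrayList<>(), 0L) rewrite in the getStd hunk above nests one diamond inside another; under Java 8's target typing (which the branch this landed on builds with) inference flows from the declared Pair<List<S>, Long> through both constructors, so the inner list's element type is still pinned. A standalone sketch using JDK types, with AbstractMap.SimpleEntry standing in for HBase's Pair:

    import java.util.AbstractMap;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    class NestedDiamond {
      // Both diamonds resolve from the declared type: the key type is
      // List<String>, so the inner ArrayList is an ArrayList<String>.
      Map.Entry<List<String>, Long> entry =
          new AbstractMap.SimpleEntry<>(new ArrayList<>(), 0L);
    }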

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index bccb76a..3fbbd52 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -83,7 +83,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
       byte[] colFamily = scan.getFamilies()[0];
       NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
       byte[] qualifier = null;
@@ -138,7 +138,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       T temp;
       Scan scan = ProtobufUtil.toScan(request.getScan());
       scanner = env.getRegion().getScanner(scan);
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
       byte[] colFamily = scan.getFamilies()[0];
       NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
       byte[] qualifier = null;
@@ -198,7 +198,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       if (qualifiers != null && !qualifiers.isEmpty()) {
         qualifier = qualifiers.pollFirst();
       }
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
       boolean hasMoreRows = false;
       do {
         hasMoreRows = scanner.next(results);
@@ -237,7 +237,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       RpcCallback<AggregateResponse> done) {
     AggregateResponse response = null;
     long counter = 0l;
-    List<Cell> results = new ArrayList<Cell>();
+    List<Cell> results = new ArrayList<>();
     InternalScanner scanner = null;
     try {
       Scan scan = ProtobufUtil.toScan(request.getScan());
@@ -308,7 +308,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       if (qualifiers != null && !qualifiers.isEmpty()) {
         qualifier = qualifiers.pollFirst();
       }
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
       boolean hasMoreRows = false;
     
       do {
@@ -368,7 +368,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
       if (qualifiers != null && !qualifiers.isEmpty()) {
         qualifier = qualifiers.pollFirst();
       }
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
 
       boolean hasMoreRows = false;
     
@@ -434,7 +434,7 @@ extends AggregateService implements CoprocessorService, Coprocessor {
         // if weighted median is requested, get qualifier for the weight column
         weightQualifier = qualifiers.pollLast();
       }
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
 
       boolean hasMoreRows = false;
     

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
index a9d10e8..b52e5f9 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
@@ -84,7 +84,7 @@ implements Coprocessor, CoprocessorService {
     InternalScanner scanner = null;
     try {
       scanner = this.env.getRegion().getScanner(scan);
-      List<Cell> curVals = new ArrayList<Cell>();
+      List<Cell> curVals = new ArrayList<>();
       boolean hasMore = false;
       do {
         curVals.clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
index 22dac6d..54e3358 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java
@@ -94,7 +94,7 @@ implements Coprocessor, CoprocessorService  {
         return;
       }
       scanner = region.getScanner(scan);
-      List<Cell> curVals = new ArrayList<Cell>();
+      List<Cell> curVals = new ArrayList<>();
       boolean hasMore = false;
       do {
         curVals.clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
index c75fb31..6e8c571 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java
@@ -94,7 +94,7 @@ implements Coprocessor, CoprocessorService  {
         throw new DoNotRetryIOException("An expected exception");
       }
       scanner = region.getScanner(scan);
-      List<Cell> curVals = new ArrayList<Cell>();
+      List<Cell> curVals = new ArrayList<>();
       boolean hasMore = false;
       do {
         curVals.clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 3ed8a56..56fdca6 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -168,8 +168,7 @@ public class TestClassLoading {
     // verify that the coprocessors were loaded
     boolean foundTableRegion=false;
     boolean found1 = true, found2 = true, found2_k1 = true, found2_k2 = true, found2_k3 = true;
-    Map<Region, Set<ClassLoader>> regionsActiveClassLoaders =
-        new HashMap<Region, Set<ClassLoader>>();
+    Map<Region, Set<ClassLoader>> regionsActiveClassLoaders = new HashMap<>();
     MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
     for (Region region:
         hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
@@ -209,7 +208,7 @@ public class TestClassLoading {
       " of external jar files",
       2, CoprocessorClassLoader.getAllCached().size());
     //check if region active classloaders are shared across all RS regions
-    Set<ClassLoader> externalClassLoaders = new HashSet<ClassLoader>(
+    Set<ClassLoader> externalClassLoaders = new HashSet<>(
       CoprocessorClassLoader.getAllCached());
     for (Map.Entry<Region, Set<ClassLoader>> regionCP : regionsActiveClassLoaders.entrySet()) {
       assertTrue("Some CP classloaders for region " + regionCP.getKey() + " are not cached."
@@ -312,7 +311,7 @@ public class TestClassLoading {
     // add 2 coprocessor by using new htd.addCoprocessor() api
     htd.addCoprocessor(cpName5, new Path(getLocalPath(jarFile5)),
         Coprocessor.PRIORITY_USER, null);
-    Map<String, String> kvs = new HashMap<String, String>();
+    Map<String, String> kvs = new HashMap<>();
     kvs.put("k1", "v1");
     kvs.put("k2", "v2");
     kvs.put("k3", "v3");
@@ -466,8 +465,7 @@ public class TestClassLoading {
    * @return subset of all servers.
    */
   Map<ServerName, ServerLoad> serversForTable(String tableName) {
-    Map<ServerName, ServerLoad> serverLoadHashMap =
-        new HashMap<ServerName, ServerLoad>();
+    Map<ServerName, ServerLoad> serverLoadHashMap = new HashMap<>();
     for(Map.Entry<ServerName,ServerLoad> server:
         TEST_UTIL.getMiniHBaseCluster().getMaster().getServerManager().
             getOnlineServers().entrySet()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index 547b7e9..adfd8d5 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -124,7 +124,7 @@ public class TestCoprocessorEndpoint {
         public Long call(ColumnAggregationProtos.ColumnAggregationService instance)
         throws IOException {
           CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
           ColumnAggregationProtos.SumRequest.Builder builder =
             ColumnAggregationProtos.SumRequest.newBuilder();
           builder.setFamily(ByteStringer.wrap(family));
@@ -193,7 +193,7 @@ public class TestCoprocessorEndpoint {
                 throws IOException {
               LOG.debug("Default response is " + TestProtos.EchoRequestProto.getDefaultInstance());
               CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback =
-                  new CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto>();
+                  new CoprocessorRpcUtils.BlockingRpcCallback<>();
               instance.echo(controller, request, callback);
               TestProtos.EchoResponseProto response = callback.get();
               LOG.debug("Batch.Call returning result " + response);
@@ -226,7 +226,7 @@ public class TestCoprocessorEndpoint {
                 throws IOException {
               LOG.debug("Default response is " + TestProtos.EchoRequestProto.getDefaultInstance());
               CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback =
-                  new CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto>();
+                  new CoprocessorRpcUtils.BlockingRpcCallback<>();
               instance.echo(controller, request, callback);
               TestProtos.EchoResponseProto response = callback.get();
               LOG.debug("Batch.Call returning result " + response);
@@ -271,7 +271,7 @@ public class TestCoprocessorEndpoint {
             public String call(TestRpcServiceProtos.TestProtobufRpcProto instance)
                 throws IOException {
               CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback =
-                  new CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto>();
+                  new CoprocessorRpcUtils.BlockingRpcCallback<>();
               instance.echo(controller, request, callback);
               TestProtos.EchoResponseProto response = callback.get();
               LOG.debug("Batch.Call got result " + response);

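One limit worth noting while reading these hunks: the BlockingRpcCallback instantiations take the diamond, but the enclosing anonymous Batch.Call subclasses keep their explicit type arguments. Diamond on an anonymous class is not legal until Java 9 (JEP 213), so lines of the form new Batch.Call<Service, Long>() { ... } are left unchanged by design. A minimal sketch of the restriction, using java.util.concurrent.Callable as a stand-in:

    import java.util.concurrent.Callable;

    public class AnonymousClassLimit {
      public static void main(String[] args) throws Exception {
        // Legal on any JDK since 5: explicit type argument on the anonymous class.
        Callable<String> explicit = new Callable<String>() {
          @Override
          public String call() {
            return "pong";
          }
        };
        // Writing new Callable<>() { ... } here would be a compile error before Java 9.
        System.out.println(explicit.call());
      }
    }
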
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
index 0af655a..0783131 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
@@ -117,7 +117,7 @@ public class TestCoprocessorTableEndpoint {
       public Long call(ColumnAggregationProtos.ColumnAggregationService instance)
       throws IOException {
         CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse> rpcCallback =
-            new CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse>();
+            new CoprocessorRpcUtils.BlockingRpcCallback<>();
         ColumnAggregationProtos.SumRequest.Builder builder =
           ColumnAggregationProtos.SumRequest.newBuilder();
         builder.setFamily(ByteString.copyFrom(family));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
index 69742a6..9dc4822 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
@@ -73,8 +73,7 @@ public class TestRegionServerCoprocessorEndpoint {
     final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
     final ServerRpcController controller = new ServerRpcController();
     final CoprocessorRpcUtils.BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>
-        rpcCallback =
-      new CoprocessorRpcUtils.BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>();
+        rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
     DummyRegionServerEndpointProtos.DummyService service =
         ProtobufUtil.newServiceStub(DummyRegionServerEndpointProtos.DummyService.class,
           TEST_UTIL.getAdmin().coprocessorService(serverName));
@@ -91,8 +90,7 @@ public class TestRegionServerCoprocessorEndpoint {
     final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
     final ServerRpcController controller = new ServerRpcController();
     final CoprocessorRpcUtils.BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>
-        rpcCallback =
-      new CoprocessorRpcUtils.BlockingRpcCallback<DummyRegionServerEndpointProtos.DummyResponse>();
+        rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
     DummyRegionServerEndpointProtos.DummyService service =
         ProtobufUtil.newServiceStub(DummyRegionServerEndpointProtos.DummyService.class,
             TEST_UTIL.getAdmin().coprocessorService(serverName));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 07d2042..ed53027 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -166,10 +166,9 @@ public class TestRowProcessorEndpoint {
     ProcessResponse protoResult = service.process(null, request);
     FriendsOfFriendsProcessorResponse response =
         FriendsOfFriendsProcessorResponse.parseFrom(protoResult.getRowProcessorResult());
-    Set<String> result = new HashSet<String>();
+    Set<String> result = new HashSet<>();
     result.addAll(response.getResultList());
-    Set<String> expected =
-      new HashSet<String>(Arrays.asList(new String[]{"d", "e", "f", "g"}));
+    Set<String> expected = new HashSet<>(Arrays.asList(new String[]{"d", "e", "f", "g"}));
     Get get = new Get(ROW);
     LOG.debug("row keyvalues:" + stringifyKvs(table.get(get).listCells()));
     assertEquals(expected, result);
@@ -349,7 +348,7 @@ public class TestRowProcessorEndpoint {
       public void process(long now, HRegion region,
           List<Mutation> mutations, WALEdit walEdit) throws IOException {
         // Scan current counter
-        List<Cell> kvs = new ArrayList<Cell>();
+        List<Cell> kvs = new ArrayList<>();
         Scan scan = new Scan(row, row);
         scan.addColumn(FAM, COUNTER);
         doScan(region, scan, kvs);
@@ -398,7 +397,7 @@ public class TestRowProcessorEndpoint {
         BaseRowProcessor<FriendsOfFriendsProcessorRequest, FriendsOfFriendsProcessorResponse> {
       byte[] row = null;
       byte[] person = null;
-      final Set<String> result = new HashSet<String>();
+      final Set<String> result = new HashSet<>();
 
       /**
        * Empty constructor for Writable
@@ -432,7 +431,7 @@ public class TestRowProcessorEndpoint {
       @Override
       public void process(long now, HRegion region,
           List<Mutation> mutations, WALEdit walEdit) throws IOException {
-        List<Cell> kvs = new ArrayList<Cell>();
+        List<Cell> kvs = new ArrayList<>();
         { // First scan to get friends of the person
           Scan scan = new Scan(row, row);
           scan.addColumn(FAM, person);
@@ -497,7 +496,7 @@ public class TestRowProcessorEndpoint {
 
       @Override
       public Collection<byte[]> getRowsToLock() {
-        List<byte[]> rows = new ArrayList<byte[]>(2);
+        List<byte[]> rows = new ArrayList<>(2);
         rows.add(row1);
         rows.add(row2);
         return rows;
@@ -522,8 +521,8 @@ public class TestRowProcessorEndpoint {
         now = myTimer.getAndIncrement();
 
         // Scan both rows
-        List<Cell> kvs1 = new ArrayList<Cell>();
-        List<Cell> kvs2 = new ArrayList<Cell>();
+        List<Cell> kvs1 = new ArrayList<>();
+        List<Cell> kvs2 = new ArrayList<>();
         doScan(region, new Scan(row1, row1), kvs1);
         doScan(region, new Scan(row2, row2), kvs2);
 
@@ -538,7 +537,7 @@ public class TestRowProcessorEndpoint {
         swapped = !swapped;
 
         // Add and delete keyvalues
-        List<List<Cell>> kvs = new ArrayList<List<Cell>>(2);
+        List<List<Cell>> kvs = new ArrayList<>(2);
         kvs.add(kvs1);
         kvs.add(kvs2);
         byte[][] rows = new byte[][]{row1, row2};

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
index 06e45eb..323999d 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
@@ -64,7 +64,7 @@ public class SecureBulkLoadEndpointClient {
       ServerRpcController controller = new ServerRpcController();
 
       CoprocessorRpcUtils.BlockingRpcCallback<PrepareBulkLoadResponse> rpcCallback =
-          new CoprocessorRpcUtils.BlockingRpcCallback<PrepareBulkLoadResponse>();
+          new CoprocessorRpcUtils.BlockingRpcCallback<>();
 
       PrepareBulkLoadRequest request =
           PrepareBulkLoadRequest.newBuilder()
@@ -92,7 +92,7 @@ public class SecureBulkLoadEndpointClient {
       ServerRpcController controller = new ServerRpcController();
 
       CoprocessorRpcUtils.BlockingRpcCallback<CleanupBulkLoadResponse> rpcCallback =
-          new CoprocessorRpcUtils.BlockingRpcCallback<CleanupBulkLoadResponse>();
+          new CoprocessorRpcUtils.BlockingRpcCallback<>();
 
       CleanupBulkLoadRequest request =
           CleanupBulkLoadRequest.newBuilder()
@@ -133,7 +133,7 @@ public class SecureBulkLoadEndpointClient {
       }
 
       List<ClientProtos.BulkLoadHFileRequest.FamilyPath> protoFamilyPaths =
-          new ArrayList<ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPaths.size());
+          new ArrayList<>(familyPaths.size());
       for(Pair<byte[], String> el: familyPaths) {
         protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
           .setFamily(ByteStringer.wrap(el.getFirst()))
@@ -148,8 +148,7 @@ public class SecureBulkLoadEndpointClient {
 
       ServerRpcController controller = new ServerRpcController();
       CoprocessorRpcUtils.BlockingRpcCallback<SecureBulkLoadProtos.SecureBulkLoadHFilesResponse>
-            rpcCallback =
-          new CoprocessorRpcUtils.BlockingRpcCallback<SecureBulkLoadProtos.SecureBulkLoadHFilesResponse>();
+            rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
       instance.secureBulkLoadHFiles(controller,
         request,
         rpcCallback);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
index c0d2719..10a4d19 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
@@ -92,8 +92,7 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS
       // create HFiles for different column families
       FileSystem fs = UTIL.getTestFileSystem();
       byte[] val = Bytes.toBytes(String.format("%010d", iteration));
-      final List<Pair<byte[], String>> famPaths = new ArrayList<Pair<byte[], String>>(
-          NUM_CFS);
+      final List<Pair<byte[], String>> famPaths = new ArrayList<>(NUM_CFS);
       for (int i = 0; i < NUM_CFS; i++) {
         Path hfile = new Path(dir, family(i));
         byte[] fam = Bytes.toBytes(family(i));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index 9bff701..83c7dbf 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -198,7 +198,7 @@ public class TestServerCustomProtocol {
         @Override
         public Integer call(PingProtos.PingService instance) throws IOException {
           CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.CountResponse> rpcCallback =
-            new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.CountResponse>();
+            new CoprocessorRpcUtils.BlockingRpcCallback<>();
           instance.count(null, PingProtos.CountRequest.newBuilder().build(), rpcCallback);
           return rpcCallback.get().getCount();
         }
@@ -215,7 +215,7 @@ public class TestServerCustomProtocol {
         @Override
         public Integer call(PingProtos.PingService instance) throws IOException {
           CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.IncrementCountResponse> rpcCallback =
-            new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.IncrementCountResponse>();
+            new CoprocessorRpcUtils.BlockingRpcCallback<>();
           instance.increment(null,
               PingProtos.IncrementCountRequest.newBuilder().setDiff(diff).build(),
             rpcCallback);
@@ -253,7 +253,7 @@ public class TestServerCustomProtocol {
           @Override
           public String call(PingProtos.PingService instance) throws IOException {
             CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.HelloResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.HelloResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
             PingProtos.HelloRequest.Builder builder = PingProtos.HelloRequest.newBuilder();
             if (send != null) builder.setName(send);
             instance.hello(null, builder.build(), rpcCallback);
@@ -272,7 +272,7 @@ public class TestServerCustomProtocol {
           @Override
           public String call(PingProtos.PingService instance) throws IOException {
             CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.HelloResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.HelloResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
             PingProtos.HelloRequest.Builder builder = PingProtos.HelloRequest.newBuilder();
             // Call ping on same instance.  Use result calling hello on same instance.
             builder.setName(doPing(instance));
@@ -291,7 +291,7 @@ public class TestServerCustomProtocol {
           @Override
           public String call(PingProtos.PingService instance) throws IOException {
             CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.NoopResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.NoopResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
             PingProtos.NoopRequest.Builder builder = PingProtos.NoopRequest.newBuilder();
             instance.noop(null, builder.build(), rpcCallback);
             rpcCallback.get();
@@ -311,7 +311,7 @@ public class TestServerCustomProtocol {
           @Override
           public String call(PingProtos.PingService instance) throws IOException {
             CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.PingResponse> rpcCallback =
-              new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.PingResponse>();
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
             instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
             return rpcCallback.get().getPong();
           }
@@ -406,7 +406,7 @@ public class TestServerCustomProtocol {
 
   private static String doPing(PingProtos.PingService instance) throws IOException {
     CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.PingResponse> rpcCallback =
-        new CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.PingResponse>();
+        new CoprocessorRpcUtils.BlockingRpcCallback<>();
       instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
       return rpcCallback.get().getPong();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
index f54c632..75f8ee2 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
@@ -71,7 +71,7 @@ public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplication
      * Prepare 16 random hfile ranges required for creating hfiles
      */
     Iterator<String> randomHFileRangeListIterator = null;
-    Set<String> randomHFileRanges = new HashSet<String>(16);
+    Set<String> randomHFileRanges = new HashSet<>(16);
     for (int i = 0; i < 16; i++) {
       randomHFileRanges.add(UUID.randomUUID().toString());
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index 7e6c290..79ff25b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -133,9 +133,9 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
       // filter and having necessary column(s).
       scanner = region.getScanner(scan);
       while (hasMore) {
-        List<List<Cell>> deleteRows = new ArrayList<List<Cell>>(rowBatchSize);
+        List<List<Cell>> deleteRows = new ArrayList<>(rowBatchSize);
         for (int i = 0; i < rowBatchSize; i++) {
-          List<Cell> results = new ArrayList<Cell>();
+          List<Cell> results = new ArrayList<>();
           hasMore = scanner.next(results);
           if (results.size() > 0) {
             deleteRows.add(results);
@@ -202,14 +202,14 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
     byte[] row = CellUtil.cloneRow(deleteRow.get(0));
     Delete delete = new Delete(row, ts);
     if (deleteType == DeleteType.FAMILY) {
-      Set<byte[]> families = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+      Set<byte[]> families = new TreeSet<>(Bytes.BYTES_COMPARATOR);
       for (Cell kv : deleteRow) {
         if (families.add(CellUtil.cloneFamily(kv))) {
           delete.addFamily(CellUtil.cloneFamily(kv), ts);
         }
       }
     } else if (deleteType == DeleteType.COLUMN) {
-      Set<Column> columns = new HashSet<Column>();
+      Set<Column> columns = new HashSet<>();
       for (Cell kv : deleteRow) {
         Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));
         if (columns.add(column)) {
@@ -231,7 +231,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
           noOfVersionsToDelete++;
         }
       } else {
-        Set<Column> columns = new HashSet<Column>();
+        Set<Column> columns = new HashSet<>();
         for (Cell kv : deleteRow) {
           Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));
           // Only one version of particular column getting deleted.

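Also visible above: constructor arguments such as Bytes.BYTES_COMPARATOR do not drive the inference. The diamond in new TreeSet<>(Bytes.BYTES_COMPARATOR) resolves from the declared type Set<byte[]> on the left-hand side; the argument merely has to be compatible with the inferred element type. A self-contained sketch, with a hypothetical lexicographic comparator standing in for Bytes.BYTES_COMPARATOR:

    import java.util.Comparator;
    import java.util.Set;
    import java.util.TreeSet;

    public class ComparatorDiamond {
      // Hypothetical stand-in for Bytes.BYTES_COMPARATOR: unsigned lexicographic order.
      static final Comparator<byte[]> LEX = (a, b) -> {
        int n = Math.min(a.length, b.length);
        for (int i = 0; i < n; i++) {
          int cmp = (a[i] & 0xff) - (b[i] & 0xff);
          if (cmp != 0) {
            return cmp;
          }
        }
        return a.length - b.length;
      };

      public static void main(String[] args) {
        // TreeSet<byte[]> is inferred from the declared type of 'families';
        // the comparator argument is simply checked against byte[].
        Set<byte[]> families = new TreeSet<>(LEX);
        families.add(new byte[] { 1 });
        families.add(new byte[] { 0, 2 });
        System.out.println(families.size()); // 2
      }
    }
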
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
index c2387c5..36d8488 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
@@ -75,7 +75,7 @@ public class RowCountEndpoint extends ExampleProtos.RowCountService
     InternalScanner scanner = null;
     try {
       scanner = env.getRegion().getScanner(scan);
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
       boolean hasMore = false;
       byte[] lastRow = null;
       long count = 0;
@@ -115,7 +115,7 @@ public class RowCountEndpoint extends ExampleProtos.RowCountService
     InternalScanner scanner = null;
     try {
       scanner = env.getRegion().getScanner(new Scan());
-      List<Cell> results = new ArrayList<Cell>();
+      List<Cell> results = new ArrayList<>();
       boolean hasMore = false;
       long count = 0;
       do {