Posted to commits@hbase.apache.org by st...@apache.org on 2017/03/07 19:23:20 UTC

[07/22] hbase git commit: HBASE-17532 Replaced explicit type with diamond operator
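
For context, HBASE-17532 is a mechanical cleanup: wherever a generic type
was spelled out on both sides of an assignment, the right-hand side now
uses the diamond operator (<>), available since Java 7, which tells the
compiler to infer the type arguments from the declared type. A minimal
before/after sketch (illustrative only, not code from this commit):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondExample {
      public static void main(String[] args) {
        // Pre-diamond style: type arguments repeated on the right-hand side.
        List<String> explicit = new ArrayList<String>();

        // Diamond style: the compiler infers <String> from the declaration.
        List<String> inferred = new ArrayList<>();

        // Inference works for nested type arguments as well.
        Map<String, List<Integer>> nested = new HashMap<>();

        explicit.add("a");
        inferred.add("b");
        nested.put("c", new ArrayList<>());
        System.out.println(explicit + " " + inferred + " " + nested);
      }
    }

The change is behavior-neutral: the inferred type arguments are the same
ones that were written out before, so the compiled classes are identical.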

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
index 49f57de..bbc612f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
@@ -128,8 +128,8 @@ public class RandomDistribution {
         throw new IllegalArgumentException("Invalid arguments");
       }
       random = r;
-      k = new ArrayList<Integer>();
-      v = new ArrayList<Double>();
+      k = new ArrayList<>();
+      v = new ArrayList<>();
 
       double sum = 0;
       int last = -1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 8f9c4f7..3315b6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -154,7 +154,7 @@ public class TestCacheOnWrite {
 
   private static List<BlockCache> getBlockCaches() throws IOException {
     Configuration conf = TEST_UTIL.getConfiguration();
-    List<BlockCache> blockcaches = new ArrayList<BlockCache>();
+    List<BlockCache> blockcaches = new ArrayList<>();
     // default
     blockcaches.add(new CacheConfig(conf).getBlockCache());
 
@@ -176,7 +176,7 @@ public class TestCacheOnWrite {
 
   @Parameters
   public static Collection<Object[]> getParameters() throws IOException {
-    List<Object[]> params = new ArrayList<Object[]>();
+    List<Object[]> params = new ArrayList<>();
     for (BlockCache blockCache : getBlockCaches()) {
       for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
         for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
@@ -261,12 +261,11 @@ public class TestCacheOnWrite {
     assertTrue(testDescription, scanner.seekTo());
 
     long offset = 0;
-    EnumMap<BlockType, Integer> blockCountByType =
-        new EnumMap<BlockType, Integer>(BlockType.class);
+    EnumMap<BlockType, Integer> blockCountByType = new EnumMap<>(BlockType.class);
 
     DataBlockEncoding encodingInCache = NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding();
-    List<Long> cachedBlocksOffset = new ArrayList<Long>();
-    Map<Long, HFileBlock> cachedBlocks = new HashMap<Long, HFileBlock>();
+    List<Long> cachedBlocksOffset = new ArrayList<>();
+    Map<Long, HFileBlock> cachedBlocks = new HashMap<>();
     while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
       // Flags: don't cache the block, use pread, this is not a compaction.
       // Also, pass null for expected block type to avoid checking it.
@@ -383,7 +382,7 @@ public class TestCacheOnWrite {
       KeyValue kv;
       if(useTags) {
         Tag t = new ArrayBackedTag((byte) 1, "visibility");
-        List<Tag> tagList = new ArrayList<Tag>();
+        List<Tag> tagList = new ArrayList<>();
         tagList.add(t);
         Tag[] tags = new Tag[1];
         tags[0] = t;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
index 95063ce..6145eca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
@@ -77,7 +77,7 @@ public class TestFixedFileTrailer {
 
   @Parameters
   public static Collection<Object[]> getParameters() {
-    List<Object[]> versionsToTest = new ArrayList<Object[]>();
+    List<Object[]> versionsToTest = new ArrayList<>();
     for (int v = HFile.MIN_FORMAT_VERSION; v <= HFile.MAX_FORMAT_VERSION; ++v)
       versionsToTest.add(new Integer[] { v } );
     return versionsToTest;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index c75232a..1c87af4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -123,7 +123,7 @@ public class TestHFileBlock {
 
   static int writeTestKeyValues(HFileBlock.Writer hbw, int seed, boolean includesMemstoreTS,
       boolean useTag) throws IOException {
-    List<KeyValue> keyValues = new ArrayList<KeyValue>();
+    List<KeyValue> keyValues = new ArrayList<>();
     Random randomizer = new Random(42l + seed); // just any fixed number
 
     // generate keyValues
@@ -383,8 +383,8 @@ public class TestHFileBlock {
                               .build();
           HFileBlock.Writer hbw = new HFileBlock.Writer(dataBlockEncoder, meta);
           long totalSize = 0;
-          final List<Integer> encodedSizes = new ArrayList<Integer>();
-          final List<ByteBuffer> encodedBlocks = new ArrayList<ByteBuffer>();
+          final List<Integer> encodedSizes = new ArrayList<>();
+          final List<ByteBuffer> encodedBlocks = new ArrayList<>();
           for (int blockId = 0; blockId < numBlocks; ++blockId) {
             hbw.startWriting(BlockType.DATA);
             writeTestKeyValues(hbw, blockId, includesMemstoreTS, includesTag);
@@ -532,11 +532,10 @@ public class TestHFileBlock {
                    ", pread=" + pread +
                    ", cacheOnWrite=" + cacheOnWrite);
           Path path = new Path(TEST_UTIL.getDataTestDir(), "prev_offset");
-          List<Long> expectedOffsets = new ArrayList<Long>();
-          List<Long> expectedPrevOffsets = new ArrayList<Long>();
-          List<BlockType> expectedTypes = new ArrayList<BlockType>();
-          List<ByteBuffer> expectedContents = cacheOnWrite
-              ? new ArrayList<ByteBuffer>() : null;
+          List<Long> expectedOffsets = new ArrayList<>();
+          List<Long> expectedPrevOffsets = new ArrayList<>();
+          List<BlockType> expectedTypes = new ArrayList<>();
+          List<ByteBuffer> expectedContents = cacheOnWrite ? new ArrayList<>() : null;
           long totalSize = writeBlocks(rand, algo, path, expectedOffsets,
               expectedPrevOffsets, expectedTypes, expectedContents);
 
@@ -718,8 +717,8 @@ public class TestHFileBlock {
     for (Compression.Algorithm compressAlgo : COMPRESSION_ALGORITHMS) {
       Path path = new Path(TEST_UTIL.getDataTestDir(), "concurrent_reading");
       Random rand = defaultRandom();
-      List<Long> offsets = new ArrayList<Long>();
-      List<BlockType> types = new ArrayList<BlockType>();
+      List<Long> offsets = new ArrayList<>();
+      List<BlockType> types = new ArrayList<>();
       writeBlocks(rand, compressAlgo, path, offsets, null, types, null);
       FSDataInputStream is = fs.open(path);
       long fileSize = fs.getFileStatus(path).getLen();
@@ -732,8 +731,7 @@ public class TestHFileBlock {
       HFileBlock.FSReader hbr = new HFileBlock.FSReaderImpl(is, fileSize, meta);
 
       Executor exec = Executors.newFixedThreadPool(NUM_READER_THREADS);
-      ExecutorCompletionService<Boolean> ecs =
-          new ExecutorCompletionService<Boolean>(exec);
+      ExecutorCompletionService<Boolean> ecs = new ExecutorCompletionService<>(exec);
 
       for (int i = 0; i < NUM_READER_THREADS; ++i) {
         ecs.submit(new BlockReaderThread("reader_" + (char) ('A' + i), hbr,
@@ -768,7 +766,7 @@ public class TestHFileBlock {
                         .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
                         .build();
     HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta);
-    Map<BlockType, Long> prevOffsetByType = new HashMap<BlockType, Long>();
+    Map<BlockType, Long> prevOffsetByType = new HashMap<>();
     long totalSize = 0;
     for (int i = 0; i < NUM_TEST_BLOCKS; ++i) {
       long pos = os.getPos();
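
One hunk in TestHFileBlock above deserves a note: expectedContents is now
initialized with a diamond inside a conditional expression
(cacheOnWrite ? new ArrayList<>() : null). This compiles only because
Java 8 made reference conditionals poly expressions, so the target type
List<ByteBuffer> flows through the ternary into the diamond; under Java 7
rules the operand would have been typed in isolation and the assignment
would not compile. A standalone sketch of the pattern (names are
illustrative, not HBase code):

    import java.nio.ByteBuffer;
    import java.util.ArrayList;
    import java.util.List;

    public class TernaryDiamond {
      // Mirrors the expectedContents pattern from the hunk above.
      static List<ByteBuffer> contents(boolean cacheOnWrite) {
        // Java 8+: List<ByteBuffer> is pushed through the conditional into
        // the diamond, so ArrayList<ByteBuffer> is inferred.
        return cacheOnWrite ? new ArrayList<>() : null;
      }

      public static void main(String[] args) {
        System.out.println(contents(true));   // []
        System.out.println(contents(false));  // null
      }
    }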

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index ce6ec82..28930db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -95,7 +95,7 @@ public class TestHFileBlockIndex {
   private long rootIndexOffset;
   private int numRootEntries;
   private int numLevels;
-  private static final List<byte[]> keys = new ArrayList<byte[]>();
+  private static final List<byte[]> keys = new ArrayList<>();
   private final Compression.Algorithm compr;
   private byte[] firstKeyInFile;
   private Configuration conf;
@@ -604,7 +604,7 @@ public class TestHFileBlockIndex {
       blockCache.evictBlocksByHfileName(hfilePath.getName());
 
       conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);
-      Set<String> keyStrSet = new HashSet<String>();
+      Set<String> keyStrSet = new HashSet<>();
       byte[][] keys = new byte[NUM_KV][];
       byte[][] values = new byte[NUM_KV][];
 
@@ -674,7 +674,7 @@ public class TestHFileBlockIndex {
       HFileBlock.BlockIterator iter = fsReader.blockRange(0,
           reader.getTrailer().getLoadOnOpenDataOffset());
       HFileBlock block;
-      List<byte[]> blockKeys = new ArrayList<byte[]>();
+      List<byte[]> blockKeys = new ArrayList<>();
       while ((block = iter.nextBlock()) != null) {
         if (block.getBlockType() != BlockType.LEAF_INDEX)
           return;
@@ -762,7 +762,7 @@ public class TestHFileBlockIndex {
     HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)
             .withFileContext(context)
             .withPath(fs, hfPath).create();
-    List<byte[]> keys = new ArrayList<byte[]>();
+    List<byte[]> keys = new ArrayList<>();
 
     // This should result in leaf-level indices and a root level index
     for (int i=0; i < 100; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
index 387514e..ac939d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
@@ -243,8 +243,7 @@ public class TestHFileDataBlockEncoder {
    */
   @Parameters
   public static Collection<Object[]> getAllConfigurations() {
-    List<Object[]> configurations =
-        new ArrayList<Object[]>();
+    List<Object[]> configurations = new ArrayList<>();
 
     for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
       for (boolean includesMemstoreTS : new boolean[] { false, true }) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
index af4f2b8..f1528c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
@@ -58,7 +58,7 @@ public class TestHFileInlineToRootChunkConversion {
     HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)
             .withFileContext(context)
             .withPath(fs, hfPath).create();
-    List<byte[]> keys = new ArrayList<byte[]>();
+    List<byte[]> keys = new ArrayList<>();
     StringBuilder sb = new StringBuilder();
 
     for (int i = 0; i < 4; ++i) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index 983ec2f..fe6b549 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -128,7 +128,7 @@ public class TestHFileWriterV3 {
             .create();
 
     Random rand = new Random(9713312); // Just a fixed seed.
-    List<KeyValue> keyValues = new ArrayList<KeyValue>(entryCount);
+    List<KeyValue> keyValues = new ArrayList<>(entryCount);
 
     for (int i = 0; i < entryCount; ++i) {
       byte[] keyBytes = RandomKeyValueUtil.randomOrderedKey(rand, i);
@@ -137,7 +137,7 @@ public class TestHFileWriterV3 {
       byte[] valueBytes = RandomKeyValueUtil.randomValue(rand);
       KeyValue keyValue = null;
       if (useTags) {
-        ArrayList<Tag> tags = new ArrayList<Tag>();
+        ArrayList<Tag> tags = new ArrayList<>();
         for (int j = 0; j < 1 + rand.nextInt(4); j++) {
           byte[] tagBytes = new byte[16];
           rand.nextBytes(tagBytes);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
index cf3c6ed..9253ce1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java
@@ -122,7 +122,7 @@ public class TestLazyDataBlockDecompression {
     reader.loadFileInfo();
     long offset = trailer.getFirstDataBlockOffset(),
       max = trailer.getLastDataBlockOffset();
-    List<HFileBlock> blocks = new ArrayList<HFileBlock>(4);
+    List<HFileBlock> blocks = new ArrayList<>(4);
     HFileBlock block;
     while (offset <= max) {
       block = reader.readBlock(offset, -1, /* cacheBlock */ true, /* pread */ false,

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
index 90e398d..a9ecf7b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
@@ -72,8 +72,8 @@ public class TestReseekTo {
 
     String valueString = "Value";
 
-    List<Integer> keyList = new ArrayList<Integer>();
-    List<String> valueList = new ArrayList<String>();
+    List<Integer> keyList = new ArrayList<>();
+    List<String> valueList = new ArrayList<>();
 
     for (int key = 0; key < numberOfKeys; key++) {
       String value = valueString + key;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
index 94e7219..f1775d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
@@ -208,7 +208,7 @@ public class TestScannerFromBucketCache {
       Scan scan = new Scan(row1);
       scan.addFamily(fam1);
       scan.setMaxVersions(10);
-      actual = new ArrayList<Cell>();
+      actual = new ArrayList<>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -314,7 +314,7 @@ public class TestScannerFromBucketCache {
     }
 
     // Expected
-    List<Cell> expected = new ArrayList<Cell>();
+    List<Cell> expected = new ArrayList<>();
     expected.add(kv13);
     expected.add(kv12);
     expected.add(kv23);
@@ -326,7 +326,7 @@ public class TestScannerFromBucketCache {
     Scan scan = new Scan(row1);
     scan.addFamily(fam1);
     scan.setMaxVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<Cell>();
+    List<Cell> actual = new ArrayList<>();
     InternalScanner scanner = region.getScanner(scan);
 
     boolean hasNext = scanner.next(actual);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index 9c6bb38..c834fca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -61,7 +61,7 @@ public class TestScannerSelectionUsingKeyRange {
   private static final int NUM_ROWS = 8;
   private static final int NUM_COLS_PER_ROW = 5;
   private static final int NUM_FILES = 2;
-  private static final Map<Object, Integer> TYPE_COUNT = new HashMap<Object, Integer>(3);
+  private static final Map<Object, Integer> TYPE_COUNT = new HashMap<>(3);
   static {
     TYPE_COUNT.put(BloomType.ROWCOL, 0);
     TYPE_COUNT.put(BloomType.ROW, 0);
@@ -73,7 +73,7 @@ public class TestScannerSelectionUsingKeyRange {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> params = new ArrayList<Object[]>();
+    List<Object[]> params = new ArrayList<>();
     for (Object type : TYPE_COUNT.keySet()) {
       params.add(new Object[] { type, TYPE_COUNT.get(type) });
     }
@@ -120,7 +120,7 @@ public class TestScannerSelectionUsingKeyRange {
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<Cell> results = new ArrayList<>();
     while (scanner.next(results)) {
     }
     scanner.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 08b259d..4af48ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -79,7 +79,7 @@ public class TestScannerSelectionUsingTTL {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> params = new ArrayList<Object[]>();
+    List<Object[]> params = new ArrayList<>();
     for (int numFreshFiles = 1; numFreshFiles <= 3; ++numFreshFiles) {
       for (boolean explicitCompaction : new boolean[] { false, true }) {
         params.add(new Object[] { numFreshFiles, explicitCompaction });
@@ -135,7 +135,7 @@ public class TestScannerSelectionUsingTTL {
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
     InternalScanner scanner = region.getScanner(scan);
-    List<Cell> results = new ArrayList<Cell>();
+    List<Cell> results = new ArrayList<>();
     final int expectedKVsPerRow = numFreshFiles * NUM_COLS_PER_ROW;
     int numReturnedRows = 0;
     LOG.info("Scanning the entire table");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
index a8fe3f0..d654bce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
@@ -65,7 +65,7 @@ public class TestSeekTo {
   private final DataBlockEncoding encoding;
   @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> paramList = new ArrayList<Object[]>();
+    List<Object[]> paramList = new ArrayList<>();
     for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
       paramList.add(new Object[] { encoding });
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index 6fe352d..0f16bfa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -143,10 +143,10 @@ public class TestBucketCache {
     final List<Integer> BLOCKSIZES = Arrays.asList(4 * 1024, 8 * 1024, 64 * 1024, 96 * 1024);
 
     boolean full = false;
-    ArrayList<Long> allocations = new ArrayList<Long>();
+    ArrayList<Long> allocations = new ArrayList<>();
     // Fill the allocated extents by choosing a random blocksize. Continues selecting blocks until
     // the cache is completely filled.
-    List<Integer> tmp = new ArrayList<Integer>(BLOCKSIZES);
+    List<Integer> tmp = new ArrayList<>(BLOCKSIZES);
     while (!full) {
       Integer blockSize = null;
       try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
index 4d3f550..cfba69a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketWriterThread.java
@@ -169,7 +169,7 @@ public class TestBucketWriterThread {
   private static void doDrainOfOneEntry(final BucketCache bc, final BucketCache.WriterThread wt,
       final BlockingQueue<RAMQueueEntry> q)
   throws InterruptedException {
-    List<RAMQueueEntry> rqes = BucketCache.getRAMQueueEntries(q, new ArrayList<RAMQueueEntry>(1));
+    List<RAMQueueEntry> rqes = BucketCache.getRAMQueueEntries(q, new ArrayList<>(1));
     wt.doDrain(rqes);
     assertTrue(q.isEmpty());
     assertTrue(bc.ramCache.isEmpty());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 3535d23..04ac519 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -216,7 +216,7 @@ public class TestSimpleRpcScheduler {/*
       when(priority.getDeadline(eq(largeHead), any(Message.class))).thenReturn(50L);
       when(priority.getDeadline(eq(hugeHead), any(Message.class))).thenReturn(100L);
 
-      final ArrayList<Integer> work = new ArrayList<Integer>();
+      final ArrayList<Integer> work = new ArrayList<>();
       doAnswerTaskExecution(smallCallTask, work, 10, 250);
       doAnswerTaskExecution(largeCallTask, work, 50, 250);
       doAnswerTaskExecution(hugeCallTask, work, 100, 250);
@@ -312,7 +312,7 @@ public class TestSimpleRpcScheduler {/*
       when(scanCall.getHeader()).thenReturn(scanHead);
       when(scanCall.getParam()).thenReturn(scanCall.param);
 
-      ArrayList<Integer> work = new ArrayList<Integer>();
+      ArrayList<Integer> work = new ArrayList<>();
       doAnswerTaskExecution(putCallTask, work, 1, 1000);
       doAnswerTaskExecution(getCallTask, work, 2, 1000);
       doAnswerTaskExecution(scanCallTask, work, 3, 1000);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index fd0db6a..22dda35 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -218,7 +218,7 @@ public class TestTableMapReduceUtil {
         OutputCollector<ImmutableBytesWritable, Put> output, Reporter reporter)
         throws IOException {
       String strKey = Bytes.toString(key.get());
-      List<Put> result = new ArrayList<Put>();
+      List<Put> result = new ArrayList<>();
       while (values.hasNext())
         result.add(values.next());
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index ca727e4..47421f1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -234,7 +234,7 @@ public abstract class MultiTableInputFormatTestBase {
     c.set(KEY_STARTROW, start != null ? start : "");
     c.set(KEY_LASTROW, last != null ? last : "");
 
-    List<Scan> scans = new ArrayList<Scan>();
+    List<Scan> scans = new ArrayList<>();
 
     for (String tableName : TABLES) {
       Scan scan = new Scan();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
index 92888ed..efacca9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
@@ -46,15 +46,14 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
   public RecordReader<NullWritable, NullWritable> createRecordReader(
       InputSplit split,
       TaskAttemptContext tac) throws IOException, InterruptedException {
-    return new SingleRecordReader<NullWritable, NullWritable>(
-        NullWritable.get(), NullWritable.get());
+    return new SingleRecordReader<>(NullWritable.get(), NullWritable.get());
   }
 
   @Override
   public List<InputSplit> getSplits(JobContext context) throws IOException,
       InterruptedException {
     int count = getNumMapTasks(context.getConfiguration());
-    List<InputSplit> splits = new ArrayList<InputSplit>(count);
+    List<InputSplit> splits = new ArrayList<>(count);
     for (int i = 0; i < count; i++) {
       splits.add(new NullInputSplit());
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
index fc7b102..b7fdb47 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
@@ -51,7 +51,7 @@ public class TestGroupingTableMapper {
     Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Result>.Context context =
         mock(Mapper.Context.class);
     context.write(any(ImmutableBytesWritable.class), any(Result.class));
-    List<Cell> keyValue = new ArrayList<Cell>();
+    List<Cell> keyValue = new ArrayList<>();
     byte[] row = {};
     keyValue.add(new KeyValue(row, Bytes.toBytes("family2"), Bytes.toBytes("clm"), Bytes
         .toBytes("value1")));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 52b2901..3c1bed8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -448,7 +448,7 @@ public class TestHFileOutputFormat2  {
       writer = hof.getRecordWriter(context);
       final byte [] b = Bytes.toBytes("b");
 
-      List< Tag > tags = new ArrayList<Tag>();
+      List< Tag > tags = new ArrayList<>();
       tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(978670)));
       KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, tags);
       writer.write(new ImmutableBytesWritable(), kv);
@@ -735,8 +735,7 @@ public class TestHFileOutputFormat2  {
    */
   private Map<String, Compression.Algorithm>
       getMockColumnFamiliesForCompression (int numCfs) {
-    Map<String, Compression.Algorithm> familyToCompression
-      = new HashMap<String, Compression.Algorithm>();
+    Map<String, Compression.Algorithm> familyToCompression = new HashMap<>();
     // use column family names having special characters
     if (numCfs-- > 0) {
       familyToCompression.put("Family1!@#!@#&", Compression.Algorithm.LZO);
@@ -809,8 +808,7 @@ public class TestHFileOutputFormat2  {
    */
   private Map<String, BloomType>
   getMockColumnFamiliesForBloomType (int numCfs) {
-    Map<String, BloomType> familyToBloomType =
-        new HashMap<String, BloomType>();
+    Map<String, BloomType> familyToBloomType = new HashMap<>();
     // use column family names having special characters
     if (numCfs-- > 0) {
       familyToBloomType.put("Family1!@#!@#&", BloomType.ROW);
@@ -881,8 +879,7 @@ public class TestHFileOutputFormat2  {
    */
   private Map<String, Integer>
   getMockColumnFamiliesForBlockSize (int numCfs) {
-    Map<String, Integer> familyToBlockSize =
-        new HashMap<String, Integer>();
+    Map<String, Integer> familyToBlockSize = new HashMap<>();
     // use column family names having special characters
     if (numCfs-- > 0) {
       familyToBlockSize.put("Family1!@#!@#&", 1234);
@@ -956,8 +953,7 @@ public class TestHFileOutputFormat2  {
    */
   private Map<String, DataBlockEncoding>
       getMockColumnFamiliesForDataBlockEncoding (int numCfs) {
-    Map<String, DataBlockEncoding> familyToDataBlockEncoding =
-        new HashMap<String, DataBlockEncoding>();
+    Map<String, DataBlockEncoding> familyToDataBlockEncoding = new HashMap<>();
     // use column family names having special characters
     if (numCfs-- > 0) {
       familyToDataBlockEncoding.put("Family1!@#!@#&", DataBlockEncoding.DIFF);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
index abb600d..2867f13 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHRegionPartitioner.java
@@ -59,7 +59,7 @@ public class TestHRegionPartitioner {
     UTIL.createTable(TableName.valueOf(name.getMethodName()), families, 1,
     Bytes.toBytes("aa"), Bytes.toBytes("cc"), 3);
 
-    HRegionPartitioner<Long, Long> partitioner = new HRegionPartitioner<Long, Long>();
+    HRegionPartitioner<Long, Long> partitioner = new HRegionPartitioner<>();
     Configuration configuration = UTIL.getConfiguration();
     configuration.set(TableOutputFormat.OUTPUT_TABLE, name.getMethodName());
     partitioner.setConf(configuration);
@@ -68,4 +68,4 @@ public class TestHRegionPartitioner {
     assertEquals(1, partitioner.getPartition(writable, 10L, 3));
     assertEquals(0, partitioner.getPartition(writable, 10L, 1));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
index 75d40a1..a7642af 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
@@ -146,8 +146,7 @@ public class TestHashTable {
       .put(95, new ImmutableBytesWritable(Bytes.fromHex("f57c447e32a08f4bf1abb2892839ac56")))
       .build();
   
-    Map<Integer, ImmutableBytesWritable> actualHashes
-      = new HashMap<Integer, ImmutableBytesWritable>();
+    Map<Integer, ImmutableBytesWritable> actualHashes = new HashMap<>();
     Path dataDir = new Path(testDir, HashTable.HASH_DATA_DIR);
     for (int i = 0; i < numHashFiles; i++) {
       Path hashPath = new Path(dataDir, HashTable.TableHash.getDataFileName(i));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 7de012e..1866a35 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -628,7 +628,7 @@ public class TestImportExport {
   public void testAddFilterAndArguments() throws IOException {
     Configuration configuration = new Configuration();
 
-    List<String> args = new ArrayList<String>();
+    List<String> args = new ArrayList<>();
     args.add("param1");
     args.add("param2");
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index 26f8dea..6d9b05b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -176,7 +176,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
     }
 
     // run the import
-    List<String> argv = new ArrayList<String>(Arrays.asList(args));
+    List<String> argv = new ArrayList<>(Arrays.asList(args));
     argv.add(inputPath.toString());
     Tool tool = new ImportTsv();
     LOG.debug("Running ImportTsv with arguments: " + argv);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
index 21cae54..4ab3d29 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
@@ -138,7 +138,7 @@ public class TestImportTSVWithTTLs implements Configurable {
     }
 
     // run the import
-    List<String> argv = new ArrayList<String>(Arrays.asList(args));
+    List<String> argv = new ArrayList<>(Arrays.asList(args));
     argv.add(inputPath.toString());
     Tool tool = new ImportTsv();
     LOG.debug("Running ImportTsv with arguments: " + argv);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index 50d6b18..b8d973b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -357,7 +357,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
     }
 
     // run the import
-    List<String> argv = new ArrayList<String>(Arrays.asList(args));
+    List<String> argv = new ArrayList<>(Arrays.asList(args));
     argv.add(inputPath.toString());
     Tool tool = new ImportTsv();
     LOG.debug("Running ImportTsv with arguments: " + argv);
@@ -397,9 +397,9 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
     // validate number and content of output columns
     LOG.debug("Validating HFiles.");
-    Set<String> configFamilies = new HashSet<String>();
+    Set<String> configFamilies = new HashSet<>();
     configFamilies.add(family);
-    Set<String> foundFamilies = new HashSet<String>();
+    Set<String> foundFamilies = new HashSet<>();
     int actualKVCount = 0;
     for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
       LOG.debug("The output path has files");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index fd51544..b7d5c6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -114,7 +114,7 @@ public class TestImportTsv implements Configurable {
   @Before
   public void setup() throws Exception {
     tn = TableName.valueOf("test-" + UUID.randomUUID());
-    args = new HashMap<String, String>();
+    args = new HashMap<>();
     // Prepare the arguments required for the test.
     args.put(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A,FAM:B");
     args.put(ImportTsv.SEPARATOR_CONF_KEY, "\u001b");
@@ -515,9 +515,9 @@ public class TestImportTsv implements Configurable {
       int expectedKVCount) throws IOException {
     // validate number and content of output columns
     LOG.debug("Validating HFiles.");
-    Set<String> configFamilies = new HashSet<String>();
+    Set<String> configFamilies = new HashSet<>();
     configFamilies.add(family);
-    Set<String> foundFamilies = new HashSet<String>();
+    Set<String> foundFamilies = new HashSet<>();
     int actualKVCount = 0;
     for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
       String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
index 81e0a70..f569446 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
@@ -52,7 +52,7 @@ public class TestImportTsvParser {
   }
 
   private void checkParsing(ParsedLine parsed, Iterable<String> expected) {
-    ArrayList<String> parsedCols = new ArrayList<String>();
+    ArrayList<String> parsedCols = new ArrayList<>();
     for (int i = 0; i < parsed.getColumnCount(); i++) {
       parsedCols.add(Bytes.toString(parsed.getLineBytes(), parsed.getColumnOffset(i),
           parsed.getColumnLength(i)));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 7167c19..a6dacf7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -323,7 +323,7 @@ public class TestLoadIncrementalHFiles {
       list = new ArrayList<>();
     }
     if (useMap) {
-      map = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
+      map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
       map.put(FAMILY, list);
     }
     Path last = null;
@@ -630,7 +630,7 @@ public class TestLoadIncrementalHFiles {
 
   @Test(timeout = 120000)
   public void testInferBoundaries() {
-    TreeMap<byte[], Integer> map = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
+    TreeMap<byte[], Integer> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
     /* Toy example
      *     c---------i            o------p          s---------t     v------x
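
Another recurring shape in these hunks is the diamond with constructor
arguments, e.g. new TreeMap<>(Bytes.BYTES_COMPARATOR) above. The type
arguments are still inferred from the declared type on the left; the
constructor argument merely has to fit (here, a Comparator<? super byte[]>).
A small self-contained sketch (the length-based comparator is a stand-in
for Bytes.BYTES_COMPARATOR, purely for illustration):

    import java.util.Comparator;
    import java.util.TreeMap;

    public class DiamondWithArgs {
      public static void main(String[] args) {
        // Stand-in comparator; HBase's Bytes.BYTES_COMPARATOR orders byte
        // arrays lexicographically, this one just compares lengths.
        Comparator<byte[]> byLength = Comparator.comparingInt(a -> a.length);

        // The diamond infers <byte[], Integer> from the declaration, not
        // from the comparator argument.
        TreeMap<byte[], Integer> map = new TreeMap<>(byLength);
        map.put(new byte[] { 1 }, 10);
        map.put(new byte[] { 1, 2 }, 20);
        System.out.println(map.size()); // 2
      }
    }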

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiHFileOutputFormat.java
index 738ae5f..958ed83 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiHFileOutputFormat.java
@@ -169,7 +169,7 @@ public class TestMultiHFileOutputFormat {
             byte keyBytes[] = new byte[keyLength];
             byte valBytes[] = new byte[valLength];
 
-            ArrayList<ImmutableBytesWritable> tables = new ArrayList<ImmutableBytesWritable>();
+            ArrayList<ImmutableBytesWritable> tables = new ArrayList<>();
             for (int i = 0; i < TABLES.length; i++) {
                 tables.add(new ImmutableBytesWritable(TABLES[i]));
             }
@@ -204,7 +204,7 @@ public class TestMultiHFileOutputFormat {
         protected void reduce(ImmutableBytesWritable table, java.lang.Iterable<KeyValue> kvs,
             org.apache.hadoop.mapreduce.Reducer<ImmutableBytesWritable, KeyValue, ImmutableBytesWritable, KeyValue>.Context context)
             throws java.io.IOException, InterruptedException {
-            TreeSet<KeyValue> map = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
+            TreeSet<KeyValue> map = new TreeSet<>(KeyValue.COMPARATOR);
             for (KeyValue kv : kvs) {
                 try {
                     map.add(kv.clone());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index cd83199..3b84e2d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -319,7 +319,7 @@ public class TestRowCounter {
     final byte[] col1 = Bytes.toBytes(COL1);
     final byte[] col2 = Bytes.toBytes(COL2);
     final byte[] col3 = Bytes.toBytes(COMPOSITE_COLUMN);
-    ArrayList<Put> rowsUpdate = new ArrayList<Put>();
+    ArrayList<Put> rowsUpdate = new ArrayList<>();
     // write few rows with two columns
     int i = 0;
     for (; i < totalRows - rowsWithOneCol; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
index 119df80..0f41f33 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
@@ -42,8 +42,7 @@ public class TestSimpleTotalOrderPartitioner {
   public void testSplit() throws Exception {
     String start = "a";
     String end = "{";
-    SimpleTotalOrderPartitioner<byte []> p =
-      new SimpleTotalOrderPartitioner<byte []>();
+    SimpleTotalOrderPartitioner<byte []> p = new SimpleTotalOrderPartitioner<>();
     
     this.conf.set(SimpleTotalOrderPartitioner.START, start);
     this.conf.set(SimpleTotalOrderPartitioner.END, end);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
index f1cda3c..4382c9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
@@ -48,7 +48,7 @@ public class TestTableSplit {
         "row-end".getBytes(), "location");
     assertEquals (split1, split2);
     assertTrue   (split1.hashCode() == split2.hashCode());
-    HashSet<TableSplit> set = new HashSet<TableSplit>(2);
+    HashSet<TableSplit> set = new HashSet<>(2);
     set.add(split1);
     set.add(split2);
     assertTrue(set.size() == 1);
@@ -68,7 +68,7 @@ public class TestTableSplit {
 
     assertEquals (split1, split2);
     assertTrue   (split1.hashCode() == split2.hashCode());
-    HashSet<TableSplit> set = new HashSet<TableSplit>(2);
+    HashSet<TableSplit> set = new HashSet<>(2);
     set.add(split1);
     set.add(split2);
     assertTrue(set.size() == 1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 8b7cdd7..6796c94 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -68,8 +68,7 @@ public class TestTimeRangeMapRed {
   private Admin admin;
 
   private static final byte [] KEY = Bytes.toBytes("row1");
-  private static final NavigableMap<Long, Boolean> TIMESTAMP =
-    new TreeMap<Long, Boolean>();
+  private static final NavigableMap<Long, Boolean> TIMESTAMP = new TreeMap<>();
   static {
     TIMESTAMP.put((long)1245620000, false);
     TIMESTAMP.put((long)1245620005, true); // include
@@ -112,7 +111,7 @@ public class TestTimeRangeMapRed {
     public void map(ImmutableBytesWritable key, Result result,
         Context context)
     throws IOException {
-      List<Long> tsList = new ArrayList<Long>();
+      List<Long> tsList = new ArrayList<>();
       for (Cell kv : result.listCells()) {
         tsList.add(kv.getTimestamp());
       }
@@ -152,7 +151,7 @@ public class TestTimeRangeMapRed {
     col.setMaxVersions(Integer.MAX_VALUE);
     desc.addFamily(col);
     admin.createTable(desc);
-    List<Put> puts = new ArrayList<Put>();
+    List<Put> puts = new ArrayList<>();
     for (Map.Entry<Long, Boolean> entry : TIMESTAMP.entrySet()) {
       Put put = new Put(KEY);
       put.setDurability(Durability.SKIP_WAL);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 7e142bc..427c5cc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -172,7 +172,7 @@ public class TestWALPlayer {
     when(context.getConfiguration()).thenReturn(configuration);
 
     WALEdit value = mock(WALEdit.class);
-    ArrayList<Cell> values = new ArrayList<Cell>();
+    ArrayList<Cell> values = new ArrayList<>();
     KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null);
 
     values.add(kv1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
index fa1b9f4..34725b4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java
@@ -81,8 +81,7 @@ public class TestWALRecordReader {
   private static HTableDescriptor htd;
   private static Path logDir;
   protected MultiVersionConcurrencyControl mvcc;
-  protected static NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-      Bytes.BYTES_COMPARATOR);
+  protected static NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
   private static String getName() {
     return "TestWALRecordReader";

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
index 467d4a5..a5fe952 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
@@ -143,14 +143,12 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
    * key, need to use TreeMap and provide a Comparator.  Use
    * {@link #setGetResult(byte[], byte[], Result)} filling this map.
    */
-  private final Map<byte [], Map<byte [], Result>> gets =
-    new TreeMap<byte [], Map<byte [], Result>>(Bytes.BYTES_COMPARATOR);
+  private final Map<byte [], Map<byte [], Result>> gets = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
   /**
    * Map of regions to results to return when scanning.
    */
-  private final Map<byte [], Result []> nexts =
-    new TreeMap<byte [], Result []>(Bytes.BYTES_COMPARATOR);
+  private final Map<byte [], Result []> nexts = new TreeMap<>(Bytes.BYTES_COMPARATOR);
 
   /**
    * Data structure that holds regionname and index used scanning.
@@ -177,8 +175,7 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
   /**
    * Outstanding scanners and their offset into <code>nexts</code>
    */
-  private final Map<Long, RegionNameAndIndex> scannersAndOffsets =
-    new HashMap<Long, RegionNameAndIndex>();
+  private final Map<Long, RegionNameAndIndex> scannersAndOffsets = new HashMap<>();
 
   /**
    * @param sn Name of this mock regionserver
@@ -203,7 +200,7 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
     if (value == null) {
       // If no value already, create one.  Needs to be treemap because we are
       // using byte array as key.   Not thread safe.
-      value = new TreeMap<byte [], Result>(Bytes.BYTES_COMPARATOR);
+      value = new TreeMap<>(Bytes.BYTES_COMPARATOR);
       this.gets.put(regionName, value);
     }
     value.put(row, r);
@@ -402,7 +399,7 @@ ClientProtos.ClientService.BlockingInterface, RegionServerServices {
         Result result = next(scannerId);
         if (result != null) {
           builder.addCellsPerResult(result.size());
-          List<CellScannable> results = new ArrayList<CellScannable>(1);
+          List<CellScannable> results = new ArrayList<>(1);
           results.add(result);
           ((HBaseRpcController) controller).setCellScanner(CellUtil
               .createCellScanner(results));
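
The javadoc in this file explains why the gets and nexts maps are TreeMaps built with Bytes.BYTES_COMPARATOR rather than HashMaps: Java arrays inherit identity-based equals/hashCode from Object, so two byte[] instances with identical contents are distinct hash keys, while a TreeMap with a content-comparing comparator restores value-based lookup. A sketch of the trap, again with a hand-rolled stand-in comparator:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class ByteArrayKeySketch {
      public static void main(String[] args) {
        byte[] k1 = "row".getBytes();
        byte[] k2 = "row".getBytes(); // same contents, different instance

        Map<byte[], String> hashed = new HashMap<>();
        hashed.put(k1, "v");
        // Arrays hash by identity, so the equal-but-distinct key misses.
        System.out.println(hashed.get(k2)); // prints: null

        // A content-comparing comparator restores value-based lookup.
        Map<byte[], String> sorted = new TreeMap<>((a, b) -> {
          int n = Math.min(a.length, b.length);
          for (int i = 0; i < n; i++) {
            int cmp = (a[i] & 0xff) - (b[i] & 0xff);
            if (cmp != 0) return cmp;
          }
          return a.length - b.length;
        });
        sorted.put(k1, "v");
        System.out.println(sorted.get(k2)); // prints: v
      }
    }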

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
index 5100a2b..78b75d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
@@ -333,14 +333,14 @@ public class TestAssignmentListener {
 
     // We'll start with 2 servers in draining that existed before the
     // HMaster started.
-    ArrayList<ServerName> drainingServers = new ArrayList<ServerName>();
+    ArrayList<ServerName> drainingServers = new ArrayList<>();
     drainingServers.add(SERVERNAME_A);
     drainingServers.add(SERVERNAME_B);
 
     // We'll have 2 servers that come online AFTER the DrainingServerTracker
     // is started (just as we see when we failover to the Backup HMaster).
     // One of these will already be a draining server.
-    HashMap<ServerName, ServerLoad> onlineServers = new HashMap<ServerName, ServerLoad>();
+    HashMap<ServerName, ServerLoad> onlineServers = new HashMap<>();
     onlineServers.put(SERVERNAME_A, ServerLoad.EMPTY_SERVERLOAD);
     onlineServers.put(SERVERNAME_C, ServerLoad.EMPTY_SERVERLOAD);
 
@@ -370,7 +370,7 @@ public class TestAssignmentListener {
         new ArrayList<ServerName>());
 
     // checkAndRecordNewServer() is how servers are added to the ServerManager.
-    ArrayList<ServerName> onlineDrainingServers = new ArrayList<ServerName>();
+    ArrayList<ServerName> onlineDrainingServers = new ArrayList<>();
     for (ServerName sn : onlineServers.keySet()){
       // Here's the actual test.
       serverManager.checkAndRecordNewServer(sn, onlineServers.get(sn));
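
Note the unchanged context line new ArrayList<ServerName>()) at the top of the second hunk above: the commit does not rewrite every allocation, plausibly because the automated cleanup only targeted sites where inference is straightforward (that is an assumption, not something the patch states). One hard limit is worth recording: before Java 9, diamond cannot be combined with an anonymous inner class at all, so code written that way must keep its explicit type arguments. A sketch of the restriction:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    public class AnonymousDiamondSketch {
      public static void main(String[] args) {
        // Fine on Java 7+: diamond on an ordinary constructor call.
        List<String> names = new ArrayList<>();
        names.add("b");
        names.add("a");

        // An anonymous class cannot use diamond until Java 9; writing
        // "new Comparator<>() { ... }" is a compile error on Java 7/8.
        names.sort(new Comparator<String>() {
          @Override
          public int compare(String x, String y) {
            return x.compareTo(y);
          }
        });
        System.out.println(names); // prints: [a, b]
      }
    }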

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
index 242b012..449e1e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerOnCluster.java
@@ -624,7 +624,7 @@ public class TestAssignmentManagerOnCluster {
       HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
       AssignmentManager am = master.getAssignmentManager();
 
-      Map<HRegionInfo, ServerName> regions = new HashMap<HRegionInfo, ServerName>();
+      Map<HRegionInfo, ServerName> regions = new HashMap<>();
       ServerName dest = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
       regions.put(hri, dest);
       // retainAssignment but balancer cannot find a plan
@@ -838,7 +838,7 @@ public class TestAssignmentManagerOnCluster {
       assertNotNull(destServerName);
       assertFalse("Region should be assigned on a new region server",
         oldServerName.equals(destServerName));
-      List<HRegionInfo> regions = new ArrayList<HRegionInfo>();
+      List<HRegionInfo> regions = new ArrayList<>();
       regions.add(hri);
       am.assign(destServerName, regions);
 
@@ -1214,8 +1214,8 @@ public class TestAssignmentManagerOnCluster {
     rss.start();
     // Create 10 threads and make each do 10 puts related to region state update
     Thread[] th = new Thread[10];
-    List<String> nameList = new ArrayList<String>();
-    List<TableName> tableNameList = new ArrayList<TableName>();
+    List<String> nameList = new ArrayList<>();
+    List<TableName> tableNameList = new ArrayList<>();
     for (int i = 0; i < th.length; i++) {
       th[i] = new Thread() {
         @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 52b58f1..cc73d9d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -340,8 +340,7 @@ public class TestCatalogJanitor {
 
     // First test that our Comparator works right up in CatalogJanitor.
     // Just for kicks.
-    SortedMap<HRegionInfo, Result> regions =
-      new TreeMap<HRegionInfo, Result>(new CatalogJanitor.SplitParentFirstComparator());
+    SortedMap<HRegionInfo, Result> regions = new TreeMap<>(new CatalogJanitor.SplitParentFirstComparator());
     // Now make sure that this regions map sorts as we expect it to.
     regions.put(parent, createResult(parent, splita, splitb));
     regions.put(splitb, createResult(splitb, splitba, splitbb));
@@ -434,16 +433,14 @@ public class TestCatalogJanitor {
         new byte[0]);
     Thread.sleep(1001);
 
-    final Map<HRegionInfo, Result> splitParents =
-        new TreeMap<HRegionInfo, Result>(new SplitParentFirstComparator());
+    final Map<HRegionInfo, Result> splitParents = new TreeMap<>(new SplitParentFirstComparator());
     splitParents.put(parent, createResult(parent, splita, splitb));
     splita.setOffline(true); //simulate that splita goes offline when it is split
     splitParents.put(splita, createResult(splita, splitaa,splitab));
 
-    final Map<HRegionInfo, Result> mergedRegions = new TreeMap<HRegionInfo, Result>();
+    final Map<HRegionInfo, Result> mergedRegions = new TreeMap<>();
     CatalogJanitor janitor = spy(new CatalogJanitor(services));
-    doReturn(new Triple<Integer, Map<HRegionInfo, Result>, Map<HRegionInfo, Result>>(
-            10, mergedRegions, splitParents)).when(janitor)
+    doReturn(new Triple<>(10, mergedRegions, splitParents)).when(janitor)
         .getMergedRegionsAndSplitParents();
 
     //create ref from splita to parent
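
The Triple rewrite in the last hunk above is the most aggressive diamond use in this file: Mockito's doReturn(Object) supplies no useful target type, so the compiler infers the three type arguments from the constructor arguments themselves, and since the mock only needs an Object, the exact inference result does not matter. A sketch with a hypothetical Triple holder standing in for HBase's class:

    import java.util.HashMap;
    import java.util.Map;

    public class TripleInferenceSketch {
      // Hypothetical stand-in for org.apache.hadoop.hbase.util.Triple.
      static class Triple<A, B, C> {
        final A first; final B second; final C third;
        Triple(A first, B second, C third) {
          this.first = first; this.second = second; this.third = third;
        }
      }

      static void consume(Object o) {
        System.out.println(o.getClass().getSimpleName()); // prints: Triple
      }

      public static void main(String[] args) {
        Map<String, Integer> merged = new HashMap<>();
        Map<String, Integer> split = new HashMap<>();
        // The parameter type is plain Object, so the three type arguments
        // are inferred from the constructor arguments:
        // Triple<Integer, Map<String, Integer>, Map<String, Integer>>.
        consume(new Triple<>(10, merged, split));
      }
    }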

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
index 5d47ede..68cab5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClusterStatusPublisher.java
@@ -49,7 +49,7 @@ public class TestClusterStatusPublisher {
     ClusterStatusPublisher csp = new ClusterStatusPublisher() {
       @Override
       protected List<Pair<ServerName, Long>> getDeadServers(long since) {
-        return new ArrayList<Pair<ServerName, Long>>();
+        return new ArrayList<>();
       }
     };
 
@@ -61,10 +61,10 @@ public class TestClusterStatusPublisher {
     ClusterStatusPublisher csp = new ClusterStatusPublisher() {
       @Override
       protected List<Pair<ServerName, Long>> getDeadServers(long since) {
-        List<Pair<ServerName, Long>> res = new ArrayList<Pair<ServerName, Long>>();
+        List<Pair<ServerName, Long>> res = new ArrayList<>();
         switch ((int) EnvironmentEdgeManager.currentTime()) {
           case 2:
-            res.add(new Pair<ServerName, Long>(ServerName.valueOf("hn", 10, 10), 1L));
+            res.add(new Pair<>(ServerName.valueOf("hn", 10, 10), 1L));
             break;
           case 1000:
             break;
@@ -87,9 +87,9 @@ public class TestClusterStatusPublisher {
     ClusterStatusPublisher csp = new ClusterStatusPublisher() {
       @Override
       protected List<Pair<ServerName, Long>> getDeadServers(long since) {
-        List<Pair<ServerName, Long>> res = new ArrayList<Pair<ServerName, Long>>();
+        List<Pair<ServerName, Long>> res = new ArrayList<>();
         for (int i = 0; i < 25; i++) {
-          res.add(new Pair<ServerName, Long>(ServerName.valueOf("hn" + i, 10, 10), 20L));
+          res.add(new Pair<>(ServerName.valueOf("hn" + i, 10, 10), 20L));
         }
 
         return res;
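
The Pair rewrites above show a third inference context, target typing from a method parameter: inside res.add(...), the expected type is the element type of List<Pair<ServerName, Long>>, so new Pair<>(...) resolves without restating the arguments. A minimal sketch with a hypothetical Pair standing in for HBase's:

    import java.util.ArrayList;
    import java.util.List;

    public class PairTargetTypingSketch {
      // Hypothetical stand-in for org.apache.hadoop.hbase.util.Pair.
      static class Pair<F, S> {
        final F first; final S second;
        Pair(F first, S second) { this.first = first; this.second = second; }
        @Override
        public String toString() { return "(" + first + ", " + second + ")"; }
      }

      public static void main(String[] args) {
        List<Pair<String, Long>> res = new ArrayList<>();
        // Target typing: add() expects Pair<String, Long>, so the diamond
        // call needs no explicit type arguments.
        res.add(new Pair<>("hn0", 20L));
        System.out.println(res); // prints: [(hn0, 20)]
      }
    }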

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index d0b8494..4c8728f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -329,7 +329,7 @@ public class TestDistributedLogSplitting {
 
     private final PerClientRandomNonceGenerator delegate = PerClientRandomNonceGenerator.get();
     private boolean isDups = false;
-    private LinkedList<Long> nonces = new LinkedList<Long>();
+    private LinkedList<Long> nonces = new LinkedList<>();
 
     public void startDups() {
       isDups = true;
@@ -370,7 +370,7 @@ public class TestDistributedLogSplitting {
             (ClusterConnection)TEST_UTIL.getConnection(), ng);
 
     try {
-      List<Increment> reqs = new ArrayList<Increment>();
+      List<Increment> reqs = new ArrayList<>();
       for (RegionServerThread rst : cluster.getLiveRegionServerThreads()) {
         HRegionServer hrs = rst.getRegionServer();
         List<HRegionInfo> hris = ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices());
@@ -693,7 +693,7 @@ public class TestDistributedLogSplitting {
     try {
       final SplitLogManager slm = master.getMasterWalManager().getSplitLogManager();
 
-      Set<HRegionInfo> regionSet = new HashSet<HRegionInfo>();
+      Set<HRegionInfo> regionSet = new HashSet<>();
       HRegionInfo region = null;
       HRegionServer hrs = null;
       ServerName firstFailedServer = null;
@@ -942,7 +942,7 @@ public class TestDistributedLogSplitting {
     try {
       final SplitLogManager slm = master.getMasterWalManager().getSplitLogManager();
 
-      Set<HRegionInfo> regionSet = new HashSet<HRegionInfo>();
+      Set<HRegionInfo> regionSet = new HashSet<>();
       HRegionInfo region = null;
       HRegionServer hrs = null;
       HRegionServer dstRS = null;
@@ -1214,10 +1214,10 @@ public class TestDistributedLogSplitting {
     List<HRegionInfo> regions = ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices());
 
     LOG.info("#regions = " + regions.size());
-    Set<HRegionInfo> tmpRegions = new HashSet<HRegionInfo>();
+    Set<HRegionInfo> tmpRegions = new HashSet<>();
     tmpRegions.add(HRegionInfo.FIRST_META_REGIONINFO);
     master.getMasterWalManager().prepareLogReplay(hrs.getServerName(), tmpRegions);
-    Set<HRegionInfo> userRegionSet = new HashSet<HRegionInfo>();
+    Set<HRegionInfo> userRegionSet = new HashSet<>();
     userRegionSet.addAll(regions);
     master.getMasterWalManager().prepareLogReplay(hrs.getServerName(), userRegionSet);
     boolean isMetaRegionInRecovery = false;
@@ -1591,7 +1591,7 @@ public class TestDistributedLogSplitting {
     htd.addFamily(new HColumnDescriptor(family));
     byte[] value = new byte[edit_size];
 
-    List<HRegionInfo> hris = new ArrayList<HRegionInfo>();
+    List<HRegionInfo> hris = new ArrayList<>();
     for (HRegionInfo region : regions) {
       if (!region.getTable().getNameAsString().equalsIgnoreCase(tname)) {
         continue;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
index 37e714e..fe0e7b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
@@ -286,7 +286,7 @@ public class TestMasterNoCluster {
         // Record a newer server in server manager at first
         getServerManager().recordNewServerWithLock(newServer, ServerLoad.EMPTY_SERVERLOAD);
 
-        List<ServerName> onlineServers = new ArrayList<ServerName>();
+        List<ServerName> onlineServers = new ArrayList<>();
         onlineServers.add(deadServer);
         onlineServers.add(newServer);
         // Mock the region server tracker to pull the dead server from zk

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index bb8a995..6c737e9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -219,7 +219,7 @@ public class TestMasterOperationsForRegionReplicas {
       //just check that the number of default replica regions in the meta table is the same
       //as the number of regions the table was created with, and the count of the
       //replicas is numReplica for each region
-      Map<HRegionInfo, Integer> defaultReplicas = new HashMap<HRegionInfo, Integer>();
+      Map<HRegionInfo, Integer> defaultReplicas = new HashMap<>();
       for (HRegionInfo hri : hris) {
         Integer i;
         HRegionInfo regionReplica0 = RegionReplicaUtil.getRegionInfoForDefaultReplica(hri);
@@ -227,7 +227,7 @@ public class TestMasterOperationsForRegionReplicas {
             (i = defaultReplicas.get(regionReplica0)) == null ? 1 : i + 1);
       }
       assert(defaultReplicas.size() == numRegions);
-      Collection<Integer> counts = new HashSet<Integer>(defaultReplicas.values());
+      Collection<Integer> counts = new HashSet<>(defaultReplicas.values());
       assert(counts.size() == 1 && counts.contains(new Integer(numReplica)));
     } finally {
       ADMIN.disableTable(tableName);
@@ -248,7 +248,7 @@ public class TestMasterOperationsForRegionReplicas {
       desc.addFamily(new HColumnDescriptor("family"));
       ADMIN.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), numRegions);
       TEST_UTIL.waitTableEnabled(tableName);
-      Set<byte[]> tableRows = new HashSet<byte[]>();
+      Set<byte[]> tableRows = new HashSet<>();
       List<HRegionInfo> hris = MetaTableAccessor.getTableRegions(ADMIN.getConnection(), tableName);
       for (HRegionInfo hri : hris) {
         tableRows.add(hri.getRegionName());
@@ -317,7 +317,7 @@ public class TestMasterOperationsForRegionReplicas {
         continue;
       }
       List<HRegionInfo> regions = entry.getValue();
-      Set<byte[]> setOfStartKeys = new HashSet<byte[]>();
+      Set<byte[]> setOfStartKeys = new HashSet<>();
       for (HRegionInfo region : regions) {
         byte[] startKey = region.getStartKey();
         if (region.getTable().equals(table)) {
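
The counting idiom in the first hunk above (get, null-check, put back) builds a frequency map of default replicas by hand; since Java 8 the same thing can be written with Map.merge. A side-by-side sketch with illustrative data, shown only for comparison:

    import java.util.HashMap;
    import java.util.Map;

    public class ReplicaCountSketch {
      public static void main(String[] args) {
        String[] defaultReplicaKeys = { "r1", "r1", "r2", "r2" };

        // The idiom used in the test: get, null-check, put back.
        Map<String, Integer> counts = new HashMap<>();
        for (String k : defaultReplicaKeys) {
          Integer i;
          counts.put(k, (i = counts.get(k)) == null ? 1 : i + 1);
        }

        // Equivalent Java 8 form.
        Map<String, Integer> counts8 = new HashMap<>();
        for (String k : defaultReplicaKeys) {
          counts8.merge(k, 1, Integer::sum);
        }

        System.out.println(counts.equals(counts8)); // prints: true
      }
    }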

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
index af54ffc..b59e6ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
@@ -90,7 +90,7 @@ public class TestMasterStatusServlet {
     // Fake AssignmentManager and RIT
     AssignmentManager am = Mockito.mock(AssignmentManager.class);
     RegionStates rs = Mockito.mock(RegionStates.class);
-    Set<RegionState> regionsInTransition = new HashSet<RegionState>();
+    Set<RegionState> regionsInTransition = new HashSet<>();
     regionsInTransition.add(new RegionState(FAKE_HRI, RegionState.State.CLOSING, 12345L, FAKE_HOST));
     Mockito.doReturn(rs).when(am).getRegionStates();
     Mockito.doReturn(regionsInTransition).when(rs).getRegionsInTransition();
@@ -145,7 +145,7 @@ public class TestMasterStatusServlet {
     List<ServerName> servers = Lists.newArrayList(
         ServerName.valueOf("rootserver,123,12345"),
         ServerName.valueOf("metaserver,123,12345"));
-    Set<ServerName> deadServers = new HashSet<ServerName>(
+    Set<ServerName> deadServers = new HashSet<>(
         Lists.newArrayList(
             ServerName.valueOf("badserver,123,12345"),
             ServerName.valueOf("uglyserver,123,12345"))
@@ -164,8 +164,7 @@ public class TestMasterStatusServlet {
     RegionStates rs = Mockito.mock(RegionStates.class);
 
     // Add 100 regions as in-transition
-    TreeSet<RegionState> regionsInTransition = new TreeSet<RegionState>(
-      RegionStates.REGION_STATE_COMPARATOR);
+    TreeSet<RegionState> regionsInTransition = new TreeSet<>(RegionStates.REGION_STATE_COMPARATOR);
     for (byte i = 0; i < 100; i++) {
       HRegionInfo hri = new HRegionInfo(FAKE_TABLE.getTableName(),
           new byte[]{i}, new byte[]{(byte) (i+1)});

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterWalManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterWalManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterWalManager.java
index 7c7531f..782c400 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterWalManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterWalManager.java
@@ -90,7 +90,7 @@ public class TestMasterWalManager {
     inRecoveringRegionPath = ZKUtil.joinZNode(inRecoveringRegionPath,
       inRecoveryServerName.getServerName());
     ZKUtil.createWithParents(zkw, inRecoveringRegionPath);
-    Set<ServerName> servers = new HashSet<ServerName>();
+    Set<ServerName> servers = new HashSet<>();
     servers.add(previouselyFaildServerName);
     mwm.removeStaleRecoveringRegionsFromZK(servers);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index b2be237..67add2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -80,8 +80,7 @@ public class TestRegionPlacement {
   private static Position[] positions = Position.values();
   private int lastRegionOnPrimaryRSCount = 0;
   private int REGION_NUM = 10;
-  private Map<HRegionInfo, ServerName[]> favoredNodesAssignmentPlan =
-      new HashMap<HRegionInfo, ServerName[]>();
+  private Map<HRegionInfo, ServerName[]> favoredNodesAssignmentPlan = new HashMap<>();
 
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
@@ -204,14 +203,12 @@ public class TestRegionPlacement {
     } while (ServerName.isSameHostnameAndPort(metaServer, serverToKill) || isNamespaceServer ||
         TEST_UTIL.getHBaseCluster().getRegionServer(killIndex).getNumberOfOnlineRegions() == 0);
     LOG.debug("Stopping RS " + serverToKill);
-    Map<HRegionInfo, Pair<ServerName, ServerName>> regionsToVerify =
-        new HashMap<HRegionInfo, Pair<ServerName, ServerName>>();
+    Map<HRegionInfo, Pair<ServerName, ServerName>> regionsToVerify = new HashMap<>();
     // mark the regions to track
     for (Map.Entry<HRegionInfo, ServerName[]> entry : favoredNodesAssignmentPlan.entrySet()) {
       ServerName s = entry.getValue()[0];
       if (ServerName.isSameHostnameAndPort(s, serverToKill)) {
-        regionsToVerify.put(entry.getKey(), new Pair<ServerName, ServerName>(
-            entry.getValue()[1], entry.getValue()[2]));
+        regionsToVerify.put(entry.getKey(), new Pair<>(entry.getValue()[1], entry.getValue()[2]));
         LOG.debug("Adding " + entry.getKey() + " with secondary/tertiary " +
             entry.getValue()[1] + " " + entry.getValue()[2]);
       }
@@ -308,7 +305,7 @@ public class TestRegionPlacement {
       plan.getAssignmentMap().entrySet()) {
 
       // copy the server list from the original plan
-      List<ServerName> shuffledServerList = new ArrayList<ServerName>();
+      List<ServerName> shuffledServerList = new ArrayList<>();
       shuffledServerList.addAll(entry.getValue());
 
       // start to shuffle

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
index 7c6f08b..f10c368 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java
@@ -79,12 +79,12 @@ public class TestRegionPlacement2 {
     LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(TEST_UTIL.getConfiguration());
     balancer.setMasterServices(TEST_UTIL.getMiniHBaseCluster().getMaster());
     balancer.initialize();
-    List<ServerName> servers = new ArrayList<ServerName>();
+    List<ServerName> servers = new ArrayList<>();
     for (int i = 0; i < SLAVES; i++) {
       ServerName server = TEST_UTIL.getMiniHBaseCluster().getRegionServer(i).getServerName();
       servers.add(server);
     }
-    List<HRegionInfo> regions = new ArrayList<HRegionInfo>(1);
+    List<HRegionInfo> regions = new ArrayList<>(1);
     HRegionInfo region = new HRegionInfo(TableName.valueOf(name.getMethodName()));
     regions.add(region);
     Map<ServerName,List<HRegionInfo>> assignmentMap = balancer.roundRobinAssignment(regions,
@@ -140,12 +140,12 @@ public class TestRegionPlacement2 {
     LoadBalancer balancer = LoadBalancerFactory.getLoadBalancer(TEST_UTIL.getConfiguration());
     balancer.setMasterServices(TEST_UTIL.getMiniHBaseCluster().getMaster());
     balancer.initialize();
-    List<ServerName> servers = new ArrayList<ServerName>();
+    List<ServerName> servers = new ArrayList<>();
     for (int i = 0; i < SLAVES; i++) {
       ServerName server = TEST_UTIL.getMiniHBaseCluster().getRegionServer(i).getServerName();
       servers.add(server);
     }
-    List<HRegionInfo> regions = new ArrayList<HRegionInfo>(1);
+    List<HRegionInfo> regions = new ArrayList<>(1);
     HRegionInfo region = new HRegionInfo(TableName.valueOf(name.getMethodName()));
     regions.add(region);
     ServerName serverBefore = balancer.randomAssignment(region, servers);
@@ -183,7 +183,7 @@ public class TestRegionPlacement2 {
 
   private List<ServerName> removeMatchingServers(Collection<ServerName> serversWithoutStartCode,
       List<ServerName> servers) {
-    List<ServerName> serversToRemove = new ArrayList<ServerName>();
+    List<ServerName> serversToRemove = new ArrayList<>();
     for (ServerName s : serversWithoutStartCode) {
       serversToRemove.addAll(removeMatchingServers(s, servers));
     }
@@ -192,7 +192,7 @@ public class TestRegionPlacement2 {
 
   private List<ServerName> removeMatchingServers(ServerName serverWithoutStartCode,
       List<ServerName> servers) {
-    List<ServerName> serversToRemove = new ArrayList<ServerName>();
+    List<ServerName> serversToRemove = new ArrayList<>();
     for (ServerName s : servers) {
       if (ServerName.isSameHostnameAndPort(s, serverWithoutStartCode)) {
         serversToRemove.add(s);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
index ac99b29..80c6f3a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
@@ -259,8 +259,8 @@ public class  TestRollingRestart {
 
   private NavigableSet<String> getDoubleAssignedRegions(
       MiniHBaseCluster cluster) throws IOException {
-    NavigableSet<String> online = new TreeSet<String>();
-    NavigableSet<String> doubled = new TreeSet<String>();
+    NavigableSet<String> online = new TreeSet<>();
+    NavigableSet<String> doubled = new TreeSet<>();
     for (RegionServerThread rst : cluster.getLiveRegionServerThreads()) {
       for (HRegionInfo region : ProtobufUtil.getOnlineRegions(
           rst.getRegionServer().getRSRpcServices())) {