You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2017/03/07 19:23:19 UTC
[06/22] hbase git commit: HBASE-17532 Replaced explicit type with diamond operator
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
index 0f427ad..f93449c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
@@ -259,7 +259,7 @@ public class BalancerTestBase {
}
public List<String> resolve(List<String> names) {
- List<String> ret = new ArrayList<String>(names.size());
+ List<String> ret = new ArrayList<>(names.size());
for (String name : names) {
ret.add("rack");
}
@@ -345,14 +345,14 @@ public class BalancerTestBase {
* Checks whether region replicas are not hosted on the same host.
*/
public void assertRegionReplicaPlacement(Map<ServerName, List<HRegionInfo>> serverMap, RackManager rackManager) {
- TreeMap<String, Set<HRegionInfo>> regionsPerHost = new TreeMap<String, Set<HRegionInfo>>();
- TreeMap<String, Set<HRegionInfo>> regionsPerRack = new TreeMap<String, Set<HRegionInfo>>();
+ TreeMap<String, Set<HRegionInfo>> regionsPerHost = new TreeMap<>();
+ TreeMap<String, Set<HRegionInfo>> regionsPerRack = new TreeMap<>();
for (Entry<ServerName, List<HRegionInfo>> entry : serverMap.entrySet()) {
String hostname = entry.getKey().getHostname();
Set<HRegionInfo> infos = regionsPerHost.get(hostname);
if (infos == null) {
- infos = new HashSet<HRegionInfo>();
+ infos = new HashSet<>();
regionsPerHost.put(hostname, infos);
}
@@ -372,7 +372,7 @@ public class BalancerTestBase {
String rack = rackManager.getRack(entry.getKey());
Set<HRegionInfo> infos = regionsPerRack.get(rack);
if (infos == null) {
- infos = new HashSet<HRegionInfo>();
+ infos = new HashSet<>();
regionsPerRack.put(rack, infos);
}
@@ -399,7 +399,7 @@ public class BalancerTestBase {
}
protected List<ServerAndLoad> convertToList(final Map<ServerName, List<HRegionInfo>> servers) {
- List<ServerAndLoad> list = new ArrayList<ServerAndLoad>(servers.size());
+ List<ServerAndLoad> list = new ArrayList<>(servers.size());
for (Map.Entry<ServerName, List<HRegionInfo>> e : servers.entrySet()) {
list.add(new ServerAndLoad(e.getKey(), e.getValue().size()));
}
@@ -407,7 +407,7 @@ public class BalancerTestBase {
}
protected String printMock(List<ServerAndLoad> balancedCluster) {
- SortedSet<ServerAndLoad> sorted = new TreeSet<ServerAndLoad>(balancedCluster);
+ SortedSet<ServerAndLoad> sorted = new TreeSet<>(balancedCluster);
ServerAndLoad[] arr = sorted.toArray(new ServerAndLoad[sorted.size()]);
StringBuilder sb = new StringBuilder(sorted.size() * 4 + 4);
sb.append("{ ");
@@ -434,9 +434,9 @@ public class BalancerTestBase {
protected List<ServerAndLoad> reconcile(List<ServerAndLoad> list,
List<RegionPlan> plans,
Map<ServerName, List<HRegionInfo>> servers) {
- List<ServerAndLoad> result = new ArrayList<ServerAndLoad>(list.size());
+ List<ServerAndLoad> result = new ArrayList<>(list.size());
- Map<ServerName, ServerAndLoad> map = new HashMap<ServerName, ServerAndLoad>(list.size());
+ Map<ServerName, ServerAndLoad> map = new HashMap<>(list.size());
for (ServerAndLoad sl : list) {
map.put(sl.getServerName(), sl);
}
@@ -477,7 +477,7 @@ public class BalancerTestBase {
protected TreeMap<ServerName, List<HRegionInfo>> mockClusterServers(int[] mockCluster, int numTables) {
int numServers = mockCluster.length;
- TreeMap<ServerName, List<HRegionInfo>> servers = new TreeMap<ServerName, List<HRegionInfo>>();
+ TreeMap<ServerName, List<HRegionInfo>> servers = new TreeMap<>();
for (int i = 0; i < numServers; i++) {
int numRegions = mockCluster[i];
ServerAndLoad sal = randomServer(0);
@@ -489,7 +489,7 @@ public class BalancerTestBase {
protected TreeMap<ServerName, List<HRegionInfo>> mockUniformClusterServers(int[] mockCluster) {
int numServers = mockCluster.length;
- TreeMap<ServerName, List<HRegionInfo>> servers = new TreeMap<ServerName, List<HRegionInfo>>();
+ TreeMap<ServerName, List<HRegionInfo>> servers = new TreeMap<>();
for (int i = 0; i < numServers; i++) {
int numRegions = mockCluster[i];
ServerAndLoad sal = randomServer(0);
@@ -507,12 +507,12 @@ public class BalancerTestBase {
for (HRegionInfo hri : regions){
TreeMap<ServerName, List<HRegionInfo>> servers = result.get(hri.getTable());
if (servers == null) {
- servers = new TreeMap<ServerName, List<HRegionInfo>>();
+ servers = new TreeMap<>();
result.put(hri.getTable(), servers);
}
List<HRegionInfo> hrilist = servers.get(sal);
if (hrilist == null) {
- hrilist = new ArrayList<HRegionInfo>();
+ hrilist = new ArrayList<>();
servers.put(sal, hrilist);
}
hrilist.add(hri);
@@ -520,20 +520,20 @@ public class BalancerTestBase {
}
for(Map.Entry<TableName, TreeMap<ServerName, List<HRegionInfo>>> entry : result.entrySet()){
for(ServerName srn : clusterServers.keySet()){
- if (!entry.getValue().containsKey(srn)) entry.getValue().put(srn, new ArrayList<HRegionInfo>());
+ if (!entry.getValue().containsKey(srn)) entry.getValue().put(srn, new ArrayList<>());
}
}
return result;
}
- private Queue<HRegionInfo> regionQueue = new LinkedList<HRegionInfo>();
+ private Queue<HRegionInfo> regionQueue = new LinkedList<>();
protected List<HRegionInfo> randomRegions(int numRegions) {
return randomRegions(numRegions, -1);
}
protected List<HRegionInfo> randomRegions(int numRegions, int numTables) {
- List<HRegionInfo> regions = new ArrayList<HRegionInfo>(numRegions);
+ List<HRegionInfo> regions = new ArrayList<>(numRegions);
byte[] start = new byte[16];
byte[] end = new byte[16];
rand.nextBytes(start);
@@ -554,7 +554,7 @@ public class BalancerTestBase {
}
protected List<HRegionInfo> uniformRegions(int numRegions) {
- List<HRegionInfo> regions = new ArrayList<HRegionInfo>(numRegions);
+ List<HRegionInfo> regions = new ArrayList<>(numRegions);
byte[] start = new byte[16];
byte[] end = new byte[16];
rand.nextBytes(start);
@@ -574,7 +574,7 @@ public class BalancerTestBase {
regionQueue.addAll(regions);
}
- private Queue<ServerName> serverQueue = new LinkedList<ServerName>();
+ private Queue<ServerName> serverQueue = new LinkedList<>();
protected ServerAndLoad randomServer(final int numRegionsPerServer) {
if (!this.serverQueue.isEmpty()) {
@@ -589,7 +589,7 @@ public class BalancerTestBase {
}
protected List<ServerAndLoad> randomServers(int numServers, int numRegionsPerServer) {
- List<ServerAndLoad> servers = new ArrayList<ServerAndLoad>(numServers);
+ List<ServerAndLoad> servers = new ArrayList<>(numServers);
for (int i = 0; i < numServers; i++) {
servers.add(randomServer(numRegionsPerServer));
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
index 02032fd..751adc5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java
@@ -185,7 +185,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
// Test simple case where all same servers are there
List<ServerAndLoad> servers = randomServers(10, 10);
List<HRegionInfo> regions = randomRegions(100);
- Map<HRegionInfo, ServerName> existing = new TreeMap<HRegionInfo, ServerName>();
+ Map<HRegionInfo, ServerName> existing = new TreeMap<>();
for (int i = 0; i < regions.size(); i++) {
ServerName sn = servers.get(i % servers.size()).getServerName();
// The old server would have had same host and port, but different
@@ -200,7 +200,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
assertRetainedAssignment(existing, listOfServerNames, assignment);
// Include two new servers that were not there before
- List<ServerAndLoad> servers2 = new ArrayList<ServerAndLoad>(servers);
+ List<ServerAndLoad> servers2 = new ArrayList<>(servers);
servers2.add(randomServer(10));
servers2.add(randomServer(10));
listOfServerNames = getListOfServerNames(servers2);
@@ -208,7 +208,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
assertRetainedAssignment(existing, listOfServerNames, assignment);
// Remove two of the servers that were previously there
- List<ServerAndLoad> servers3 = new ArrayList<ServerAndLoad>(servers);
+ List<ServerAndLoad> servers3 = new ArrayList<>(servers);
servers3.remove(0);
servers3.remove(0);
listOfServerNames = getListOfServerNames(servers3);
@@ -266,9 +266,9 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
// replica from one node to a specific other node or rack lowers the
// availability of the region or not
- List<HRegionInfo> list0 = new ArrayList<HRegionInfo>();
- List<HRegionInfo> list1 = new ArrayList<HRegionInfo>();
- List<HRegionInfo> list2 = new ArrayList<HRegionInfo>();
+ List<HRegionInfo> list0 = new ArrayList<>();
+ List<HRegionInfo> list1 = new ArrayList<>();
+ List<HRegionInfo> list2 = new ArrayList<>();
// create a region (region1)
HRegionInfo hri1 = new HRegionInfo(
TableName.valueOf(name.getMethodName()), "key1".getBytes(), "key2".getBytes(),
@@ -282,8 +282,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
list0.add(hri1); //only region1
list1.add(hri2); //only replica_of_region1
list2.add(hri3); //only region2
- Map<ServerName, List<HRegionInfo>> clusterState =
- new LinkedHashMap<ServerName, List<HRegionInfo>>();
+ Map<ServerName, List<HRegionInfo>> clusterState = new LinkedHashMap<>();
clusterState.put(servers[0], list0); //servers[0] hosts region1
clusterState.put(servers[1], list1); //servers[1] hosts replica_of_region1
clusterState.put(servers[2], list2); //servers[2] hosts region2
@@ -318,7 +317,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
clusterState.put(servers[0], list0); //servers[0], rack1 hosts region1
clusterState.put(servers[5], list1); //servers[5], rack2 hosts replica_of_region1 and replica_of_region2
clusterState.put(servers[6], list2); //servers[6], rack2 hosts region2
- clusterState.put(servers[10], new ArrayList<HRegionInfo>()); //servers[10], rack3 hosts no region
+ clusterState.put(servers[10], new ArrayList<>()); //servers[10], rack3 hosts no region
// create a cluster with the above clusterState
cluster = new Cluster(clusterState, null, null, rackManager);
// check whether a move of region1 from servers[0],rack1 to servers[6],rack2 would
@@ -335,9 +334,9 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
@Test (timeout=180000)
public void testRegionAvailabilityWithRegionMoves() throws Exception {
- List<HRegionInfo> list0 = new ArrayList<HRegionInfo>();
- List<HRegionInfo> list1 = new ArrayList<HRegionInfo>();
- List<HRegionInfo> list2 = new ArrayList<HRegionInfo>();
+ List<HRegionInfo> list0 = new ArrayList<>();
+ List<HRegionInfo> list1 = new ArrayList<>();
+ List<HRegionInfo> list2 = new ArrayList<>();
// create a region (region1)
HRegionInfo hri1 = new HRegionInfo(
TableName.valueOf(name.getMethodName()), "key1".getBytes(), "key2".getBytes(),
@@ -351,8 +350,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
list0.add(hri1); //only region1
list1.add(hri2); //only replica_of_region1
list2.add(hri3); //only region2
- Map<ServerName, List<HRegionInfo>> clusterState =
- new LinkedHashMap<ServerName, List<HRegionInfo>>();
+ Map<ServerName, List<HRegionInfo>> clusterState = new LinkedHashMap<>();
clusterState.put(servers[0], list0); //servers[0] hosts region1
clusterState.put(servers[1], list1); //servers[1] hosts replica_of_region1
clusterState.put(servers[2], list2); //servers[2] hosts region2
@@ -374,7 +372,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
// start over again
clusterState.clear();
- List<HRegionInfo> list3 = new ArrayList<HRegionInfo>();
+ List<HRegionInfo> list3 = new ArrayList<>();
HRegionInfo hri4 = RegionReplicaUtil.getRegionInfoForReplica(hri3, 1);
list3.add(hri4);
clusterState.put(servers[0], list0); //servers[0], rack1 hosts region1
@@ -394,7 +392,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
}
private List<ServerName> getListOfServerNames(final List<ServerAndLoad> sals) {
- List<ServerName> list = new ArrayList<ServerName>();
+ List<ServerName> list = new ArrayList<>();
for (ServerAndLoad e : sals) {
list.add(e.getServerName());
}
@@ -417,8 +415,8 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
private void assertRetainedAssignment(Map<HRegionInfo, ServerName> existing,
List<ServerName> servers, Map<ServerName, List<HRegionInfo>> assignment) {
// Verify condition 1, every region assigned, and to online server
- Set<ServerName> onlineServerSet = new TreeSet<ServerName>(servers);
- Set<HRegionInfo> assignedRegions = new TreeSet<HRegionInfo>();
+ Set<ServerName> onlineServerSet = new TreeSet<>(servers);
+ Set<HRegionInfo> assignedRegions = new TreeSet<>();
for (Map.Entry<ServerName, List<HRegionInfo>> a : assignment.entrySet()) {
assertTrue("Region assigned to server that was not listed as online",
onlineServerSet.contains(a.getKey()));
@@ -428,7 +426,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
assertEquals(existing.size(), assignedRegions.size());
// Verify condition 2, if server had existing assignment, must have same
- Set<String> onlineHostNames = new TreeSet<String>();
+ Set<String> onlineHostNames = new TreeSet<>();
for (ServerName s : servers) {
onlineHostNames.add(s.getHostname());
}
@@ -453,12 +451,12 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
// sharing same host and port
List<ServerName> servers = getListOfServerNames(randomServers(10, 10));
List<HRegionInfo> regions = randomRegions(101);
- Map<ServerName, List<HRegionInfo>> clusterState = new HashMap<ServerName, List<HRegionInfo>>();
+ Map<ServerName, List<HRegionInfo>> clusterState = new HashMap<>();
assignRegions(regions, servers, clusterState);
// construct another list of servers, but sharing same hosts and ports
- List<ServerName> oldServers = new ArrayList<ServerName>(servers.size());
+ List<ServerName> oldServers = new ArrayList<>(servers.size());
for (ServerName sn : servers) {
// The old server would have had same host and port, but different start code!
oldServers.add(ServerName.valueOf(sn.getHostname(), sn.getPort(), sn.getStartcode() - 10));
@@ -479,7 +477,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
ServerName sn = servers.get(i % servers.size());
List<HRegionInfo> regionsOfServer = clusterState.get(sn);
if (regionsOfServer == null) {
- regionsOfServer = new ArrayList<HRegionInfo>(10);
+ regionsOfServer = new ArrayList<>(10);
clusterState.put(sn, regionsOfServer);
}
@@ -492,7 +490,7 @@ public class TestBaseLoadBalancer extends BalancerTestBase {
// tests whether region locations are handled correctly in Cluster
List<ServerName> servers = getListOfServerNames(randomServers(10, 10));
List<HRegionInfo> regions = randomRegions(101);
- Map<ServerName, List<HRegionInfo>> clusterState = new HashMap<ServerName, List<HRegionInfo>>();
+ Map<ServerName, List<HRegionInfo>> clusterState = new HashMap<>();
assignRegions(regions, servers, clusterState);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
index 962daf7..610ecf7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java
@@ -129,16 +129,15 @@ public class TestDefaultLoadBalancer extends BalancerTestBase {
*/
@Test (timeout=60000)
public void testBalanceClusterOverall() throws Exception {
- Map<TableName, Map<ServerName, List<HRegionInfo>>> clusterLoad
- = new TreeMap<TableName, Map<ServerName, List<HRegionInfo>>>();
+ Map<TableName, Map<ServerName, List<HRegionInfo>>> clusterLoad = new TreeMap<>();
for (int[] mockCluster : clusterStateMocks) {
Map<ServerName, List<HRegionInfo>> clusterServers = mockClusterServers(mockCluster, 50);
List<ServerAndLoad> clusterList = convertToList(clusterServers);
clusterLoad.put(TableName.valueOf(name.getMethodName()), clusterServers);
HashMap<TableName, TreeMap<ServerName, List<HRegionInfo>>> result = mockClusterServersWithTables(clusterServers);
loadBalancer.setClusterLoad(clusterLoad);
- List<RegionPlan> clusterplans = new ArrayList<RegionPlan>();
- List<Pair<TableName, Integer>> regionAmountList = new ArrayList<Pair<TableName, Integer>>();
+ List<RegionPlan> clusterplans = new ArrayList<>();
+ List<Pair<TableName, Integer>> regionAmountList = new ArrayList<>();
for(TreeMap<ServerName, List<HRegionInfo>> servers : result.values()){
List<ServerAndLoad> list = convertToList(servers);
LOG.info("Mock Cluster : " + printMock(list) + " " + printStats(list));
@@ -168,8 +167,7 @@ public class TestDefaultLoadBalancer extends BalancerTestBase {
*/
@Test (timeout=60000)
public void testImpactOfBalanceClusterOverall() throws Exception {
- Map<TableName, Map<ServerName, List<HRegionInfo>>> clusterLoad
- = new TreeMap<TableName, Map<ServerName, List<HRegionInfo>>>();
+ Map<TableName, Map<ServerName, List<HRegionInfo>>> clusterLoad = new TreeMap<>();
Map<ServerName, List<HRegionInfo>> clusterServers = mockUniformClusterServers(mockUniformCluster);
List<ServerAndLoad> clusterList = convertToList(clusterServers);
clusterLoad.put(TableName.valueOf(name.getMethodName()), clusterServers);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionLocationFinder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionLocationFinder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionLocationFinder.java
index f18d722..365059c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionLocationFinder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionLocationFinder.java
@@ -101,7 +101,7 @@ public class TestRegionLocationFinder {
@Test
public void testMapHostNameToServerName() throws Exception {
- List<String> topHosts = new ArrayList<String>();
+ List<String> topHosts = new ArrayList<>();
for (int i = 0; i < ServerNum; i++) {
HRegionServer server = cluster.getRegionServer(i);
String serverHost = server.getServerName().getHostname();
@@ -151,7 +151,7 @@ public class TestRegionLocationFinder {
if (regions.size() <= 0) {
continue;
}
- List<HRegionInfo> regionInfos = new ArrayList<HRegionInfo>(regions.size());
+ List<HRegionInfo> regionInfos = new ArrayList<>(regions.size());
for (Region region : regions) {
regionInfos.add(region.getRegionInfo());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
index 368f4fa..fee98c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java
@@ -73,8 +73,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
RegionLoad rl = mock(RegionLoad.class);
when(rl.getStorefileSizeMB()).thenReturn(i);
- Map<byte[], RegionLoad> regionLoadMap =
- new TreeMap<byte[], RegionLoad>(Bytes.BYTES_COMPARATOR);
+ Map<byte[], RegionLoad> regionLoadMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
regionLoadMap.put(Bytes.toBytes(REGION_KEY), rl);
when(sl.getRegionsLoad()).thenReturn(regionLoadMap);
@@ -341,7 +340,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
ServerName deadSn = ServerName.valueOf(sn.getHostname(), sn.getPort(), sn.getStartcode() - 100);
- serverMap.put(deadSn, new ArrayList<HRegionInfo>(0));
+ serverMap.put(deadSn, new ArrayList<>(0));
plans = loadBalancer.balanceCluster(serverMap);
assertNull(plans);
@@ -443,7 +442,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
List<HRegionInfo> regions = randomRegions(1);
ServerName s1 = ServerName.valueOf("host1", 1000, 11111);
ServerName s2 = ServerName.valueOf("host11", 1000, 11111);
- Map<ServerName, List<HRegionInfo>> map = new HashMap<ServerName, List<HRegionInfo>>();
+ Map<ServerName, List<HRegionInfo>> map = new HashMap<>();
map.put(s1, regions);
regions.add(RegionReplicaUtil.getRegionInfoForReplica(regions.get(0), 1));
// until the step above s1 holds two replicas of a region
@@ -454,7 +453,7 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
// and both the replicas are on the same rack
map.clear();
regions = randomRegions(1);
- List<HRegionInfo> regionsOnS2 = new ArrayList<HRegionInfo>(1);
+ List<HRegionInfo> regionsOnS2 = new ArrayList<>(1);
regionsOnS2.add(RegionReplicaUtil.getRegionInfoForReplica(regions.get(0), 1));
map.put(s1, regions);
map.put(s2, regionsOnS2);
@@ -569,12 +568,12 @@ public class TestStochasticLoadBalancer extends BalancerTestBase {
int numNodesPerHost = 4;
// create a new map with 4 RS per host.
- Map<ServerName, List<HRegionInfo>> newServerMap = new TreeMap<ServerName, List<HRegionInfo>>(serverMap);
+ Map<ServerName, List<HRegionInfo>> newServerMap = new TreeMap<>(serverMap);
for (Map.Entry<ServerName, List<HRegionInfo>> entry : serverMap.entrySet()) {
for (int i=1; i < numNodesPerHost; i++) {
ServerName s1 = entry.getKey();
ServerName s2 = ServerName.valueOf(s1.getHostname(), s1.getPort() + i, 1); // create an RS for the same host
- newServerMap.put(s2, new ArrayList<HRegionInfo>());
+ newServerMap.put(s2, new ArrayList<>());
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
index b6b5492..3467f08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
@@ -176,7 +176,7 @@ public class TestLogsCleaner {
rqc.set(cleaner, rqcMock);
// This should return eventually when cversion stabilizes
- cleaner.getDeletableFiles(new LinkedList<FileStatus>());
+ cleaner.getDeletableFiles(new LinkedList<>());
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
index 817cfb4..6df05c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
@@ -140,7 +140,7 @@ public class TestReplicationHFileCleaner {
cleaner.isFileDeletable(fs.getFileStatus(file)));
List<Pair<Path, Path>> files = new ArrayList<>(1);
- files.add(new Pair<Path, Path>(null, file));
+ files.add(new Pair<>(null, file));
// 4. Add the file to hfile-refs queue
rq.addHFileRefs(peerId, files);
// 5. Assert file should not be deletable
@@ -159,7 +159,7 @@ public class TestReplicationHFileCleaner {
fs.createNewFile(deletablefile);
assertTrue("Test file not created!", fs.exists(deletablefile));
- List<FileStatus> files = new ArrayList<FileStatus>(2);
+ List<FileStatus> files = new ArrayList<>(2);
FileStatus f = new FileStatus();
f.setPath(deletablefile);
files.add(f);
@@ -168,7 +168,7 @@ public class TestReplicationHFileCleaner {
files.add(f);
List<Pair<Path, Path>> hfiles = new ArrayList<>(1);
- hfiles.add(new Pair<Path, Path>(null, notDeletablefile));
+ hfiles.add(new Pair<>(null, notDeletablefile));
// 2. Add one file to hfile-refs queue
rq.addHFileRefs(peerId, hfiles);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
index 7e6691d..ce8b0c6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
@@ -462,7 +462,7 @@ public class MasterProcedureTestingUtility {
public void addProcId(long procId) {
if (procsToAbort == null) {
- procsToAbort = new TreeSet<Long>();
+ procsToAbort = new TreeSet<>();
}
procsToAbort.add(procId);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
index df431a5..6d88502 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
@@ -84,8 +84,8 @@ public class TestMasterProcedureSchedulerConcurrency {
assertEquals(opsCount.get(), queue.size());
final Thread[] threads = new Thread[NUM_TABLES * 2];
- final HashSet<TableName> concurrentTables = new HashSet<TableName>();
- final ArrayList<String> failures = new ArrayList<String>();
+ final HashSet<TableName> concurrentTables = new HashSet<>();
+ final ArrayList<String> failures = new ArrayList<>();
final AtomicInteger concurrentCount = new AtomicInteger(0);
for (int i = 0; i < threads.length; ++i) {
threads[i] = new Thread() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
index 51aff6d..76d4585 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
@@ -199,7 +199,7 @@ public class TestSnapshotFileCache {
class SnapshotFiles implements SnapshotFileCache.SnapshotFileInspector {
public Collection<String> filesUnderSnapshot(final Path snapshotDir) throws IOException {
- Collection<String> files = new HashSet<String>();
+ Collection<String> files = new HashSet<>();
files.addAll(SnapshotReferenceUtil.getHFileNames(UTIL.getConfiguration(), fs, snapshotDir));
return files;
}
@@ -223,7 +223,7 @@ public class TestSnapshotFileCache {
private void createAndTestSnapshot(final SnapshotFileCache cache,
final SnapshotMock.SnapshotBuilder builder,
final boolean tmp, final boolean removeOnExit) throws IOException {
- List<Path> files = new ArrayList<Path>();
+ List<Path> files = new ArrayList<>();
for (int i = 0; i < 3; ++i) {
for (Path filePath: builder.addRegion()) {
String fileName = filePath.getName();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
index 30bea8c..fba250d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
@@ -116,7 +116,7 @@ public class TestSnapshotHFileCleaner {
class SnapshotFiles implements SnapshotFileCache.SnapshotFileInspector {
public Collection<String> filesUnderSnapshot(final Path snapshotDir) throws IOException {
- Collection<String> files = new HashSet<String>();
+ Collection<String> files = new HashSet<>();
files.addAll(SnapshotReferenceUtil.getHFileNames(TEST_UTIL.getConfiguration(), fs, snapshotDir));
return files;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
index b73b943..83936aa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java
@@ -624,7 +624,7 @@ public class TestMobCompactor {
// the ref name is the new file
Path mobFamilyPath =
MobUtils.getMobFamilyPath(TEST_UTIL.getConfiguration(), tableName, hcd1.getNameAsString());
- List<Path> paths = new ArrayList<Path>();
+ List<Path> paths = new ArrayList<>();
if (fs.exists(mobFamilyPath)) {
FileStatus[] files = fs.listStatus(mobFamilyPath);
for (FileStatus file : files) {
@@ -1015,7 +1015,7 @@ public class TestMobCompactor {
private static ExecutorService createThreadPool(Configuration conf) {
int maxThreads = 10;
long keepAliveTime = 60;
- final SynchronousQueue<Runnable> queue = new SynchronousQueue<Runnable>();
+ final SynchronousQueue<Runnable> queue = new SynchronousQueue<>();
ThreadPoolExecutor pool = new ThreadPoolExecutor(1, maxThreads,
keepAliveTime, TimeUnit.SECONDS, queue,
Threads.newDaemonThreadFactory("MobFileCompactionChore"),
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
index 3aaf0e4..290e6f4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
@@ -715,7 +715,7 @@ public class TestPartitionedMobCompactor {
@Override
protected List<Path> performCompaction(PartitionedMobCompactionRequest request)
throws IOException {
- List<Path> delFilePaths = new ArrayList<Path>();
+ List<Path> delFilePaths = new ArrayList<>();
for (CompactionDelPartition delPartition: request.getDelPartitions()) {
for (Path p : delPartition.listDelFiles()) {
delFilePaths.add(p);
@@ -848,7 +848,7 @@ public class TestPartitionedMobCompactor {
* @return the cell size
*/
private int countDelCellsInDelFiles(List<Path> paths) throws IOException {
- List<StoreFile> sfs = new ArrayList<StoreFile>();
+ List<StoreFile> sfs = new ArrayList<>();
int size = 0;
for(Path path : paths) {
StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.NONE);
@@ -878,7 +878,7 @@ public class TestPartitionedMobCompactor {
private static ExecutorService createThreadPool() {
int maxThreads = 10;
long keepAliveTime = 60;
- final SynchronousQueue<Runnable> queue = new SynchronousQueue<Runnable>();
+ final SynchronousQueue<Runnable> queue = new SynchronousQueue<>();
ThreadPoolExecutor pool = new ThreadPoolExecutor(1, maxThreads, keepAliveTime,
TimeUnit.SECONDS, queue, Threads.newDaemonThreadFactory("MobFileCompactionChore"),
new RejectedExecutionHandler() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
index 91279b6..e71318b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java
@@ -82,7 +82,7 @@ public class SimpleMasterProcedureManager extends MasterProcedureManager {
ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(desc.getInstance());
List<ServerName> serverNames = master.getServerManager().getOnlineServersList();
- List<String> servers = new ArrayList<String>();
+ List<String> servers = new ArrayList<>();
for (ServerName sn : serverNames) {
servers.add(sn.toString());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
index 7620bbb..58efa87 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java
@@ -119,15 +119,15 @@ public class SimpleRSProcedureManager extends RegionServerProcedureManager {
private final ExecutorCompletionService<Void> taskPool;
private final ThreadPoolExecutor executor;
private volatile boolean aborted;
- private final List<Future<Void>> futures = new ArrayList<Future<Void>>();
+ private final List<Future<Void>> futures = new ArrayList<>();
private final String name;
public SimpleSubprocedurePool(String name, Configuration conf) {
this.name = name;
executor = new ThreadPoolExecutor(1, 1, 500, TimeUnit.SECONDS,
- new LinkedBlockingQueue<Runnable>(),
+ new LinkedBlockingQueue<>(),
new DaemonThreadFactory("rs(" + name + ")-procedure-pool"));
- taskPool = new ExecutorCompletionService<Void>(executor);
+ taskPool = new ExecutorCompletionService<>(executor);
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
index c424b6d..fa934d9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
@@ -86,7 +86,7 @@ public class TestProcedure {
@Test(timeout = 60000)
public void testSingleMember() throws Exception {
// The member
- List<String> members = new ArrayList<String>();
+ List<String> members = new ArrayList<>();
members.add("member");
LatchedProcedure proc = new LatchedProcedure(coord, new ForeignExceptionDispatcher(), 100,
Integer.MAX_VALUE, "op", null, members);
@@ -130,7 +130,7 @@ public class TestProcedure {
@Test(timeout = 60000)
public void testMultipleMember() throws Exception {
// 2 members
- List<String> members = new ArrayList<String>();
+ List<String> members = new ArrayList<>();
members.add("member1");
members.add("member2");
@@ -181,7 +181,7 @@ public class TestProcedure {
@Test(timeout = 60000)
public void testErrorPropagation() throws Exception {
- List<String> members = new ArrayList<String>();
+ List<String> members = new ArrayList<>();
members.add("member");
Procedure proc = new Procedure(coord, new ForeignExceptionDispatcher(), 100,
Integer.MAX_VALUE, "op", null, members);
@@ -206,7 +206,7 @@ public class TestProcedure {
@Test(timeout = 60000)
public void testBarrieredErrorPropagation() throws Exception {
- List<String> members = new ArrayList<String>();
+ List<String> members = new ArrayList<>();
members.add("member");
LatchedProcedure proc = new LatchedProcedure(coord, new ForeignExceptionDispatcher(), 100,
Integer.MAX_VALUE, "op", null, members);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
index b52a8d6..2f0b5b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java
@@ -64,7 +64,7 @@ public class TestProcedureManager {
Admin admin = util.getAdmin();
byte[] result = admin.execProcedureWithRet(SimpleMasterProcedureManager.SIMPLE_SIGNATURE,
- "mytest", new HashMap<String, String>());
+ "mytest", new HashMap<>());
assertArrayEquals("Incorrect return data from execProcedure",
SimpleMasterProcedureManager.SIMPLE_DATA.getBytes(), result);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
index 211e9e6..9a77ce5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
@@ -141,20 +141,19 @@ public class TestZKProcedure {
// build and start members
// NOTE: There is a single subprocedure builder for all members here.
SubprocedureFactory subprocFactory = Mockito.mock(SubprocedureFactory.class);
- List<Pair<ProcedureMember, ZKProcedureMemberRpcs>> procMembers = new ArrayList<Pair<ProcedureMember, ZKProcedureMemberRpcs>>(
- members.length);
+ List<Pair<ProcedureMember, ZKProcedureMemberRpcs>> procMembers = new ArrayList<>(members.length);
// start each member
for (String member : members) {
ZooKeeperWatcher watcher = newZooKeeperWatcher();
ZKProcedureMemberRpcs comms = new ZKProcedureMemberRpcs(watcher, opDescription);
ThreadPoolExecutor pool2 = ProcedureMember.defaultPool(member, 1, KEEP_ALIVE);
ProcedureMember procMember = new ProcedureMember(comms, pool2, subprocFactory);
- procMembers.add(new Pair<ProcedureMember, ZKProcedureMemberRpcs>(procMember, comms));
+ procMembers.add(new Pair<>(procMember, comms));
comms.start(member, procMember);
}
// setup mock member subprocedures
- final List<Subprocedure> subprocs = new ArrayList<Subprocedure>();
+ final List<Subprocedure> subprocs = new ArrayList<>();
for (int i = 0; i < procMembers.size(); i++) {
ForeignExceptionDispatcher cohortMonitor = new ForeignExceptionDispatcher();
Subprocedure commit = Mockito
@@ -216,19 +215,18 @@ public class TestZKProcedure {
// start a member for each node
SubprocedureFactory subprocFactory = Mockito.mock(SubprocedureFactory.class);
- List<Pair<ProcedureMember, ZKProcedureMemberRpcs>> members = new ArrayList<Pair<ProcedureMember, ZKProcedureMemberRpcs>>(
- expected.size());
+ List<Pair<ProcedureMember, ZKProcedureMemberRpcs>> members = new ArrayList<>(expected.size());
for (String member : expected) {
ZooKeeperWatcher watcher = newZooKeeperWatcher();
ZKProcedureMemberRpcs controller = new ZKProcedureMemberRpcs(watcher, opDescription);
ThreadPoolExecutor pool2 = ProcedureMember.defaultPool(member, 1, KEEP_ALIVE);
ProcedureMember mem = new ProcedureMember(controller, pool2, subprocFactory);
- members.add(new Pair<ProcedureMember, ZKProcedureMemberRpcs>(mem, controller));
+ members.add(new Pair<>(mem, controller));
controller.start(member, mem);
}
// setup mock subprocedures
- final List<Subprocedure> cohortTasks = new ArrayList<Subprocedure>();
+ final List<Subprocedure> cohortTasks = new ArrayList<>();
final int[] elem = new int[1];
for (int i = 0; i < members.size(); i++) {
ForeignExceptionDispatcher cohortMonitor = new ForeignExceptionDispatcher();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
index 5b058b3..d864db2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
@@ -180,7 +180,7 @@ public class TestZKProcedureControllers {
CountDownLatch prepared = new CountDownLatch(expected.size());
CountDownLatch committed = new CountDownLatch(expected.size());
- ArrayList<byte[]> dataFromMembers = new ArrayList<byte[]>();
+ ArrayList<byte[]> dataFromMembers = new ArrayList<>();
// mock out coordinator so we can keep track of zk progress
ProcedureCoordinator coordinator = setupMockCoordinator(operationName,
@@ -256,7 +256,7 @@ public class TestZKProcedureControllers {
final CountDownLatch prepared = new CountDownLatch(expected.size());
final CountDownLatch committed = new CountDownLatch(expected.size());
- ArrayList<byte[]> dataFromMembers = new ArrayList<byte[]>();
+ ArrayList<byte[]> dataFromMembers = new ArrayList<>();
// mock out coordinator so we can keep track of zk progress
ProcedureCoordinator coordinator = setupMockCoordinator(operationName,
@@ -403,14 +403,13 @@ public class TestZKProcedureControllers {
// make a cohort controller for each expected node
- List<ZKProcedureMemberRpcs> cohortControllers = new ArrayList<ZKProcedureMemberRpcs>();
+ List<ZKProcedureMemberRpcs> cohortControllers = new ArrayList<>();
for (String nodeName : expected) {
ZKProcedureMemberRpcs cc = new ZKProcedureMemberRpcs(watcher, operationName);
cc.start(nodeName, member);
cohortControllers.add(cc);
}
- return new Pair<ZKProcedureCoordinatorRpcs, List<ZKProcedureMemberRpcs>>(
- controller, cohortControllers);
+ return new Pair<>(controller, cohortControllers);
}
};
@@ -427,7 +426,7 @@ public class TestZKProcedureControllers {
ProcedureMember member, List<String> expected) throws Exception {
// make a cohort controller for each expected node
- List<ZKProcedureMemberRpcs> cohortControllers = new ArrayList<ZKProcedureMemberRpcs>();
+ List<ZKProcedureMemberRpcs> cohortControllers = new ArrayList<>();
for (String nodeName : expected) {
ZKProcedureMemberRpcs cc = new ZKProcedureMemberRpcs(watcher, operationName);
cc.start(nodeName, member);
@@ -439,8 +438,7 @@ public class TestZKProcedureControllers {
watcher, operationName, CONTROLLER_NODE_NAME);
controller.start(coordinator);
- return new Pair<ZKProcedureCoordinatorRpcs, List<ZKProcedureMemberRpcs>>(
- controller, cohortControllers);
+ return new Pair<>(controller, cohortControllers);
}
};
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
index 057a35d..1e3a0c2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
@@ -41,19 +41,19 @@ public class TestReplicationProtobuf {
*/
@Test
public void testGetCellScanner() throws IOException {
- List<Cell> a = new ArrayList<Cell>();
+ List<Cell> a = new ArrayList<>();
KeyValue akv = new KeyValue(Bytes.toBytes("a"), -1L);
a.add(akv);
// Add a few just to make it less regular.
a.add(new KeyValue(Bytes.toBytes("aa"), -1L));
a.add(new KeyValue(Bytes.toBytes("aaa"), -1L));
- List<Cell> b = new ArrayList<Cell>();
+ List<Cell> b = new ArrayList<>();
KeyValue bkv = new KeyValue(Bytes.toBytes("b"), -1L);
a.add(bkv);
- List<Cell> c = new ArrayList<Cell>();
+ List<Cell> c = new ArrayList<>();
KeyValue ckv = new KeyValue(Bytes.toBytes("c"), -1L);
c.add(ckv);
- List<List<? extends Cell>> all = new ArrayList<List<? extends Cell>>();
+ List<List<? extends Cell>> all = new ArrayList<>();
all.add(a);
all.add(b);
all.add(c);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java
index 4dce696..7229c40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java
@@ -39,7 +39,7 @@ public class AbstractTestDateTieredCompactionPolicy extends TestCompactionPolicy
EnvironmentEdgeManager.injectEdge(timeMachine);
// Has to be > 0 and < now.
timeMachine.setValue(1);
- ArrayList<Long> ageInDisk = new ArrayList<Long>();
+ ArrayList<Long> ageInDisk = new ArrayList<>();
for (int i = 0; i < sizes.length; i++) {
ageInDisk.add(0L);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index bbcdce4..dd20259 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -117,7 +117,7 @@ public class DataBlockEncodingTool {
private static int benchmarkNTimes = DEFAULT_BENCHMARK_N_TIMES;
private static int benchmarkNOmit = DEFAULT_BENCHMARK_N_OMIT;
- private List<EncodedDataBlock> codecs = new ArrayList<EncodedDataBlock>();
+ private List<EncodedDataBlock> codecs = new ArrayList<>();
private long totalPrefixLength = 0;
private long totalKeyLength = 0;
private long totalValueLength = 0;
@@ -236,8 +236,7 @@ public class DataBlockEncodingTool {
KeyValue currentKv;
scanner.seek(KeyValue.LOWESTKEY);
- List<Iterator<Cell>> codecIterators =
- new ArrayList<Iterator<Cell>>();
+ List<Iterator<Cell>> codecIterators = new ArrayList<>();
for(EncodedDataBlock codec : codecs) {
codecIterators.add(codec.getIterator(HFileBlock.headerSize(useHBaseChecksum)));
}
@@ -326,7 +325,7 @@ public class DataBlockEncodingTool {
int totalSize = 0;
// decompression time
- List<Long> durations = new ArrayList<Long>();
+ List<Long> durations = new ArrayList<>();
for (int itTime = 0; itTime < benchmarkNTimes; ++itTime) {
totalSize = 0;
@@ -352,7 +351,7 @@ public class DataBlockEncodingTool {
prevTotalSize = totalSize;
}
- List<Long> encodingDurations = new ArrayList<Long>();
+ List<Long> encodingDurations = new ArrayList<>();
for (int itTime = 0; itTime < benchmarkNTimes; ++itTime) {
final long startTime = System.nanoTime();
codec.encodeData();
@@ -390,7 +389,7 @@ public class DataBlockEncodingTool {
System.out.println(name + ":");
// compress it
- List<Long> compressDurations = new ArrayList<Long>();
+ List<Long> compressDurations = new ArrayList<>();
ByteArrayOutputStream compressedStream = new ByteArrayOutputStream();
CompressionOutputStream compressingStream =
algorithm.createPlainCompressionStream(compressedStream, compressor);
@@ -421,7 +420,7 @@ public class DataBlockEncodingTool {
byte[] compBuffer = compressedStream.toByteArray();
// uncompress it several times and measure performance
- List<Long> durations = new ArrayList<Long>();
+ List<Long> durations = new ArrayList<>();
for (int itTime = 0; itTime < benchmarkNTimes; ++itTime) {
final long startTime = System.nanoTime();
byte[] newBuf = new byte[length + 1];
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
index 9638e69..eb77c28 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
@@ -56,7 +56,7 @@ public class EncodedSeekPerformanceTest {
}
private List<Cell> prepareListOfTestSeeks(Path path) throws IOException {
- List<Cell> allKeyValues = new ArrayList<Cell>();
+ List<Cell> allKeyValues = new ArrayList<>();
// read all of the key values
StoreFile storeFile = new StoreFile(testingUtility.getTestFileSystem(),
@@ -74,7 +74,7 @@ public class EncodedSeekPerformanceTest {
storeFile.closeReader(cacheConf.shouldEvictOnClose());
// pick seeks by random
- List<Cell> seeks = new ArrayList<Cell>();
+ List<Cell> seeks = new ArrayList<>();
for (int i = 0; i < numberOfSeeks; ++i) {
Cell keyValue = allKeyValues.get(
randomizer.nextInt(allKeyValues.size()));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
index a4e7f9b..59aded8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
@@ -39,7 +39,7 @@ public class KeyValueScanFixture extends CollectionBackedScanner {
}
public static List<KeyValueScanner> scanFixture(KeyValue[] ... kvArrays) {
- ArrayList<KeyValueScanner> scanners = new ArrayList<KeyValueScanner>();
+ ArrayList<KeyValueScanner> scanners = new ArrayList<>();
for (KeyValue [] kvs : kvArrays) {
scanners.add(new KeyValueScanFixture(CellComparator.COMPARATOR, kvs));
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
index 5b4b0c1..1169434 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockStoreFile.java
@@ -36,7 +36,7 @@ public class MockStoreFile extends StoreFile {
boolean isRef = false;
long ageInDisk;
long sequenceid;
- private Map<byte[], byte[]> metadata = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
+ private Map<byte[], byte[]> metadata = new TreeMap<>(Bytes.BYTES_COMPARATOR);
byte[] splitPoint = null;
TimeRangeTracker timeRangeTracker;
long entryCount;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
index aa2bc1a..036c11c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequ
* <code>${HBASE_HOME}/bin/hbase ./bin/hbase org.apache.hadoop.hbase.OOMERegionServer start</code>.
*/
public class OOMERegionServer extends HRegionServer {
- private List<Put> retainer = new ArrayList<Put>();
+ private List<Put> retainer = new ArrayList<>();
public OOMERegionServer(HBaseConfiguration conf, CoordinatedStateManager cp)
throws IOException, InterruptedException {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
index d2e78b7..cfae7cb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
@@ -144,13 +144,13 @@ public class RegionAsTable implements Table {
@Override
public Result next() throws IOException {
- List<Cell> cells = new ArrayList<Cell>();
+ List<Cell> cells = new ArrayList<>();
return regionScanner.next(cells)? Result.create(cells): null;
}
@Override
public Result[] next(int nbRows) throws IOException {
- List<Result> results = new ArrayList<Result>(nbRows);
+ List<Result> results = new ArrayList<>(nbRows);
for (int i = 0; i < nbRows; i++) {
Result result = next();
if (result == null) break;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index d00eef1..ef3ce06 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -493,7 +493,7 @@ public class TestAtomicOperation {
}
}
long ts = timeStamps.incrementAndGet();
- List<Mutation> mrm = new ArrayList<Mutation>();
+ List<Mutation> mrm = new ArrayList<>();
if (op) {
Put p = new Put(row2, ts);
p.addColumn(fam1, qual1, value1);
@@ -518,7 +518,7 @@ public class TestAtomicOperation {
// check: should always see exactly one column
Scan s = new Scan(row);
RegionScanner rs = region.getScanner(s);
- List<Cell> r = new ArrayList<Cell>();
+ List<Cell> r = new ArrayList<>();
while (rs.next(r))
;
rs.close();
@@ -610,7 +610,7 @@ public class TestAtomicOperation {
ctx.stop();
Scan s = new Scan();
RegionScanner scanner = region.getScanner(s);
- List<Cell> results = new ArrayList<Cell>();
+ List<Cell> results = new ArrayList<>();
ScannerContext scannerContext = ScannerContext.newBuilder().setBatchLimit(2).build();
scanner.next(results, scannerContext);
for (Cell keyValue : results) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index edd7847..59c256a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -389,7 +389,7 @@ public class TestBlocksRead {
Scan scan = new Scan();
scan.setCacheBlocks(false);
RegionScanner rs = region.getScanner(scan);
- List<Cell> result = new ArrayList<Cell>(2);
+ List<Cell> result = new ArrayList<>(2);
rs.next(result);
assertEquals(2 * BLOOM_TYPE.length, result.size());
rs.close();
@@ -402,7 +402,7 @@ public class TestBlocksRead {
blocksStart = blocksEnd;
scan.setCacheBlocks(true);
rs = region.getScanner(scan);
- result = new ArrayList<Cell>(2);
+ result = new ArrayList<>(2);
rs.next(result);
assertEquals(2 * BLOOM_TYPE.length, result.size());
rs.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
index b2ba97c..497fd03 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
@@ -101,7 +101,7 @@ public class TestBlocksScanned extends HBaseTestCase {
scan.setMaxVersions(1);
InternalScanner s = r.getScanner(scan);
- List<Cell> results = new ArrayList<Cell>();
+ List<Cell> results = new ArrayList<>();
while (s.next(results))
;
s.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
index 4c025c4..418aadf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
@@ -106,7 +106,7 @@ public class TestBulkLoad {
byte[] familyName = familyPaths.get(0).getFirst();
String storeFileName = familyPaths.get(0).getSecond();
storeFileName = (new Path(storeFileName)).getName();
- List<String> storeFileNames = new ArrayList<String>();
+ List<String> storeFileNames = new ArrayList<>();
storeFileNames.add(storeFileName);
when(log.append(any(HRegionInfo.class), any(WALKey.class),
argThat(bulkLogWalEdit(WALEdit.BULK_LOAD, tableName.toBytes(),
@@ -129,8 +129,7 @@ public class TestBulkLoad {
@Test
public void bulkHLogShouldThrowNoErrorAndWriteMarkerWithBlankInput() throws IOException {
- testRegionWithFamilies(family1).bulkLoadHFiles(new ArrayList<Pair<byte[], String>>(),
- false, null);
+ testRegionWithFamilies(family1).bulkLoadHFiles(new ArrayList<>(),false, null);
}
@Test
@@ -219,7 +218,7 @@ public class TestBulkLoad {
}
private Pair<byte[], String> withMissingHFileForFamily(byte[] family) {
- return new Pair<byte[], String>(family, getNotExistFilePath());
+ return new Pair<>(family, getNotExistFilePath());
}
private String getNotExistFilePath() {
@@ -230,7 +229,7 @@ public class TestBulkLoad {
private Pair<byte[], String> withInvalidColumnFamilyButProperHFileLocation(byte[] family)
throws IOException {
createHFileForFamilies(family);
- return new Pair<byte[], String>(new byte[]{0x00, 0x01, 0x02}, getNotExistFilePath());
+ return new Pair<>(new byte[]{0x00, 0x01, 0x02}, getNotExistFilePath());
}
@@ -258,13 +257,13 @@ public class TestBulkLoad {
}
private List<Pair<byte[], String>> getBlankFamilyPaths(){
- return new ArrayList<Pair<byte[], String>>();
+ return new ArrayList<>();
}
private List<Pair<byte[], String>> withFamilyPathsFor(byte[]... families) throws IOException {
List<Pair<byte[], String>> familyPaths = getBlankFamilyPaths();
for (byte[] family : families) {
- familyPaths.add(new Pair<byte[], String>(family, createHFileForFamilies(family)));
+ familyPaths.add(new Pair<>(family, createHFileForFamilies(family)));
}
return familyPaths;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
index 4a73eda..9fed202 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
@@ -137,7 +137,7 @@ public class TestCacheOnWriteInSchema {
@Parameters
public static Collection<Object[]> getParameters() {
- List<Object[]> cowTypes = new ArrayList<Object[]>();
+ List<Object[]> cowTypes = new ArrayList<>();
for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
cowTypes.add(new Object[] { cowType });
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
index c59f64b..5cfa17d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
@@ -90,14 +90,14 @@ public class TestColumnSeeking {
double majorPercentage = 0.2;
double putPercentage = 0.2;
- HashMap<String, KeyValue> allKVMap = new HashMap<String, KeyValue>();
+ HashMap<String, KeyValue> allKVMap = new HashMap<>();
HashMap<String, KeyValue>[] kvMaps = new HashMap[numberOfTests];
ArrayList<String>[] columnLists = new ArrayList[numberOfTests];
for (int i = 0; i < numberOfTests; i++) {
- kvMaps[i] = new HashMap<String, KeyValue>();
- columnLists[i] = new ArrayList<String>();
+ kvMaps[i] = new HashMap<>();
+ columnLists[i] = new ArrayList<>();
for (String column : allColumns) {
if (Math.random() < selectPercent) {
columnLists[i].add(column);
@@ -162,7 +162,7 @@ public class TestColumnSeeking {
}
InternalScanner scanner = region.getScanner(scan);
- List<Cell> results = new ArrayList<Cell>();
+ List<Cell> results = new ArrayList<>();
while (scanner.next(results))
;
assertEquals(kvSet.size(), results.size());
@@ -201,15 +201,15 @@ public class TestColumnSeeking {
double majorPercentage = 0.2;
double putPercentage = 0.2;
- HashMap<String, KeyValue> allKVMap = new HashMap<String, KeyValue>();
+ HashMap<String, KeyValue> allKVMap = new HashMap<>();
HashMap<String, KeyValue>[] kvMaps = new HashMap[numberOfTests];
ArrayList<String>[] columnLists = new ArrayList[numberOfTests];
String valueString = "Value";
for (int i = 0; i < numberOfTests; i++) {
- kvMaps[i] = new HashMap<String, KeyValue>();
- columnLists[i] = new ArrayList<String>();
+ kvMaps[i] = new HashMap<>();
+ columnLists[i] = new ArrayList<>();
for (String column : allColumns) {
if (Math.random() < selectPercent) {
columnLists[i].add(column);
@@ -274,7 +274,7 @@ public class TestColumnSeeking {
}
InternalScanner scanner = region.getScanner(scan);
- List<Cell> results = new ArrayList<Cell>();
+ List<Cell> results = new ArrayList<>();
while (scanner.next(results))
;
assertEquals(kvSet.size(), results.size());
@@ -285,7 +285,7 @@ public class TestColumnSeeking {
}
List<String> generateRandomWords(int numberOfWords, String suffix) {
- Set<String> wordSet = new HashSet<String>();
+ Set<String> wordSet = new HashSet<>();
for (int i = 0; i < numberOfWords; i++) {
int lengthOfWords = (int) (Math.random() * 5) + 1;
char[] wordChar = new char[lengthOfWords];
@@ -300,7 +300,7 @@ public class TestColumnSeeking {
}
wordSet.add(word);
}
- List<String> wordList = new ArrayList<String>(wordSet);
+ List<String> wordList = new ArrayList<>(wordSet);
return wordList;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
index 65ad956..63bbe65 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
@@ -191,7 +191,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
InternalScanner scanner = new StoreScanner(new Scan(
Bytes.toBytes(startRowId)), scanInfo, scanType, null,
memstore.getScanners(0));
- List<Cell> results = new ArrayList<Cell>();
+ List<Cell> results = new ArrayList<>();
for (int i = 0; scanner.next(results); i++) {
int rowId = startRowId + i;
Cell left = results.get(0);
@@ -199,7 +199,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
assertTrue("Row name",
CellComparator.COMPARATOR.compareRows(left, row1, 0, row1.length) == 0);
assertEquals("Count of columns", QUALIFIER_COUNT, results.size());
- List<Cell> row = new ArrayList<Cell>();
+ List<Cell> row = new ArrayList<>();
for (Cell kv : results) {
row.add(kv);
}
@@ -255,7 +255,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
public void testUpsertMemstoreSize() throws Exception {
MemstoreSize oldSize = memstore.size();
- List<Cell> l = new ArrayList<Cell>();
+ List<Cell> l = new ArrayList<>();
KeyValue kv1 = KeyValueTestUtil.create("r", "f", "q", 100, "v");
KeyValue kv2 = KeyValueTestUtil.create("r", "f", "q", 101, "v");
KeyValue kv3 = KeyValueTestUtil.create("r", "f", "q", 102, "v");
@@ -313,7 +313,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
t = runSnapshot(memstore, true);
// test the case that the timeOfOldestEdit is updated after a KV upsert
- List<Cell> l = new ArrayList<Cell>();
+ List<Cell> l = new ArrayList<>();
KeyValue kv1 = KeyValueTestUtil.create("r", "f", "q", 100, "v");
kv1.setSequenceId(100);
l.add(kv1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index bc51c41..1bf6ea7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -370,8 +370,7 @@ public class TestCompaction {
// setup a region/store with some files
int numStores = r.getStores().size();
- List<Pair<CompactionRequest, Store>> requests =
- new ArrayList<Pair<CompactionRequest, Store>>(numStores);
+ List<Pair<CompactionRequest, Store>> requests = new ArrayList<>(numStores);
CountDownLatch latch = new CountDownLatch(numStores);
// create some store files and setup requests for each store on which we want to do a
// compaction
@@ -379,8 +378,7 @@ public class TestCompaction {
createStoreFile(r, store.getColumnFamilyName());
createStoreFile(r, store.getColumnFamilyName());
createStoreFile(r, store.getColumnFamilyName());
- requests
- .add(new Pair<CompactionRequest, Store>(new TrackableCompactionRequest(latch), store));
+ requests.add(new Pair<>(new TrackableCompactionRequest(latch), store));
}
thread.requestCompaction(r, "test mulitple custom comapctions", Store.PRIORITY_USER,
@@ -393,8 +391,8 @@ public class TestCompaction {
}
private class StoreMockMaker extends StatefulStoreMockMaker {
- public ArrayList<StoreFile> compacting = new ArrayList<StoreFile>();
- public ArrayList<StoreFile> notCompacting = new ArrayList<StoreFile>();
+ public ArrayList<StoreFile> compacting = new ArrayList<>();
+ public ArrayList<StoreFile> notCompacting = new ArrayList<>();
private ArrayList<Integer> results;
public StoreMockMaker(ArrayList<Integer> results) {
@@ -410,7 +408,7 @@ public class TestCompaction {
@Override
public List<StoreFile> preSelect(List<StoreFile> filesCompacting) {
- return new ArrayList<StoreFile>();
+ return new ArrayList<>();
}
@Override
@@ -425,13 +423,13 @@ public class TestCompaction {
public List<Path> compact(ThroughputController throughputController, User user)
throws IOException {
finishCompaction(this.selectedFiles);
- return new ArrayList<Path>();
+ return new ArrayList<>();
}
}
@Override
public synchronized CompactionContext selectCompaction() {
- CompactionContext ctx = new TestCompactionContext(new ArrayList<StoreFile>(notCompacting));
+ CompactionContext ctx = new TestCompactionContext(new ArrayList<>(notCompacting));
compacting.addAll(notCompacting);
notCompacting.clear();
try {
@@ -484,18 +482,18 @@ public class TestCompaction {
} catch (InterruptedException e) {
Assume.assumeNoException(e);
}
- return new ArrayList<Path>();
+ return new ArrayList<>();
}
@Override
public List<StoreFile> preSelect(List<StoreFile> filesCompacting) {
- return new ArrayList<StoreFile>();
+ return new ArrayList<>();
}
@Override
public boolean select(List<StoreFile> f, boolean i, boolean m, boolean e)
throws IOException {
- this.request = new CompactionRequest(new ArrayList<StoreFile>());
+ this.request = new CompactionRequest(new ArrayList<>());
return true;
}
}
@@ -568,7 +566,7 @@ public class TestCompaction {
});
// Set up store mocks for 2 "real" stores and the one we use for blocking CST.
- ArrayList<Integer> results = new ArrayList<Integer>();
+ ArrayList<Integer> results = new ArrayList<>();
StoreMockMaker sm = new StoreMockMaker(results), sm2 = new StoreMockMaker(results);
Store store = sm.createStoreMock("store1"), store2 = sm2.createStoreMock("store2");
BlockingStoreMockMaker blocker = new BlockingStoreMockMaker();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
index 7c7bfd3..8e85730 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
@@ -91,7 +91,7 @@ public class TestCompactionArchiveConcurrentClose {
HRegionInfo info = new HRegionInfo(tableName, null, null, false);
Region region = initHRegion(htd, info);
RegionServerServices rss = mock(RegionServerServices.class);
- List<Region> regions = new ArrayList<Region>();
+ List<Region> regions = new ArrayList<>();
regions.add(region);
when(rss.getOnlineRegions()).thenReturn(regions);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
index cf99258..89b2368 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java
@@ -98,7 +98,7 @@ public class TestCompactionArchiveIOException {
HRegionInfo info = new HRegionInfo(tableName, null, null, false);
final HRegion region = initHRegion(htd, info);
RegionServerServices rss = mock(RegionServerServices.class);
- List<Region> regions = new ArrayList<Region>();
+ List<Region> regions = new ArrayList<>();
regions.add(region);
when(rss.getOnlineRegions()).thenReturn(regions);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
index 24b3667..7154511 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java
@@ -136,7 +136,7 @@ public class TestCompactionPolicy {
}
ArrayList<Long> toArrayList(long... numbers) {
- ArrayList<Long> result = new ArrayList<Long>();
+ ArrayList<Long> result = new ArrayList<>();
for (long i : numbers) {
result.add(i);
}
@@ -144,7 +144,7 @@ public class TestCompactionPolicy {
}
List<StoreFile> sfCreate(long... sizes) throws IOException {
- ArrayList<Long> ageInDisk = new ArrayList<Long>();
+ ArrayList<Long> ageInDisk = new ArrayList<>();
for (int i = 0; i < sizes.length; i++) {
ageInDisk.add(0L);
}
@@ -156,7 +156,7 @@ public class TestCompactionPolicy {
}
List<StoreFile> sfCreate(boolean isReference, long... sizes) throws IOException {
- ArrayList<Long> ageInDisk = new ArrayList<Long>(sizes.length);
+ ArrayList<Long> ageInDisk = new ArrayList<>(sizes.length);
for (int i = 0; i < sizes.length; i++) {
ageInDisk.add(0L);
}
@@ -196,8 +196,8 @@ public class TestCompactionPolicy {
// Test Default compactions
CompactionRequest result =
((RatioBasedCompactionPolicy) store.storeEngine.getCompactionPolicy()).selectCompaction(
- candidates, new ArrayList<StoreFile>(), false, isOffPeak, forcemajor);
- List<StoreFile> actual = new ArrayList<StoreFile>(result.getFiles());
+ candidates, new ArrayList<>(), false, isOffPeak, forcemajor);
+ List<StoreFile> actual = new ArrayList<>(result.getFiles());
if (isOffPeak && !forcemajor) {
Assert.assertTrue(result.isOffPeak());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
index 0e6fb54..8c55327 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
@@ -220,7 +220,7 @@ public class TestCompactionState {
private static void loadData(final Table ht, final byte[][] families,
final int rows, final int flushes) throws IOException {
- List<Put> puts = new ArrayList<Put>(rows);
+ List<Put> puts = new ArrayList<>(rows);
byte[] qualifier = Bytes.toBytes("val");
for (int i = 0; i < flushes; i++) {
for (int k = 0; k < rows; k++) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 174843e..dfea761 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -142,7 +142,7 @@ public class TestCompoundBloomFilter {
}
private List<KeyValue> createSortedKeyValues(Random rand, int n) {
- List<KeyValue> kvList = new ArrayList<KeyValue>(n);
+ List<KeyValue> kvList = new ArrayList<>(n);
for (int i = 0; i < n; ++i)
kvList.add(RandomKeyValueUtil.randomKeyValue(rand));
Collections.sort(kvList, CellComparator.COMPARATOR);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
index 68b0ba3..cec5fc7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
@@ -67,7 +67,7 @@ public class TestCorruptedRegionStoreFile {
@Rule public TestTableName TEST_TABLE = new TestTableName();
- private final ArrayList<Path> storeFiles = new ArrayList<Path>();
+ private final ArrayList<Path> storeFiles = new ArrayList<>();
private Path tableDir;
private int rowCount;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
index 4fa18b8..3c41fc5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
@@ -119,7 +119,7 @@ public class TestDefaultCompactSelection extends TestCompactionPolicy {
compactEquals(sfCreate(true, 7, 6, 5, 4, 3, 2, 1), 7, 6, 5, 4, 3);
// empty case
- compactEquals(new ArrayList<StoreFile>() /* empty */);
+ compactEquals(new ArrayList<>() /* empty */);
// empty case (because all files are too big)
compactEquals(sfCreate(tooBig, tooBig) /* empty */);
}
@@ -175,7 +175,7 @@ public class TestDefaultCompactSelection extends TestCompactionPolicy {
// Test Default compactions
CompactionRequest result = ((RatioBasedCompactionPolicy) store.storeEngine
.getCompactionPolicy()).selectCompaction(candidates,
- new ArrayList<StoreFile>(), false, false, false);
+ new ArrayList<>(), false, false, false);
Assert.assertTrue(result.getFiles().isEmpty());
store.setScanInfo(oldScanInfo);
}