Posted to commits@hbase.apache.org by st...@apache.org on 2018/01/23 01:14:36 UTC
[1/9] hbase git commit: HBASE-19811 Fix findbugs and error-prone warnings in hbase-server (branch-2)
Repository: hbase
Updated Branches:
refs/heads/branch-2 1e5fc1ed6 -> b1269ec57
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 5098e0b..410dd0c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -101,7 +101,7 @@ public abstract class MultiThreadedAction {
@Override
public byte[] getDeterministicUniqueKey(long keyBase) {
- return LoadTestKVGenerator.md5PrefixedKey(keyBase).getBytes();
+ return Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(keyBase));
}
@Override
@@ -114,7 +114,7 @@ public abstract class MultiThreadedAction {
int numColumns = minColumnsPerKey + random.nextInt(maxColumnsPerKey - minColumnsPerKey + 1);
byte[][] columns = new byte[numColumns][];
for (int i = 0; i < numColumns; ++i) {
- columns[i] = Integer.toString(i).getBytes();
+ columns[i] = Bytes.toBytes(Integer.toString(i));
}
return columns;
}
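[Editor's note: this String.getBytes() -> Bytes.toBytes() substitution recurs throughout the commit (LoadTestDataGenerator, TestWALSplit, etc.). The no-argument String.getBytes() encodes with the platform default charset, which error-prone flags because the result varies with the JVM's file.encoding; HBase's Bytes.toBytes(String) always encodes UTF-8. A minimal standalone sketch of the difference:

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CharsetDemo {
      public static void main(String[] args) {
        String key = "00000042-key";
        byte[] platformDependent = key.getBytes();                  // varies with file.encoding
        byte[] utf8ViaHBase = Bytes.toBytes(key);                   // always UTF-8
        byte[] utf8Explicit = key.getBytes(StandardCharsets.UTF_8); // same bytes, no HBase helper
        System.out.println(utf8ViaHBase.length == utf8Explicit.length); // true
      }
    }
]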
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
index 447cca8..6864366 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
@@ -201,8 +201,7 @@ public class MultiThreadedReader extends MultiThreadedAction
"to read " + k + " is out of range (startKey=" + startKey +
", endKey=" + endKey + ")");
}
- if (k % numThreads != readerId ||
- writer != null && writer.failedToWriteKey(k)) {
+ if (k % numThreads != readerId || (writer != null && writer.failedToWriteKey(k))) {
// Skip keys that this thread should not read, as well as the keys
// that we know the writer failed to write.
continue;
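[Editor's note: the added parentheses are behavior-neutral — && already binds tighter than ||, so the commit is only making the grouping explicit to satisfy findbugs' operator-precedence check. A tiny standalone verification of the precedence claim:

    public class PrecedenceDemo {
      public static void main(String[] args) {
        boolean a = true, b = false, c = false;
        // a || b && c parses as a || (b && c), not (a || b) && c.
        System.out.println((a || b && c) == (a || (b && c))); // true
        System.out.println((a || b && c) == ((a || b) && c)); // false
      }
    }
]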
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
index 7112d50..7746bea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestBoundedPriorityBlockingQueue.java
@@ -42,7 +42,7 @@ import org.junit.experimental.categories.Category;
public class TestBoundedPriorityBlockingQueue {
private final static int CAPACITY = 16;
- class TestObject {
+ static class TestObject {
private final int priority;
private final int seqId;
@@ -60,7 +60,7 @@ public class TestBoundedPriorityBlockingQueue {
}
}
- class TestObjectComparator implements Comparator<TestObject> {
+ static class TestObjectComparator implements Comparator<TestObject> {
public TestObjectComparator() {}
@Override
@@ -208,6 +208,7 @@ public class TestBoundedPriorityBlockingQueue {
final CyclicBarrier threadsStarted = new CyclicBarrier(2);
ExecutorService executor = Executors.newFixedThreadPool(2);
executor.execute(new Runnable() {
+ @Override
public void run() {
try {
assertNull(queue.poll(1000, TimeUnit.MILLISECONDS));
@@ -221,6 +222,7 @@ public class TestBoundedPriorityBlockingQueue {
});
executor.execute(new Runnable() {
+ @Override
public void run() {
try {
threadsStarted.await();
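[Editor's note: marking the nested helper classes static drops the implicit reference to the enclosing test instance; findbugs flags non-static inner classes that never touch outer state (SIC_INNER_SHOULD_BE_STATIC). A minimal sketch with illustrative names:

    public class Outer {
      class Inner { }         // carries a hidden Outer.this reference
      static class Nested { } // no outer reference; can outlive Outer, GC-friendly

      public static void main(String[] args) {
        Nested n = new Nested();           // no Outer instance needed
        Inner i = new Outer().new Inner(); // requires an enclosing instance
        System.out.println(n + " " + i);
      }
    }
]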
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java
index 4c6990e..ecc6611 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestByteBuffUtils.java
@@ -36,7 +36,7 @@ public class TestByteBuffUtils {
ByteBuffer bb2 = ByteBuffer.allocate(50);
MultiByteBuff src = new MultiByteBuff(bb1, bb2);
for (int i = 0; i < 7; i++) {
- src.putLong(8l);
+ src.putLong(8L);
}
src.put((byte) 1);
src.put((byte) 1);
@@ -58,7 +58,7 @@ public class TestByteBuffUtils {
bb3 = ByteBuffer.allocate(100);
SingleByteBuff sbb = new SingleByteBuff(bb3);
for (int i = 0; i < 7; i++) {
- sbb.putLong(8l);
+ sbb.putLong(8L);
}
sbb.put((byte) 1);
sbb.put((byte) 1);
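[Editor's note: the 8l -> 8L change is purely cosmetic — identical bytecode — but the checkers require the uppercase suffix because a lowercase 'l' is easily misread as the digit '1':

    public class LongSuffixDemo {
      public static void main(String[] args) {
        long ambiguous = 8l; // lowercase suffix reads like "81" at a glance
        long clear = 8L;     // uppercase suffix removes the ambiguity
        System.out.println(ambiguous == clear); // true: same value, clearer source
      }
    }
]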
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
index 8f503e0..a554e99 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
@@ -161,11 +161,12 @@ public class TestFSHDFSUtils {
/**
* Version of DFS that has HDFS-4525 in it.
*/
- class IsFileClosedDistributedFileSystem extends DistributedFileSystem {
+ static class IsFileClosedDistributedFileSystem extends DistributedFileSystem {
/**
* Close status of a file. Copied over from HDFS-4525
* @return true if file is already closed
**/
+ @Override
public boolean isFileClosed(Path f) throws IOException{
return false;
}
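[Editor's note: the @Override additions in this commit make the compiler verify that each method really overrides a supertype method — here, DistributedFileSystem.isFileClosed(Path) from HDFS-4525 — so a signature typo becomes a build error instead of a silently unused method. A generic sketch with hypothetical names:

    class Connection {
      public void close() { }
    }

    class PooledConnection extends Connection {
      @Override
      public void close() { } // compiler verifies this overrides Connection.close()

      // @Override
      // public void closee() { } // would not compile: nothing to override,
      //                          // so the typo is caught at build time
    }
]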
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
index cb23a0b..be302d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
@@ -77,6 +77,7 @@ public class TestFSVisitor {
final Set<String> families = new HashSet<>();
final Set<String> hfiles = new HashSet<>();
FSVisitor.visitTableStoreFiles(fs, tableDir, new FSVisitor.StoreFileVisitor() {
+ @Override
public void storeFile(final String region, final String family, final String hfileName)
throws IOException {
regions.add(region);
@@ -84,9 +85,9 @@ public class TestFSVisitor {
hfiles.add(hfileName);
}
});
- assertEquals(tableRegions, regions);
- assertEquals(tableFamilies, families);
- assertEquals(tableHFiles, hfiles);
+ assertEquals(regions, tableRegions);
+ assertEquals(families, tableFamilies);
+ assertEquals(hfiles, tableHFiles);
}
/*
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index 641f66e..31f1909 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -131,12 +131,12 @@ public class TestHBaseFsckEncryption {
// Insure HBck doesn't consider them corrupt
HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, htd.getTableName());
- assertEquals(res.getRetCode(), 0);
+ assertEquals(0, res.getRetCode());
HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
- assertEquals(hfcc.getCorrupted().size(), 0);
- assertEquals(hfcc.getFailures().size(), 0);
- assertEquals(hfcc.getQuarantined().size(), 0);
- assertEquals(hfcc.getMissing().size(), 0);
+ assertEquals(0, hfcc.getCorrupted().size());
+ assertEquals(0, hfcc.getFailures().size());
+ assertEquals(0, hfcc.getQuarantined().size());
+ assertEquals(0, hfcc.getMissing().size());
}
private List<Path> findStorefilePaths(TableName tableName) throws Exception {
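[Editor's note: these swaps, repeated in TestHBaseFsckMOB, TestJSONMetricUtil, TestZooKeeperACL and elsewhere, follow JUnit's assertEquals(expected, actual) convention. Reversed arguments pass and fail identically, but the failure message blames the wrong side. A minimal sketch assuming JUnit 4:

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class ArgumentOrderDemo {
      @Test
      public void failureMessageReadsCorrectly() {
        int retCode = 0; // stand-in for res.getRetCode()
        // On failure this reads "expected:<0> but was:<...>" -- matching reality.
        assertEquals(0, retCode);
        // assertEquals(retCode, 0) would instead report the computed value as
        // "expected" and the constant as "actual", inverting the message.
      }
    }
]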
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java
index 3661207..2548ecf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckMOB.java
@@ -118,18 +118,18 @@ public class TestHBaseFsckMOB extends BaseTestHBaseFsck {
// A corrupt mob file doesn't abort the start of regions, so we can enable the table.
admin.enableTable(table);
HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, table);
- assertEquals(res.getRetCode(), 0);
+ assertEquals(0, res.getRetCode());
HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
- assertEquals(hfcc.getHFilesChecked(), 4);
- assertEquals(hfcc.getCorrupted().size(), 0);
- assertEquals(hfcc.getFailures().size(), 0);
- assertEquals(hfcc.getQuarantined().size(), 0);
- assertEquals(hfcc.getMissing().size(), 0);
- assertEquals(hfcc.getMobFilesChecked(), 5);
- assertEquals(hfcc.getCorruptedMobFiles().size(), 1);
- assertEquals(hfcc.getFailureMobFiles().size(), 0);
- assertEquals(hfcc.getQuarantinedMobFiles().size(), 1);
- assertEquals(hfcc.getMissedMobFiles().size(), 0);
+ assertEquals(4, hfcc.getHFilesChecked());
+ assertEquals(0, hfcc.getCorrupted().size());
+ assertEquals(0, hfcc.getFailures().size());
+ assertEquals(0, hfcc.getQuarantined().size());
+ assertEquals(0, hfcc.getMissing().size());
+ assertEquals(5, hfcc.getMobFilesChecked());
+ assertEquals(1, hfcc.getCorruptedMobFiles().size());
+ assertEquals(0, hfcc.getFailureMobFiles().size());
+ assertEquals(1, hfcc.getQuarantinedMobFiles().size());
+ assertEquals(0, hfcc.getMissedMobFiles().size());
String quarantinedMobFile = hfcc.getQuarantinedMobFiles().iterator().next().getName();
assertEquals(corruptMobFile, quarantinedMobFile);
} finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
index 13c6df5..bb68898 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java
@@ -86,7 +86,7 @@ public class TestJSONMetricUtil {
Hashtable<String, String> properties = JSONMetricUtil.buldKeyValueTable(keys, values);
ObjectName testObject = JSONMetricUtil.buildObjectName(JSONMetricUtil.JAVA_LANG_DOMAIN,
properties);
- assertEquals(testObject.getDomain(), JSONMetricUtil.JAVA_LANG_DOMAIN);
+ assertEquals(JSONMetricUtil.JAVA_LANG_DOMAIN, testObject.getDomain());
assertEquals(testObject.getKeyPropertyList(), properties);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
index 7b1cd2d..e6b05e9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
@@ -40,6 +40,7 @@ public class TestMiniClusterLoadParallel
super(isMultiPut, encoding);
}
+ @Override
@Test(timeout=TIMEOUT_MS)
public void loadTest() throws Exception {
prepareForLoadTest();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
index fd86beb..eee3030 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
@@ -68,6 +68,7 @@ public class TestRegionSplitCalculator {
return end;
}
+ @Override
public String toString() {
return "[" + Bytes.toString(start) + ", " + Bytes.toString(end) + "]";
}
@@ -135,8 +136,7 @@ public class TestRegionSplitCalculator {
LOG.info("Standard");
String res = dump(sc.getSplits(), regions);
checkDepths(sc.getSplits(), regions, 1, 1, 1, 0);
- assertEquals(res, "A:\t[A, B]\t\n" + "B:\t[B, C]\t\n" + "C:\t[C, D]\t\n"
- + "D:\t\n");
+ assertEquals("A:\t[A, B]\t\n" + "B:\t[B, C]\t\n" + "C:\t[C, D]\t\nD:\t\n", res);
}
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
index 3650706..1aab1f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
@@ -20,10 +20,11 @@ import java.io.IOException;
import java.util.Random;
import java.util.Set;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* A generator of random data (keys/cfs/columns/values) for load testing.
@@ -35,11 +36,11 @@ public abstract class LoadTestDataGenerator {
// The mutate info column stores information
// about update done to this column family this row.
- public final static byte[] MUTATE_INFO = "mutate_info".getBytes();
+ public final static byte[] MUTATE_INFO = Bytes.toBytes("mutate_info");
// The increment column always has a long value,
// which can be incremented later on during updates.
- public final static byte[] INCREMENT = "increment".getBytes();
+ public final static byte[] INCREMENT = Bytes.toBytes("increment");
protected String[] args;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index f0f35e7..ceb43d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -476,7 +476,7 @@ public class TestWALFactory {
reader.close();
// Reset the lease period
- setLeasePeriod.invoke(cluster, new Object[]{new Long(60000), new Long(3600000)});
+ setLeasePeriod.invoke(cluster, new Object[]{ 60000L, 3600000L });
}
/**
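[Editor's note: the boxed constructor new Long(...) is deprecated (since Java 9) in favor of autoboxing, which compiles to Long.valueOf and can reuse cached instances. A small sketch:

    public class BoxingDemo {
      public static void main(String[] args) {
        Long allocated = new Long(60000L);  // deprecated: always allocates
        Long boxed = 60000L;                // autoboxing, compiles to Long.valueOf
        Long cached = Long.valueOf(8L);     // returns a cached instance for -128..127
        System.out.println(allocated.equals(boxed) && cached == Long.valueOf(8L)); // true
      }
    }
]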
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index a1206aa..dded506 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -131,9 +131,9 @@ public class TestWALSplit {
private static final String FILENAME_BEING_SPLIT = "testfile";
private static final TableName TABLE_NAME =
TableName.valueOf("t1");
- private static final byte[] FAMILY = "f1".getBytes();
- private static final byte[] QUALIFIER = "q1".getBytes();
- private static final byte[] VALUE = "v1".getBytes();
+ private static final byte[] FAMILY = Bytes.toBytes("f1");
+ private static final byte[] QUALIFIER = Bytes.toBytes("q1");
+ private static final byte[] VALUE = Bytes.toBytes("v1");
private static final String WAL_FILE_PREFIX = "wal.dat.";
private static List<String> REGIONS = new ArrayList<>();
private static final String HBASE_SKIP_ERRORS = "hbase.hlog.split.skip.errors";
@@ -341,7 +341,7 @@ public class TestWALSplit {
while (!stop.get()) {
try {
long seq = appendEntry(writer, TABLE_NAME, regionBytes,
- ("r" + editsCount.get()).getBytes(), regionBytes, QUALIFIER, VALUE, 0);
+ Bytes.toBytes("r" + editsCount.get()), regionBytes, QUALIFIER, VALUE, 0);
long count = editsCount.incrementAndGet();
LOG.info(getName() + " sync count=" + count + ", seq=" + seq);
try {
@@ -407,7 +407,7 @@ public class TestWALSplit {
TableName.META_TABLE_NAME, 1, now, HConstants.DEFAULT_CLUSTER_ID),
new WALEdit());
Path parent = WALSplitter.getRegionDirRecoveredEditsDir(regiondir);
- assertEquals(parent.getName(), HConstants.RECOVERED_EDITS_DIR);
+ assertEquals(HConstants.RECOVERED_EDITS_DIR, parent.getName());
fs.createNewFile(parent); // create a recovered.edits file
Path p = WALSplitter.getRegionSplitEditsPath(fs, entry, HBASEDIR,
@@ -588,8 +588,8 @@ public class TestWALSplit {
archivedLogs.add(log.getPath().getName());
}
LOG.debug(archived.toString());
- assertEquals(failureType.name() + ": expected to find all of our wals corrupt.",
- walDirContents, archivedLogs);
+ assertEquals(failureType.name() + ": expected to find all of our wals corrupt.", archivedLogs,
+ walDirContents);
}
}
@@ -687,7 +687,7 @@ public class TestWALSplit {
// should not have stored the EOF files as corrupt
FileStatus[] archivedLogs = fs.listStatus(CORRUPTDIR);
- assertEquals(archivedLogs.length, 0);
+ assertEquals(0, archivedLogs.length);
}
@@ -749,7 +749,7 @@ public class TestWALSplit {
InstrumentedLogWriter.activateFailure = false;
appendEntry(writer, TABLE_NAME, Bytes.toBytes(region),
- ("r" + 999).getBytes(), FAMILY, QUALIFIER, VALUE, 0);
+ Bytes.toBytes("r" + 999), FAMILY, QUALIFIER, VALUE, 0);
writer.close();
try {
@@ -1206,8 +1206,8 @@ public class TestWALSplit {
int prefix = 0;
for (String region : REGIONS) {
String row_key = region + prefix++ + i + j;
- appendEntry(ws[i], TABLE_NAME, region.getBytes(), row_key.getBytes(), FAMILY, QUALIFIER,
- VALUE, seq++);
+ appendEntry(ws[i], TABLE_NAME, Bytes.toBytes(region), Bytes.toBytes(row_key), FAMILY,
+ QUALIFIER, VALUE, seq++);
if (numRegionEventsAdded < regionEvents) {
numRegionEventsAdded ++;
@@ -1233,7 +1233,7 @@ public class TestWALSplit {
Path tdir = FSUtils.getTableDir(rootdir, table);
@SuppressWarnings("deprecation")
Path editsdir = WALSplitter.getRegionDirRecoveredEditsDir(HRegion.getRegionDir(tdir,
- Bytes.toString(region.getBytes())));
+ Bytes.toString(Bytes.toBytes(region))));
FileStatus[] files = fs.listStatus(editsdir, new PathFilter() {
@Override
public boolean accept(Path p) {
@@ -1260,46 +1260,46 @@ public class TestWALSplit {
in.close();
switch (corruption) {
- case APPEND_GARBAGE:
- fs.delete(path, false);
- out = fs.create(path);
- out.write(corrupted_bytes);
- out.write("-----".getBytes());
- closeOrFlush(close, out);
- break;
-
- case INSERT_GARBAGE_ON_FIRST_LINE:
- fs.delete(path, false);
- out = fs.create(path);
- out.write(0);
- out.write(corrupted_bytes);
- closeOrFlush(close, out);
- break;
-
- case INSERT_GARBAGE_IN_THE_MIDDLE:
- fs.delete(path, false);
- out = fs.create(path);
- int middle = (int) Math.floor(corrupted_bytes.length / 2);
- out.write(corrupted_bytes, 0, middle);
- out.write(0);
- out.write(corrupted_bytes, middle, corrupted_bytes.length - middle);
- closeOrFlush(close, out);
- break;
-
- case TRUNCATE:
- fs.delete(path, false);
- out = fs.create(path);
- out.write(corrupted_bytes, 0, fileSize
- - (32 + ProtobufLogReader.PB_WAL_COMPLETE_MAGIC.length + Bytes.SIZEOF_INT));
- closeOrFlush(close, out);
- break;
-
- case TRUNCATE_TRAILER:
- fs.delete(path, false);
- out = fs.create(path);
- out.write(corrupted_bytes, 0, fileSize - Bytes.SIZEOF_INT);// trailer is truncated.
- closeOrFlush(close, out);
- break;
+ case APPEND_GARBAGE:
+ fs.delete(path, false);
+ out = fs.create(path);
+ out.write(corrupted_bytes);
+ out.write(Bytes.toBytes("-----"));
+ closeOrFlush(close, out);
+ break;
+
+ case INSERT_GARBAGE_ON_FIRST_LINE:
+ fs.delete(path, false);
+ out = fs.create(path);
+ out.write(0);
+ out.write(corrupted_bytes);
+ closeOrFlush(close, out);
+ break;
+
+ case INSERT_GARBAGE_IN_THE_MIDDLE:
+ fs.delete(path, false);
+ out = fs.create(path);
+ int middle = (int) Math.floor(corrupted_bytes.length / 2);
+ out.write(corrupted_bytes, 0, middle);
+ out.write(0);
+ out.write(corrupted_bytes, middle, corrupted_bytes.length - middle);
+ closeOrFlush(close, out);
+ break;
+
+ case TRUNCATE:
+ fs.delete(path, false);
+ out = fs.create(path);
+ out.write(corrupted_bytes, 0, fileSize
+ - (32 + ProtobufLogReader.PB_WAL_COMPLETE_MAGIC.length + Bytes.SIZEOF_INT));
+ closeOrFlush(close, out);
+ break;
+
+ case TRUNCATE_TRAILER:
+ fs.delete(path, false);
+ out = fs.create(path);
+ out.write(corrupted_bytes, 0, fileSize - Bytes.SIZEOF_INT);// trailer is truncated.
+ closeOrFlush(close, out);
+ break;
}
}
@@ -1360,14 +1360,14 @@ public class TestWALSplit {
WALProtos.RegionEventDescriptor regionOpenDesc = ProtobufUtil.toRegionEventDescriptor(
WALProtos.RegionEventDescriptor.EventType.REGION_OPEN,
TABLE_NAME.toBytes(),
- region.getBytes(),
- String.valueOf(region.hashCode()).getBytes(),
+ Bytes.toBytes(region),
+ Bytes.toBytes(String.valueOf(region.hashCode())),
1,
ServerName.parseServerName("ServerName:9099"), ImmutableMap.<byte[], List<Path>>of());
final long time = EnvironmentEdgeManager.currentTime();
- KeyValue kv = new KeyValue(region.getBytes(), WALEdit.METAFAMILY, WALEdit.REGION_EVENT,
+ KeyValue kv = new KeyValue(Bytes.toBytes(region), WALEdit.METAFAMILY, WALEdit.REGION_EVENT,
time, regionOpenDesc.toByteArray());
- final WALKeyImpl walKey = new WALKeyImpl(region.getBytes(), TABLE_NAME, 1, time,
+ final WALKeyImpl walKey = new WALKeyImpl(Bytes.toBytes(region), TABLE_NAME, 1, time,
HConstants.DEFAULT_CLUSTER_ID);
w.append(
new Entry(walKey, new WALEdit().add(kv)));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java
index 844cb3a..400d12b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitBoundedLogWriterCreation.java
@@ -35,6 +35,7 @@ public class TestWALSplitBoundedLogWriterCreation extends TestWALSplit{
/**
* The logic of this test has conflict with the limit writers split logic, skip this test
*/
+ @Override
@Test(timeout=300000)
@Ignore
public void testThreadingSlowWriterSmallBuffer() throws Exception {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
index 4e67b91..3e9e650 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
@@ -129,10 +129,10 @@ public class TestZooKeeperACL {
List<ACL> acls = zkw.getRecoverableZooKeeper().getZooKeeper()
.getACL("/hbase", new Stat());
- assertEquals(acls.size(),1);
- assertEquals(acls.get(0).getId().getScheme(),"sasl");
- assertEquals(acls.get(0).getId().getId(),"hbase");
- assertEquals(acls.get(0).getPerms(), ZooDefs.Perms.ALL);
+ assertEquals(1, acls.size());
+ assertEquals("sasl", acls.get(0).getId().getScheme());
+ assertEquals("hbase", acls.get(0).getId().getId());
+ assertEquals(ZooDefs.Perms.ALL, acls.get(0).getPerms());
}
/**
@@ -148,20 +148,20 @@ public class TestZooKeeperACL {
List<ACL> acls = zkw.getRecoverableZooKeeper().getZooKeeper()
.getACL("/hbase/root-region-server", new Stat());
- assertEquals(acls.size(),2);
+ assertEquals(2, acls.size());
boolean foundWorldReadableAcl = false;
boolean foundHBaseOwnerAcl = false;
for(int i = 0; i < 2; i++) {
if (acls.get(i).getId().getScheme().equals("world") == true) {
- assertEquals(acls.get(0).getId().getId(),"anyone");
- assertEquals(acls.get(0).getPerms(), ZooDefs.Perms.READ);
+ assertEquals("anyone", acls.get(0).getId().getId());
+ assertEquals(ZooDefs.Perms.READ, acls.get(0).getPerms());
foundWorldReadableAcl = true;
}
else {
if (acls.get(i).getId().getScheme().equals("sasl") == true) {
- assertEquals(acls.get(1).getId().getId(),"hbase");
- assertEquals(acls.get(1).getId().getScheme(),"sasl");
+ assertEquals("hbase", acls.get(1).getId().getId());
+ assertEquals("sasl", acls.get(1).getId().getScheme());
foundHBaseOwnerAcl = true;
} else { // error: should not get here: test fails.
assertTrue(false);
@@ -185,19 +185,19 @@ public class TestZooKeeperACL {
List<ACL> acls = zkw.getRecoverableZooKeeper().getZooKeeper()
.getACL("/hbase/master", new Stat());
- assertEquals(acls.size(),2);
+ assertEquals(2, acls.size());
boolean foundWorldReadableAcl = false;
boolean foundHBaseOwnerAcl = false;
for(int i = 0; i < 2; i++) {
if (acls.get(i).getId().getScheme().equals("world") == true) {
- assertEquals(acls.get(0).getId().getId(),"anyone");
- assertEquals(acls.get(0).getPerms(), ZooDefs.Perms.READ);
+ assertEquals("anyone", acls.get(0).getId().getId());
+ assertEquals(ZooDefs.Perms.READ, acls.get(0).getPerms());
foundWorldReadableAcl = true;
} else {
if (acls.get(i).getId().getScheme().equals("sasl") == true) {
- assertEquals(acls.get(1).getId().getId(),"hbase");
- assertEquals(acls.get(1).getId().getScheme(),"sasl");
+ assertEquals("hbase", acls.get(1).getId().getId());
+ assertEquals("sasl", acls.get(1).getId().getScheme());
foundHBaseOwnerAcl = true;
} else { // error: should not get here: test fails.
assertTrue(false);
@@ -221,19 +221,19 @@ public class TestZooKeeperACL {
List<ACL> acls = zkw.getRecoverableZooKeeper().getZooKeeper()
.getACL("/hbase/hbaseid", new Stat());
- assertEquals(acls.size(),2);
+ assertEquals(2, acls.size());
boolean foundWorldReadableAcl = false;
boolean foundHBaseOwnerAcl = false;
for(int i = 0; i < 2; i++) {
if (acls.get(i).getId().getScheme().equals("world") == true) {
- assertEquals(acls.get(0).getId().getId(),"anyone");
- assertEquals(acls.get(0).getPerms(), ZooDefs.Perms.READ);
+ assertEquals("anyone", acls.get(0).getId().getId());
+ assertEquals(ZooDefs.Perms.READ, acls.get(0).getPerms());
foundWorldReadableAcl = true;
} else {
if (acls.get(i).getId().getScheme().equals("sasl") == true) {
- assertEquals(acls.get(1).getId().getId(),"hbase");
- assertEquals(acls.get(1).getId().getScheme(),"sasl");
+ assertEquals("hbase", acls.get(1).getId().getId());
+ assertEquals("sasl", acls.get(1).getId().getScheme());
foundHBaseOwnerAcl = true;
} else { // error: should not get here: test fails.
assertTrue(false);
@@ -257,10 +257,10 @@ public class TestZooKeeperACL {
ZKUtil.createWithParents(zkw, "/testACLNode");
List<ACL> acls = zkw.getRecoverableZooKeeper().getZooKeeper()
.getACL("/testACLNode", new Stat());
- assertEquals(acls.size(),1);
- assertEquals(acls.get(0).getId().getScheme(),"sasl");
- assertEquals(acls.get(0).getId().getId(),"hbase");
- assertEquals(acls.get(0).getPerms(), ZooDefs.Perms.ALL);
+ assertEquals(1, acls.size());
+ assertEquals("sasl", acls.get(0).getId().getScheme());
+ assertEquals("hbase", acls.get(0).getId().getId());
+ assertEquals(ZooDefs.Perms.ALL, acls.get(0).getPerms());
}
/**
@@ -281,7 +281,7 @@ public class TestZooKeeperACL {
saslConfFile.getAbsolutePath());
testJaasConfig = ZKUtil.isSecureZooKeeper(new Configuration(TEST_UTIL.getConfiguration()));
- assertEquals(testJaasConfig, false);
+ assertEquals(false, testJaasConfig);
saslConfFile.delete();
}
@@ -295,13 +295,13 @@ public class TestZooKeeperACL {
Configuration config = new Configuration(HBaseConfiguration.create());
boolean testJaasConfig = ZKUtil.isSecureZooKeeper(config);
- assertEquals(testJaasConfig, false);
+ assertEquals(false, testJaasConfig);
// Now set authentication scheme to Kerberos still it should return false
// because no configuration set
config.set("hbase.security.authentication", "kerberos");
testJaasConfig = ZKUtil.isSecureZooKeeper(config);
- assertEquals(testJaasConfig, false);
+ assertEquals(false, testJaasConfig);
// Now set programmatic options related to security
config.set(HConstants.ZK_CLIENT_KEYTAB_FILE, "/dummy/file");
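[Editor's note: the patch opts for the minimal argument swap here; a reviewer might observe that assertFalse states the same expectation even more directly. Assuming JUnit 4:

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertFalse;
    import org.junit.Test;

    public class BooleanAssertDemo {
      @Test
      public void equivalentAssertions() {
        boolean secure = false; // stand-in for ZKUtil.isSecureZooKeeper(...)
        assertEquals(false, secure); // the patch's form: expected value first
        assertFalse(secure);         // equivalent, and reads as the intent
      }
    }
]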
[9/9] hbase git commit: HBASE-19811 Fix findbugs and error-prone warnings in hbase-server (branch-2)
Posted by st...@apache.org.
HBASE-19811 Fix findbugs and error-prone warnings in hbase-server (branch-2)
Signed-off-by: Michael Stack <st...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b1269ec5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b1269ec5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b1269ec5
Branch: refs/heads/branch-2
Commit: b1269ec57ff77f4077516fb4f866e2fe7ede3a3e
Parents: 1e5fc1e
Author: Peter Somogyi <ps...@cloudera.com>
Authored: Mon Jan 22 15:32:06 2018 +0100
Committer: Michael Stack <st...@apache.org>
Committed: Mon Jan 22 17:14:15 2018 -0800
----------------------------------------------------------------------
.../hbase/SslRMIServerSocketFactorySecure.java | 1 +
.../hadoop/hbase/client/locking/EntityLock.java | 1 +
.../hadoop/hbase/conf/ConfigurationManager.java | 2 +-
.../ZkSplitLogWorkerCoordination.java | 2 +-
.../coprocessor/BaseRowProcessorEndpoint.java | 8 +-
.../hbase/coprocessor/CoprocessorHost.java | 1 +
.../hbase/coprocessor/ObserverContextImpl.java | 3 +
.../coprocessor/ReadOnlyConfiguration.java | 14 +-
.../hadoop/hbase/filter/FilterWrapper.java | 1 +
.../org/apache/hadoop/hbase/fs/HFileSystem.java | 83 ++--
.../hbase/io/FSDataInputStreamWrapper.java | 11 +-
.../hadoop/hbase/io/HalfStoreFileReader.java | 8 +
.../org/apache/hadoop/hbase/io/Reference.java | 1 +
.../asyncfs/FanOutOneBlockAsyncDFSOutput.java | 1 +
.../hadoop/hbase/io/hfile/BlockCache.java | 1 +
.../hbase/io/hfile/CompoundBloomFilter.java | 1 +
.../hadoop/hbase/io/hfile/FixedFileTrailer.java | 10 +-
.../hadoop/hbase/io/hfile/HFileBlock.java | 68 +--
.../hadoop/hbase/io/hfile/HFileReaderImpl.java | 2 +-
.../hadoop/hbase/io/hfile/HFileScanner.java | 1 +
.../hadoop/hbase/io/hfile/LruBlockCache.java | 4 +
.../hadoop/hbase/io/hfile/LruCachedBlock.java | 1 +
.../hbase/io/hfile/LruCachedBlockQueue.java | 1 +
.../hbase/io/hfile/bucket/BucketAllocator.java | 1 +
.../hbase/io/hfile/bucket/BucketCache.java | 18 +-
.../hbase/io/hfile/bucket/CachedEntryQueue.java | 1 +
.../apache/hadoop/hbase/ipc/BufferChain.java | 2 +-
.../ipc/FastPathBalancedQueueRpcExecutor.java | 1 +
.../hadoop/hbase/ipc/ServerRpcConnection.java | 5 +-
.../hadoop/hbase/ipc/SimpleRpcServer.java | 1 +
.../hbase/master/ClusterStatusPublisher.java | 9 +-
.../apache/hadoop/hbase/master/DeadServer.java | 1 +
.../hadoop/hbase/master/HMasterCommandLine.java | 3 +-
.../hadoop/hbase/master/LoadBalancer.java | 1 +
...MasterAnnotationReadingPriorityFunction.java | 1 +
.../hbase/master/MasterCoprocessorHost.java | 1 +
.../hadoop/hbase/master/MobCompactionChore.java | 2 +-
.../hbase/master/RegionServerTracker.java | 2 +-
.../hadoop/hbase/master/SplitLogManager.java | 2 +-
.../assignment/MergeTableRegionsProcedure.java | 2 +-
.../hbase/master/assignment/RegionStates.java | 7 +-
.../balancer/FavoredStochasticBalancer.java | 5 +-
.../master/balancer/RegionLocationFinder.java | 27 +-
.../master/balancer/SimpleLoadBalancer.java | 1 +
.../master/balancer/StochasticLoadBalancer.java | 6 +-
.../hbase/master/cleaner/CleanerChore.java | 4 +-
.../hbase/master/cleaner/HFileCleaner.java | 2 +-
.../hadoop/hbase/master/cleaner/LogCleaner.java | 2 +-
.../hbase/master/locking/LockProcedure.java | 2 +-
.../master/procedure/ProcedurePrepareLatch.java | 4 +
.../master/procedure/RSProcedureDispatcher.java | 9 +
.../master/snapshot/SnapshotHFileCleaner.java | 2 +
.../apache/hadoop/hbase/mob/CachedMobFile.java | 1 +
.../hadoop/hbase/mob/ExpiredMobFileCleaner.java | 1 +
.../PartitionedMobCompactionRequest.java | 1 +
.../monitoring/MonitoredRPCHandlerImpl.java | 12 +
.../hbase/monitoring/MonitoredTaskImpl.java | 1 +
.../hadoop/hbase/monitoring/TaskMonitor.java | 2 +-
.../hadoop/hbase/procedure/Procedure.java | 2 +-
.../hbase/procedure/ProcedureManagerHost.java | 6 +-
.../hadoop/hbase/procedure/Subprocedure.java | 1 +
.../hbase/procedure/ZKProcedureCoordinator.java | 12 +-
.../hbase/procedure/ZKProcedureMemberRpcs.java | 1 +
.../hadoop/hbase/procedure/ZKProcedureUtil.java | 1 +
.../hadoop/hbase/quotas/MasterQuotaManager.java | 4 +-
.../hadoop/hbase/quotas/QuotaObserverChore.java | 3 +-
.../apache/hadoop/hbase/quotas/RateLimiter.java | 1 +
.../quotas/RegionServerSpaceQuotaManager.java | 2 +-
.../SpaceQuotaSnapshotNotifierFactory.java | 4 +-
.../hbase/regionserver/AbstractMemStore.java | 2 +-
.../AdaptiveMemStoreCompactionStrategy.java | 3 +
.../regionserver/CellChunkImmutableSegment.java | 2 +-
.../hadoop/hbase/regionserver/CellSet.java | 30 ++
.../hbase/regionserver/CompactingMemStore.java | 4 +-
.../regionserver/CompositeImmutableSegment.java | 1 +
.../regionserver/DateTieredStoreEngine.java | 1 +
.../hbase/regionserver/DefaultStoreFlusher.java | 8 +-
.../hadoop/hbase/regionserver/HMobStore.java | 56 +--
.../hadoop/hbase/regionserver/HRegion.java | 5 +-
.../hbase/regionserver/HRegionServer.java | 10 +-
.../regionserver/HRegionServerCommandLine.java | 2 +
.../hadoop/hbase/regionserver/HStore.java | 84 ++--
...IncreasingToUpperBoundRegionSplitPolicy.java | 2 +-
.../hbase/regionserver/InternalScanner.java | 1 +
.../hadoop/hbase/regionserver/KeyValueHeap.java | 5 +
.../hbase/regionserver/KeyValueScanner.java | 1 +
.../hbase/regionserver/MemStoreCompactor.java | 4 +-
.../hbase/regionserver/MemStoreFlusher.java | 8 +-
.../MemStoreMergerSegmentsIterator.java | 1 +
.../MetricsRegionServerWrapperImpl.java | 4 +-
.../regionserver/MetricsRegionWrapperImpl.java | 2 +-
.../MultiVersionConcurrencyControl.java | 9 +-
.../hbase/regionserver/RSRpcServices.java | 2 +-
.../RegionServerCoprocessorHost.java | 1 +
.../hbase/regionserver/ScannerContext.java | 8 +-
.../hbase/regionserver/ServerNonceManager.java | 2 +-
.../hbase/regionserver/SteppingSplitPolicy.java | 1 +
.../hbase/regionserver/StoreFileScanner.java | 6 +
.../hbase/regionserver/StripeStoreFlusher.java | 8 +-
.../regionserver/compactions/Compactor.java | 7 +-
.../compactions/DateTieredCompactionPolicy.java | 1 +
.../compactions/RatioBasedCompactionPolicy.java | 1 +
.../compactions/SortedCompactionPolicy.java | 2 +
.../querymatcher/ExplicitColumnTracker.java | 4 +
.../querymatcher/ScanWildcardColumnTracker.java | 2 +
.../hbase/regionserver/wal/AsyncFSWAL.java | 2 +-
.../hadoop/hbase/regionserver/wal/FSHLog.java | 20 +-
.../hbase/regionserver/wal/FSWALEntry.java | 1 +
.../regionserver/wal/ProtobufLogReader.java | 6 +-
.../replication/BaseReplicationEndpoint.java | 2 +-
.../DefaultSourceFSConfigurationProvider.java | 2 +-
.../replication/regionserver/Replication.java | 5 +
.../regionserver/ReplicationSink.java | 4 +-
.../regionserver/ReplicationSource.java | 19 +-
.../regionserver/ReplicationSourceManager.java | 2 +-
.../ReplicationSourceWALReader.java | 3 +-
.../security/access/AccessControlFilter.java | 1 +
.../hbase/security/access/AccessController.java | 4 +-
.../hbase/security/access/AuthResult.java | 2 +
.../token/AuthenticationTokenSecretManager.java | 1 +
.../security/visibility/ExpressionParser.java | 4 +-
.../visibility/VisibilityController.java | 4 +-
.../VisibilityNewVersionBehaivorTracker.java | 2 +-
.../expression/LeafExpressionNode.java | 1 +
.../expression/NonLeafExpressionNode.java | 1 +
.../visibility/expression/Operator.java | 3 +-
.../hadoop/hbase/snapshot/SnapshotInfo.java | 6 +-
.../hbase/snapshot/SnapshotManifestV1.java | 5 +
.../hbase/snapshot/SnapshotManifestV2.java | 5 +
.../hbase/tool/LoadIncrementalHFiles.java | 2 +-
.../util/BoundedPriorityBlockingQueue.java | 14 +
.../apache/hadoop/hbase/util/FSMapRUtils.java | 1 +
.../hadoop/hbase/util/FSRegionScanner.java | 2 +-
.../hadoop/hbase/util/FSTableDescriptors.java | 2 +-
.../org/apache/hadoop/hbase/util/HBaseFsck.java | 6 +-
.../org/apache/hadoop/hbase/util/IdLock.java | 1 +
.../hadoop/hbase/util/RegionSplitter.java | 2 +-
.../hadoop/hbase/util/RowBloomContext.java | 1 +
.../hadoop/hbase/util/ShutdownHookManager.java | 6 +-
.../hadoop/hbase/wal/AbstractFSWALProvider.java | 3 +-
.../hadoop/hbase/wal/DisabledWALProvider.java | 3 +-
.../apache/hadoop/hbase/wal/FSHLogProvider.java | 2 +-
.../hbase/wal/RegionGroupingProvider.java | 13 +-
.../java/org/apache/hadoop/hbase/wal/WAL.java | 1 +
.../org/apache/hadoop/hbase/wal/WALFactory.java | 16 +-
.../org/apache/hadoop/hbase/wal/WALKeyImpl.java | 4 +-
.../apache/hadoop/hbase/wal/WALSplitter.java | 6 +-
.../hadoop/hbase/AcidGuaranteesTestTool.java | 4 +
.../org/apache/hadoop/hbase/HBaseTestCase.java | 3 +-
.../hadoop/hbase/HBaseTestingUtility.java | 9 +-
.../apache/hadoop/hbase/MiniHBaseCluster.java | 7 +-
.../hadoop/hbase/MultithreadedTestUtil.java | 2 +
.../hbase/TestHDFSBlocksDistribution.java | 1 +
.../org/apache/hadoop/hbase/TestIOFencing.java | 11 +-
.../hbase/TestMetaTableAccessorNoCluster.java | 1 +
.../hadoop/hbase/TestMetaTableLocator.java | 6 +-
.../hadoop/hbase/TestMovedRegionsCleaner.java | 1 +
.../apache/hadoop/hbase/TestMultiVersions.java | 2 +
.../org/apache/hadoop/hbase/TestServerName.java | 5 +-
...TestServerSideScanMetricsFromClientSide.java | 8 +-
.../apache/hadoop/hbase/client/TestAdmin1.java | 2 +-
.../apache/hadoop/hbase/client/TestAdmin2.java | 11 +-
.../hbase/client/TestAsyncClusterAdminApi2.java | 2 +
.../client/TestAsyncDecommissionAdminApi.java | 2 +-
.../client/TestAsyncProcedureAdminApi.java | 2 +-
.../hbase/client/TestAsyncRegionAdminApi.java | 25 +-
...estAsyncReplicationAdminApiWithClusters.java | 1 +
.../hbase/client/TestAsyncSnapshotAdminApi.java | 105 +++--
.../hbase/client/TestAsyncTableAdminApi.java | 2 +-
.../hbase/client/TestAsyncTableBatch.java | 6 +-
...estAvoidCellReferencesIntoShippedBlocks.java | 15 +-
.../hadoop/hbase/client/TestClientPushback.java | 8 +-
.../client/TestConnectionImplementation.java | 3 +-
.../hadoop/hbase/client/TestFastFail.java | 3 +-
.../hadoop/hbase/client/TestFromClientSide.java | 356 ++++++++--------
.../hbase/client/TestFromClientSide3.java | 58 +--
.../hadoop/hbase/client/TestMetaCache.java | 3 +
.../hbase/client/TestMultipleTimestamps.java | 4 +-
.../hadoop/hbase/client/TestReplicasClient.java | 14 +-
.../client/TestRestoreSnapshotFromClient.java | 2 +-
.../apache/hadoop/hbase/client/TestResult.java | 4 +-
.../hbase/client/TestServerBusyException.java | 4 +-
.../hadoop/hbase/client/TestSizeFailures.java | 4 +-
.../hbase/client/TestSmallReversedScanner.java | 4 +-
.../hbase/client/TestSnapshotFromClient.java | 2 +-
.../hbase/client/TestSnapshotMetadata.java | 4 +-
.../hbase/client/TestSnapshotWithAcl.java | 2 +-
.../hbase/client/TestSplitOrMergeStatus.java | 8 +-
.../hbase/client/TestTimestampsFilter.java | 20 +-
.../hbase/client/TestUpdateConfiguration.java | 17 +-
.../TestReplicationAdminWithClusters.java | 10 +-
.../hbase/conf/TestConfigurationManager.java | 1 +
.../hadoop/hbase/constraint/TestConstraint.java | 10 +-
.../coprocessor/SampleRegionWALCoprocessor.java | 3 +-
.../hbase/coprocessor/SimpleRegionObserver.java | 4 +-
.../TestCoprocessorConfiguration.java | 14 +-
.../hbase/coprocessor/TestCoprocessorHost.java | 2 +-
...TestMasterCoprocessorExceptionWithAbort.java | 2 +-
.../hbase/coprocessor/TestMasterObserver.java | 1 +
.../hbase/coprocessor/TestWALObserver.java | 1 +
.../hbase/executor/TestExecutorService.java | 2 +-
.../hadoop/hbase/filter/TestBitComparator.java | 8 +-
.../apache/hadoop/hbase/filter/TestFilter.java | 9 +-
.../hbase/filter/TestFilterFromRegionSide.java | 1 +
.../hadoop/hbase/filter/TestFilterList.java | 3 +-
.../hbase/filter/TestFilterSerialization.java | 4 +-
.../filter/TestFuzzyRowFilterEndToEnd.java | 2 +-
.../filter/TestInvocationRecordFilter.java | 4 +
.../hadoop/hbase/filter/TestParseFilter.java | 144 ++++---
.../TestSingleColumnValueExcludeFilter.java | 2 +-
.../hadoop/hbase/fs/TestBlockReorder.java | 5 +-
.../apache/hadoop/hbase/io/TestHFileLink.java | 2 +-
.../apache/hadoop/hbase/io/TestHeapSize.java | 4 +-
.../encoding/TestBufferedDataBlockEncoder.java | 12 +-
.../io/encoding/TestDataBlockEncoders.java | 18 +-
.../encoding/TestLoadAndSwitchEncodeOnDisk.java | 2 +
.../encoding/TestSeekBeforeWithReverseScan.java | 12 +-
.../hadoop/hbase/io/hfile/CacheTestUtils.java | 9 +-
.../apache/hadoop/hbase/io/hfile/NanoTimer.java | 1 +
.../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 2 +-
.../hadoop/hbase/io/hfile/TestHFileBlock.java | 2 +-
.../hbase/io/hfile/TestHFileEncryption.java | 2 +-
.../hbase/io/hfile/TestLruBlockCache.java | 4 +-
.../hbase/io/hfile/bucket/TestBucketCache.java | 20 +-
.../apache/hadoop/hbase/ipc/TestNettyIPC.java | 2 +-
.../hadoop/hbase/ipc/TestProtoBufRpc.java | 2 +-
.../hadoop/hbase/ipc/TestRpcClientLeaks.java | 7 +-
.../hbase/ipc/TestSimpleRpcScheduler.java | 8 +-
.../hbase/mapreduce/MapreduceTestingShim.java | 4 +
.../hbase/master/TestAssignmentListener.java | 11 +-
.../hadoop/hbase/master/TestMasterFailover.java | 4 +-
.../hbase/master/TestMasterNoCluster.java | 1 +
.../TestMasterOperationsForRegionReplicas.java | 5 +-
.../hadoop/hbase/master/TestMasterShutdown.java | 1 +
.../hbase/master/TestMetaShutdownHandler.java | 6 +-
.../hbase/master/TestRegionPlacement.java | 4 +-
.../hbase/master/TestSplitLogManager.java | 12 +-
.../hbase/master/TestTableStateManager.java | 5 +-
.../master/assignment/MockMasterServices.java | 2 +
.../TestSplitTableRegionProcedure.java | 4 +-
.../hbase/master/balancer/BalancerTestBase.java | 1 +
.../hbase/master/cleaner/TestLogsCleaner.java | 1 +
.../cleaner/TestReplicationHFileCleaner.java | 1 +
.../master/cleaner/TestSnapshotFromMaster.java | 2 +-
.../hbase/master/locking/TestLockProcedure.java | 13 +-
...ProcedureSchedulerPerformanceEvaluation.java | 4 +
.../procedure/TestModifyNamespaceProcedure.java | 8 +-
.../procedure/TestModifyTableProcedure.java | 25 +-
.../procedure/TestWALProcedureStoreOnHDFS.java | 2 +-
.../master/snapshot/TestSnapshotFileCache.java | 1 +
.../snapshot/TestSnapshotHFileCleaner.java | 3 +-
.../hadoop/hbase/mob/TestCachedMobFile.java | 36 +-
.../apache/hadoop/hbase/mob/TestMobFile.java | 14 +-
.../hadoop/hbase/mob/TestMobFileCache.java | 15 +-
.../hadoop/hbase/mob/TestMobFileName.java | 11 +-
.../hbase/namespace/TestNamespaceAuditor.java | 4 +-
.../hadoop/hbase/procedure/TestProcedure.java | 6 +-
.../procedure/TestProcedureCoordinator.java | 3 +-
.../hadoop/hbase/procedure/TestZKProcedure.java | 2 +-
.../hbase/quotas/TestQuotaStatusRPCs.java | 2 +-
.../quotas/TestSuperUserQuotaPermissions.java | 1 +
.../EncodedSeekPerformanceTest.java | 2 +-
.../hbase/regionserver/MockHStoreFile.java | 2 +
.../regionserver/StatefulStoreMockMaker.java | 1 +
.../hbase/regionserver/TestAtomicOperation.java | 6 +-
.../hbase/regionserver/TestBlocksScanned.java | 1 +
.../hadoop/hbase/regionserver/TestBulkLoad.java | 6 +-
.../hbase/regionserver/TestCellFlatSet.java | 25 +-
.../hbase/regionserver/TestCellSkipListSet.java | 1 +
.../regionserver/TestCompactingMemStore.java | 5 +-
.../TestCompactingToCellFlatMapMemStore.java | 4 +-
.../hbase/regionserver/TestCompaction.java | 7 +-
.../TestCompactionArchiveConcurrentClose.java | 2 +
.../TestCompactionFileNotFound.java | 2 +-
.../hbase/regionserver/TestDefaultMemStore.java | 4 +-
.../regionserver/TestGetClosestAtOrBefore.java | 3 +-
.../hadoop/hbase/regionserver/TestHRegion.java | 79 ++--
.../regionserver/TestHRegionReplayEvents.java | 8 +-
.../regionserver/TestHRegionServerBulkLoad.java | 2 +
.../hadoop/hbase/regionserver/TestHStore.java | 9 +-
.../hbase/regionserver/TestHStoreFile.java | 33 +-
.../hbase/regionserver/TestKeyValueHeap.java | 3 +-
.../hbase/regionserver/TestMajorCompaction.java | 9 +-
.../hbase/regionserver/TestMemStoreLAB.java | 4 +-
.../hbase/regionserver/TestMinorCompaction.java | 3 +-
.../TestMultiVersionConcurrencyControl.java | 2 +
.../hbase/regionserver/TestPriorityRpc.java | 30 +-
.../hbase/regionserver/TestRegionReplicas.java | 2 +-
.../TestRegionReplicasWithModifyTable.java | 12 +-
.../TestRegionServerAccounting.java | 20 +-
.../regionserver/TestRegionServerNoMaster.java | 12 +-
.../hadoop/hbase/regionserver/TestScanner.java | 4 +-
.../regionserver/TestScannerWithBulkload.java | 1 +
.../hbase/regionserver/TestSplitLogWorker.java | 2 +-
.../TestSplitTransactionOnCluster.java | 8 +-
.../hbase/regionserver/TestStoreScanner.java | 6 +
.../regionserver/TestSyncTimeRangeTracker.java | 7 +-
.../hbase/regionserver/TestWALLockup.java | 2 +
.../TestWalAndCompactingMemStoreFlush.java | 2 +-
.../compactions/PerfTestCompactionPolicies.java | 6 +-
.../regionserver/compactions/TestCompactor.java | 2 +
.../querymatcher/TestUserScanQueryMatcher.java | 4 +-
.../regionserver/wal/AbstractTestFSWAL.java | 2 +-
.../regionserver/wal/AbstractTestWALReplay.java | 12 +-
.../regionserver/wal/InstrumentedLogWriter.java | 2 +-
.../replication/TestNamespaceReplication.java | 4 +-
.../replication/TestPerTableCFReplication.java | 4 +-
.../hbase/replication/TestReplicationBase.java | 2 +-
.../TestReplicationDisableInactivePeer.java | 2 +-
.../replication/TestReplicationKillRS.java | 1 +
.../replication/TestReplicationSmallTests.java | 4 +-
.../replication/TestReplicationSyncUpTool.java | 4 +-
.../replication/TestReplicationWithTags.java | 2 +-
.../TestRegionReplicaReplicationEndpoint.java | 8 +-
.../regionserver/TestReplicator.java | 60 ++-
.../regionserver/TestWALEntryStream.java | 2 +-
.../hadoop/hbase/security/TestSecureIPC.java | 3 +-
.../apache/hadoop/hbase/security/TestUser.java | 3 +
.../access/TestAccessControlFilter.java | 3 +
.../security/access/TestAccessController.java | 9 +-
.../security/token/TestZKSecretWatcher.java | 2 +
.../token/TestZKSecretWatcherRefreshKeys.java | 2 +
.../TestDefaultScanLabelGeneratorStack.java | 22 +-
.../TestEnforcingScanLabelGenerator.java | 4 +
...sibilityLabelReplicationWithExpAsString.java | 1 +
.../visibility/TestVisibilityLabels.java | 41 +-
...sibilityLabelsOnNewVersionBehaviorTable.java | 1 +
...sibilityLabelsOpWithDifferentUsersNoACL.java | 5 +
.../TestVisibilityLabelsReplication.java | 3 +
.../visibility/TestVisibilityLabelsWithACL.java | 9 +
...VisibilityLabelsWithCustomVisLabService.java | 2 +
...ibilityLabelsWithDefaultVisLabelService.java | 4 +
.../TestVisibilityLabelsWithDeletes.java | 426 +++++++++----------
.../TestVisibilityLabelsWithSLGStack.java | 1 +
.../TestVisibilityLablesWithGroups.java | 8 +
.../TestVisibilityWithCheckAuths.java | 2 +-
.../TestWithDisabledAuthorization.java | 8 +-
.../snapshot/TestFlushSnapshotFromClient.java | 2 +
.../hadoop/hbase/tool/MapreduceTestingShim.java | 4 +
.../hbase/tool/TestLoadIncrementalHFiles.java | 2 +-
.../TestLoadIncrementalHFilesSplitRecovery.java | 2 +-
.../hadoop/hbase/util/MultiThreadedAction.java | 4 +-
.../hadoop/hbase/util/MultiThreadedReader.java | 3 +-
.../util/TestBoundedPriorityBlockingQueue.java | 6 +-
.../hadoop/hbase/util/TestByteBuffUtils.java | 4 +-
.../hadoop/hbase/util/TestFSHDFSUtils.java | 3 +-
.../apache/hadoop/hbase/util/TestFSVisitor.java | 7 +-
.../hbase/util/TestHBaseFsckEncryption.java | 10 +-
.../hadoop/hbase/util/TestHBaseFsckMOB.java | 22 +-
.../hadoop/hbase/util/TestJSONMetricUtil.java | 2 +-
.../hbase/util/TestMiniClusterLoadParallel.java | 1 +
.../hbase/util/TestRegionSplitCalculator.java | 4 +-
.../hbase/util/test/LoadTestDataGenerator.java | 7 +-
.../apache/hadoop/hbase/wal/TestWALFactory.java | 2 +-
.../apache/hadoop/hbase/wal/TestWALSplit.java | 112 ++---
.../TestWALSplitBoundedLogWriterCreation.java | 1 +
.../hbase/zookeeper/TestZooKeeperACL.java | 52 +--
357 files changed, 1959 insertions(+), 1495 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
index 8560ddc..3583afe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
@@ -33,6 +33,7 @@ public class SslRMIServerSocketFactorySecure extends SslRMIServerSocketFactory {
@Override
public ServerSocket createServerSocket(int port) throws IOException {
return new ServerSocket(port) {
+ @Override
public Socket accept() throws IOException {
Socket socket = super.accept();
SSLSocketFactory sslSocketFactory = (SSLSocketFactory) SSLSocketFactory.getDefault();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
index 3fea1a2..b956e33 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java
@@ -227,6 +227,7 @@ public class EntityLock {
return this;
}
+ @Override
public void run() {
final LockHeartbeatRequest lockHeartbeatRequest =
LockHeartbeatRequest.newBuilder().setProcId(procId).build();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
index 2bbb90b..555a5c0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java
@@ -79,7 +79,7 @@ public class ConfigurationManager {
// notified when the configuration is reloaded from disk. This is a set
// constructed from a WeakHashMap, whose entries would be removed if the
// observer classes go out of scope.
- private Set<ConfigurationObserver> configurationObservers =
+ private final Set<ConfigurationObserver> configurationObservers =
Collections.newSetFromMap(new WeakHashMap<ConfigurationObserver,
Boolean>());
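[Editor's note: declaring the field final (here and in ZkSplitLogWorkerCoordination below) guarantees the reference is assigned exactly once and safely published after construction; only the collection's contents stay mutable. A sketch with illustrative names:

    import java.util.Collections;
    import java.util.Set;
    import java.util.WeakHashMap;

    public class FinalFieldDemo {
      // final: the reference cannot be reseated; the set itself is mutable.
      private final Set<Object> observers =
          Collections.newSetFromMap(new WeakHashMap<>());

      public void register(Object o) {
        observers.add(o);    // fine: mutating the collection
        // observers = null; // would no longer compile
      }

      public static void main(String[] args) {
        new FinalFieldDemo().register(new Object());
      }
    }
]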
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
index bcba101..2143f80 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java
@@ -74,7 +74,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements
private TaskExecutor splitTaskExecutor;
- private AtomicInteger taskReadySeq = new AtomicInteger(0);
+ private final AtomicInteger taskReadySeq = new AtomicInteger(0);
private volatile String currentTask = null;
private int currentVersion;
private volatile boolean shouldStop = false;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
index f460ac9..ef91bf2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
@@ -118,7 +118,7 @@ extends RowProcessorService implements RegionCoprocessor {
Class<?> cls;
try {
cls = Class.forName(className);
- RowProcessor<S,T> ci = (RowProcessor<S,T>) cls.newInstance();
+ RowProcessor<S,T> ci = (RowProcessor<S,T>) cls.getDeclaredConstructor().newInstance();
if (request.hasRowProcessorInitializerMessageName()) {
Class<?> imn = Class.forName(request.getRowProcessorInitializerMessageName())
.asSubclass(Message.class);
@@ -141,11 +141,7 @@ extends RowProcessorService implements RegionCoprocessor {
ci.initialize(s);
}
return ci;
- } catch (ClassNotFoundException e) {
- throw new IOException(e);
- } catch (InstantiationException e) {
- throw new IOException(e);
- } catch (IllegalAccessException e) {
+ } catch (Exception e) {
throw new IOException(e);
}
}
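[Editor's note: Class.newInstance() is deprecated because it rethrows anything the constructor throws, checked exceptions included, without wrapping; getDeclaredConstructor().newInstance() wraps constructor failures in InvocationTargetException. The commit collapses the catch to a broad Exception; a sketch of the same pattern (RowProcessor and the generics elided, names hypothetical) using the narrowest common supertype instead:

    import java.io.IOException;

    public class ReflectiveDemo {
      // ReflectiveOperationException covers ClassNotFound-, Instantiation-,
      // IllegalAccess-, NoSuchMethod- and InvocationTargetException.
      static <T> T instantiate(Class<T> cls) throws IOException {
        try {
          // Unlike the deprecated cls.newInstance(), constructor exceptions
          // arrive wrapped in InvocationTargetException.
          return cls.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
          throw new IOException(e);
        }
      }

      public static void main(String[] args) throws IOException {
        System.out.println(instantiate(StringBuilder.class));
      }
    }
]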
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index 42da86a..05ac9f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -572,6 +572,7 @@ public abstract class CoprocessorHost<C extends Coprocessor, E extends Coprocess
return this.result;
}
+ @Override
void callObserver() throws IOException {
Optional<O> observer = observerGetter.apply(getEnvironment().getInstance());
if (observer.isPresent()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContextImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContextImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContextImpl.java
index 7de6f0b..6ed1ad3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContextImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContextImpl.java
@@ -48,6 +48,7 @@ public class ObserverContextImpl<E extends CoprocessorEnvironment> implements Ob
this.bypassable = bypassable;
}
+ @Override
public E getEnvironment() {
return env;
}
@@ -60,6 +61,7 @@ public class ObserverContextImpl<E extends CoprocessorEnvironment> implements Ob
return this.bypassable;
};
+ @Override
public void bypass() {
if (!this.bypassable) {
throw new UnsupportedOperationException("This method does not support 'bypass'.");
@@ -82,6 +84,7 @@ public class ObserverContextImpl<E extends CoprocessorEnvironment> implements Ob
return false;
}
+ @Override
public Optional<User> getCaller() {
return Optional.ofNullable(caller);
}
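Much of the churn here is adding @Override to methods that already implement an interface; the annotation costs nothing and turns a later rename or signature change in the parent type into a compile error instead of a silently orphaned method. A minimal illustration (names invented):

class Base {
  void callObserver() {}
}

class Sub extends Base {
  @Override // if Base.callObserver is ever renamed, this line stops compiling
  void callObserver() {}
}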
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
index b073ada..b805c50 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
@@ -87,7 +87,7 @@ class ReadOnlyConfiguration extends Configuration {
}
@Override
- public void reloadConfiguration() {
+ public synchronized void reloadConfiguration() {
// This is a write operation. We need to allow it though because if any Configuration in
// current JVM context calls addDefaultResource, this forces a reload of all Configurations
// (all Configurations are 'registered' by the default constructor. Rather than turn
@@ -100,10 +100,12 @@ class ReadOnlyConfiguration extends Configuration {
return conf.get(name);
}
+ // Do not add @Override because it is not in Hadoop 2.6.5
public void setAllowNullValueProperties(boolean val) {
throw new UnsupportedOperationException("Read-only Configuration");
}
+ @Override
public String getTrimmed(String name) {
return conf.getTrimmed(name);
}
@@ -129,12 +131,12 @@ class ReadOnlyConfiguration extends Configuration {
}
@Override
- public void unset(String name) {
+ public synchronized void unset(String name) {
throw new UnsupportedOperationException("Read-only Configuration");
}
@Override
- public void setIfUnset(String name, String value) {
+ public synchronized void setIfUnset(String name, String value) {
throw new UnsupportedOperationException("Read-only Configuration");
}
@@ -239,7 +241,7 @@ class ReadOnlyConfiguration extends Configuration {
}
@Override
- public String[] getPropertySources(String name) {
+ public synchronized String[] getPropertySources(String name) {
return conf.getPropertySources(name);
}
@@ -326,7 +328,7 @@ class ReadOnlyConfiguration extends Configuration {
}
@Override
- public Class<?>[] getClasses(String name, Class<?>[] defaultValue) {
+ public Class<?>[] getClasses(String name, Class<?>... defaultValue) {
return conf.getClasses(name, defaultValue);
}
@@ -422,7 +424,7 @@ class ReadOnlyConfiguration extends Configuration {
}
@Override
- public void setQuietMode(boolean quietmode) {
+ public synchronized void setQuietMode(boolean quietmode) {
throw new UnsupportedOperationException("Read-only Configuration");
}
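The synchronized keyword added to these overrides mirrors the parent declarations in Hadoop's Configuration: synchronized is not inherited, so an override that drops it silently weakens the superclass's locking contract, which is what the checker flags. A sketch of the rule, with Base standing in for Configuration:

class Base {
  synchronized void reload() { /* guarded by this */ }
}

class ReadOnly extends Base {
  @Override
  synchronized void reload() { // must be restated; it is not inherited
    throw new UnsupportedOperationException("Read-only Configuration");
  }
}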
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index b7c56e0..9bc072a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -119,6 +119,7 @@ final public class FilterWrapper extends Filter {
return filterCell(c);
}
+ @Override
public ReturnCode filterCell(final Cell c) throws IOException {
return this.filter.filterCell(c);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
index b89470f..9ea67c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
@@ -235,16 +235,15 @@ public class HFileSystem extends FilterFileSystem {
}
}
- /**
+ /**
* Returns a brand new instance of the FileSystem. It does not use
* the FileSystem.Cache. In newer versions of HDFS, we can directly
* invoke FileSystem.newInstance(Configuration).
- *
+ *
* @param conf Configuration
* @return A new instance of the filesystem
*/
- private static FileSystem newInstanceFileSystem(Configuration conf)
- throws IOException {
+ private static FileSystem newInstanceFileSystem(Configuration conf) throws IOException {
URI uri = FileSystem.getDefaultUri(conf);
FileSystem fs = null;
Class<?> clazz = conf.getClass("fs." + uri.getScheme() + ".impl", null);
@@ -361,47 +360,43 @@ public class HFileSystem extends FilterFileSystem {
private static ClientProtocol createReorderingProxy(final ClientProtocol cp,
final ReorderBlocks lrb, final Configuration conf) {
- return (ClientProtocol) Proxy.newProxyInstance
- (cp.getClass().getClassLoader(),
- new Class[]{ClientProtocol.class, Closeable.class},
- new InvocationHandler() {
- public Object invoke(Object proxy, Method method,
- Object[] args) throws Throwable {
- try {
- if ((args == null || args.length == 0)
- && "close".equals(method.getName())) {
- RPC.stopProxy(cp);
- return null;
- } else {
- Object res = method.invoke(cp, args);
- if (res != null && args != null && args.length == 3
- && "getBlockLocations".equals(method.getName())
- && res instanceof LocatedBlocks
- && args[0] instanceof String
- && args[0] != null) {
- lrb.reorderBlocks(conf, (LocatedBlocks) res, (String) args[0]);
- }
- return res;
- }
- } catch (InvocationTargetException ite) {
- // We will have this for all the exception, checked on not, sent
- // by any layer, including the functional exception
- Throwable cause = ite.getCause();
- if (cause == null){
- throw new RuntimeException(
- "Proxy invocation failed and getCause is null", ite);
- }
- if (cause instanceof UndeclaredThrowableException) {
- Throwable causeCause = cause.getCause();
- if (causeCause == null) {
- throw new RuntimeException("UndeclaredThrowableException had null cause!");
- }
- cause = cause.getCause();
- }
- throw cause;
+ return (ClientProtocol) Proxy.newProxyInstance(cp.getClass().getClassLoader(),
+ new Class[]{ClientProtocol.class, Closeable.class}, new InvocationHandler() {
+ @Override
+ public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+ try {
+ if ((args == null || args.length == 0) && "close".equals(method.getName())) {
+ RPC.stopProxy(cp);
+ return null;
+ } else {
+ Object res = method.invoke(cp, args);
+ if (res != null && args != null && args.length == 3
+ && "getBlockLocations".equals(method.getName())
+ && res instanceof LocatedBlocks
+ && args[0] instanceof String
+ && args[0] != null) {
+ lrb.reorderBlocks(conf, (LocatedBlocks) res, (String) args[0]);
}
+ return res;
}
- });
+ } catch (InvocationTargetException ite) {
+ // We will see this for all exceptions, checked or not, thrown
+ // by any layer, including functional exceptions
+ Throwable cause = ite.getCause();
+ if (cause == null) {
+ throw new RuntimeException("Proxy invocation failed and getCause is null", ite);
+ }
+ if (cause instanceof UndeclaredThrowableException) {
+ Throwable causeCause = cause.getCause();
+ if (causeCause == null) {
+ throw new RuntimeException("UndeclaredThrowableException had null cause!");
+ }
+ cause = cause.getCause();
+ }
+ throw cause;
+ }
+ }
+ });
}
/**
@@ -424,6 +419,7 @@ public class HFileSystem extends FilterFileSystem {
* datanode is actually dead, so if we use it, it will time out.
*/
static class ReorderWALBlocks implements ReorderBlocks {
+ @Override
public void reorderBlocks(Configuration conf, LocatedBlocks lbs, String src)
throws IOException {
@@ -481,6 +477,7 @@ public class HFileSystem extends FilterFileSystem {
* createNonRecursive. This is a hadoop bug and when it is fixed in Hadoop,
* this definition will go away.
*/
+ @Override
@SuppressWarnings("deprecation")
public FSDataOutputStream createNonRecursive(Path f,
boolean overwrite,
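The reformatted createReorderingProxy keeps the same structure: a JDK dynamic proxy that special-cases close(), post-processes getBlockLocations() results, and unwraps InvocationTargetException so callers see the underlying cause rather than the reflection wrapper. A stripped-down sketch of that unwrapping idiom (Service and fetch are placeholders):

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

class Proxies {
  interface Service {
    String fetch(String key) throws Exception;
  }

  static Service wrap(final Service delegate) {
    return (Service) Proxy.newProxyInstance(
        delegate.getClass().getClassLoader(),
        new Class<?>[] { Service.class },
        new InvocationHandler() {
          @Override
          public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            try {
              return method.invoke(delegate, args);
            } catch (InvocationTargetException ite) {
              // rethrow what the real method threw, not the reflection wrapper
              throw ite.getCause() != null ? ite.getCause() : ite;
            }
          }
        });
  }
}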
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
index 5fd9d36..6c73405 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
@@ -22,15 +22,16 @@ import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
@@ -73,7 +74,7 @@ public class FSDataInputStreamWrapper implements Closeable {
*/
private volatile FSDataInputStream stream = null;
private volatile FSDataInputStream streamNoFsChecksum = null;
- private Object streamNoFsChecksumFirstCreateLock = new Object();
+ private final Object streamNoFsChecksumFirstCreateLock = new Object();
// The configuration states that we should validate hbase checksums
private boolean useHBaseChecksumConfigured;
@@ -86,7 +87,7 @@ public class FSDataInputStreamWrapper implements Closeable {
// In the case of a checksum failure, do these many succeeding
// reads without hbase checksum verification.
- private volatile int hbaseChecksumOffCount = -1;
+ private AtomicInteger hbaseChecksumOffCount = new AtomicInteger(-1);
private Boolean instanceOfCanUnbuffer = null;
// Using reflection to get org.apache.hadoop.fs.CanUnbuffer#unbuffer method to avoid compilation
@@ -216,7 +217,7 @@ public class FSDataInputStreamWrapper implements Closeable {
}
if (!partOfConvoy) {
this.useHBaseChecksum = false;
- this.hbaseChecksumOffCount = offCount;
+ this.hbaseChecksumOffCount.set(offCount);
}
return this.stream;
}
@@ -224,7 +225,7 @@ public class FSDataInputStreamWrapper implements Closeable {
/** Report that checksum was ok, so we may ponder going back to HBase checksum. */
public void checksumOk() {
if (this.useHBaseChecksumConfigured && !this.useHBaseChecksum
- && (this.hbaseChecksumOffCount-- < 0)) {
+ && (this.hbaseChecksumOffCount.getAndDecrement() < 0)) {
// The stream we need is already open (because we were using HBase checksum in the past).
assert this.streamNoFsChecksum != null;
this.useHBaseChecksum = true;
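The hbaseChecksumOffCount change fixes the classic volatile pitfall: count-- on a volatile int is two separate accesses (a read, then a write), so concurrent threads can lose decrements; AtomicInteger makes the step atomic. Minimal sketch:

import java.util.concurrent.atomic.AtomicInteger;

class OffCount {
  private final AtomicInteger remaining = new AtomicInteger(3);

  boolean expired() {
    // atomic equivalent of "remaining-- < 0" on a plain or volatile int
    return remaining.getAndDecrement() < 0;
  }
}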
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
index 80207eb..f30d488 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
@@ -129,35 +129,41 @@ public class HalfStoreFileReader extends StoreFileReader {
final HFileScanner delegate = s;
public boolean atEnd = false;
+ @Override
public Cell getKey() {
if (atEnd) return null;
return delegate.getKey();
}
+ @Override
public String getKeyString() {
if (atEnd) return null;
return delegate.getKeyString();
}
+ @Override
public ByteBuffer getValue() {
if (atEnd) return null;
return delegate.getValue();
}
+ @Override
public String getValueString() {
if (atEnd) return null;
return delegate.getValueString();
}
+ @Override
public Cell getCell() {
if (atEnd) return null;
return delegate.getCell();
}
+ @Override
public boolean next() throws IOException {
if (atEnd) return false;
@@ -200,10 +206,12 @@ public class HalfStoreFileReader extends StoreFileReader {
return (this.delegate.getReader().getComparator().compare(splitCell, getKey())) > 0;
}
+ @Override
public org.apache.hadoop.hbase.io.hfile.HFile.Reader getReader() {
return this.delegate.getReader();
}
+ @Override
public boolean isSeeked() {
return this.delegate.isSeeked();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
index 6dbfd2f..6dce132 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
@@ -222,6 +222,7 @@ public class Reference {
return Arrays.hashCode(splitkey) + region.hashCode();
}
+ @Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null) return false;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
index 5f4bb76..1645d68 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java
@@ -507,6 +507,7 @@ public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput {
* @param syncBlock will call hsync if true, otherwise hflush.
* @return A CompletableFuture that hold the acked length after flushing.
*/
+ @Override
public CompletableFuture<Long> flush(boolean syncBlock) {
CompletableFuture<Long> future = new CompletableFuture<>();
flush0(future, syncBlock);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
index 50d8508..dccfe39 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
@@ -126,6 +126,7 @@ public interface BlockCache extends Iterable<CachedBlock> {
/**
* @return Iterator over the blocks in the cache.
*/
+ @Override
Iterator<CachedBlock> iterator();
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java
index 768b37f..2aceed7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilter.java
@@ -156,6 +156,7 @@ public class CompoundBloomFilter extends CompoundBloomFilterBase
return result;
}
+ @Override
public boolean supportsAutoLoading() {
return true;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index 70a3d4d..a0d3df3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -545,7 +545,7 @@ public class FixedFileTrailer {
try {
// If null, it should be the Bytes.BYTES_RAWCOMPARATOR
if (klass != null) {
- CellComparator comp = klass.newInstance();
+ CellComparator comp = klass.getDeclaredConstructor().newInstance();
// if the name wasn't one of the legacy names, maybe its a legit new
// kind of comparator.
comparatorClassName = klass.getName();
@@ -589,12 +589,8 @@ public class FixedFileTrailer {
public static CellComparator createComparator(
String comparatorClassName) throws IOException {
try {
- Class<? extends CellComparator> comparatorClass = getComparatorClass(comparatorClassName);
- return comparatorClass != null ? comparatorClass.newInstance() : null;
- } catch (InstantiationException e) {
- throw new IOException("Comparator class " + comparatorClassName +
- " is not instantiable", e);
- } catch (IllegalAccessException e) {
+ return getComparatorClass(comparatorClassName).getDeclaredConstructor().newInstance();
+ } catch (Exception e) {
throw new IOException("Comparator class " + comparatorClassName +
" is not instantiable", e);
}
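Collapsing the two catch arms is safe because both built the identical IOException; since Java 7, multi-catch or the ReflectiveOperationException umbrella keeps that deduplication without widening all the way to Exception. A sketch of the narrower alternative (assuming only reflective failures can occur in the try block):

import java.io.IOException;

class ComparatorFactory {
  static Comparable<?> create(String className) throws IOException {
    try {
      return (Comparable<?>) Class.forName(className)
          .getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
      throw new IOException("Comparator class " + className + " is not instantiable", e);
    }
  }
}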
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 992ebbd..5674414 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -255,42 +255,43 @@ public class HFileBlock implements Cacheable {
*/
static final CacheableDeserializer<Cacheable> BLOCK_DESERIALIZER =
new CacheableDeserializer<Cacheable>() {
- public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)
+ @Override
+ public HFileBlock deserialize(ByteBuff buf, boolean reuse, MemoryType memType)
throws IOException {
- // The buf has the file block followed by block metadata.
- // Set limit to just before the BLOCK_METADATA_SPACE then rewind.
- buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();
- // Get a new buffer to pass the HFileBlock for it to 'own'.
- ByteBuff newByteBuff;
- if (reuse) {
- newByteBuff = buf.slice();
- } else {
- int len = buf.limit();
- newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));
- newByteBuff.put(0, buf, buf.position(), len);
- }
- // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.
- buf.position(buf.limit());
- buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);
- boolean usesChecksum = buf.get() == (byte)1;
- long offset = buf.getLong();
- int nextBlockOnDiskSize = buf.getInt();
- HFileBlock hFileBlock =
- new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);
- return hFileBlock;
- }
+ // The buf has the file block followed by block metadata.
+ // Set limit to just before the BLOCK_METADATA_SPACE then rewind.
+ buf.limit(buf.limit() - BLOCK_METADATA_SPACE).rewind();
+ // Get a new buffer to pass the HFileBlock for it to 'own'.
+ ByteBuff newByteBuff;
+ if (reuse) {
+ newByteBuff = buf.slice();
+ } else {
+ int len = buf.limit();
+ newByteBuff = new SingleByteBuff(ByteBuffer.allocate(len));
+ newByteBuff.put(0, buf, buf.position(), len);
+ }
+ // Read out the BLOCK_METADATA_SPACE content and shove into our HFileBlock.
+ buf.position(buf.limit());
+ buf.limit(buf.limit() + HFileBlock.BLOCK_METADATA_SPACE);
+ boolean usesChecksum = buf.get() == (byte) 1;
+ long offset = buf.getLong();
+ int nextBlockOnDiskSize = buf.getInt();
+ HFileBlock hFileBlock =
+ new HFileBlock(newByteBuff, usesChecksum, memType, offset, nextBlockOnDiskSize, null);
+ return hFileBlock;
+ }
- @Override
- public int getDeserialiserIdentifier() {
- return DESERIALIZER_IDENTIFIER;
- }
+ @Override
+ public int getDeserialiserIdentifier() {
+ return DESERIALIZER_IDENTIFIER;
+ }
- @Override
- public HFileBlock deserialize(ByteBuff b) throws IOException {
- // Used only in tests
- return deserialize(b, false, MemoryType.EXCLUSIVE);
- }
- };
+ @Override
+ public HFileBlock deserialize(ByteBuff b) throws IOException {
+ // Used only in tests
+ return deserialize(b, false, MemoryType.EXCLUSIVE);
+ }
+ };
private static final int DESERIALIZER_IDENTIFIER;
static {
@@ -1480,6 +1481,7 @@ public class HFileBlock implements Cacheable {
this(new FSDataInputStreamWrapper(istream), fileSize, null, null, fileContext);
}
+ @Override
public BlockIterator blockRange(final long startOffset, final long endOffset) {
final FSReader owner = this; // handle for inner class
return new BlockIterator() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index a16565e..1f591a0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -947,7 +947,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
Cell ret;
int cellBufSize = getKVBufSize();
- long seqId = 0l;
+ long seqId = 0L;
if (this.reader.shouldIncludeMemStoreTS()) {
seqId = currMemstoreTS;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
index 032c1ad..a2a35fe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
@@ -166,5 +166,6 @@ public interface HFileScanner extends Shipper, Closeable {
/**
* Close this HFile scanner and do necessary cleanup.
*/
+ @Override
void close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 284447a..d26b90a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -448,6 +448,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* @param cacheKey block's cache key
* @param buf block buffer
*/
+ @Override
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
cacheBlock(cacheKey, buf, false);
}
@@ -794,6 +795,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
return totalSize;
}
+ @Override
public int compareTo(BlockBucket that) {
return Long.compare(this.overflow(), that.overflow());
}
@@ -970,6 +972,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* <p>Includes: total accesses, hits, misses, evicted blocks, and runs
* of the eviction processes.
*/
+ @Override
public CacheStats getStats() {
return this.stats;
}
@@ -1096,6 +1099,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
return (long) Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
}
+ @Override
public void shutdown() {
if (victimHandler != null) {
victimHandler.shutdown();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
index 21b3bfd..32a277d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
@@ -90,6 +90,7 @@ public class LruCachedBlock implements HeapSize, Comparable<LruCachedBlock> {
return this.cachedTime;
}
+ @Override
public long heapSize() {
return size;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
index bed08fe..4c67c9a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java
@@ -103,6 +103,7 @@ public class LruCachedBlockQueue implements HeapSize {
* Total size of all elements in this queue.
* @return size of all elements currently in queue, in bytes
*/
+ @Override
public long heapSize() {
return heapSize;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
index 7bbb4ed..e31b1cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java
@@ -414,6 +414,7 @@ public final class BucketAllocator {
}
}
+ @Override
public String toString() {
StringBuilder sb = new StringBuilder(1024);
for (int i = 0; i < buckets.length; ++i) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index d07c30d..bd2b9c8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -36,6 +36,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
+import java.util.Objects;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
@@ -904,6 +905,7 @@ public class BucketCache implements BlockCache, HeapSize {
this.writerEnabled = false;
}
+ @Override
public void run() {
List<RAMQueueEntry> entries = new ArrayList<>();
try {
@@ -1395,10 +1397,22 @@ public class BucketCache implements BlockCache, HeapSize {
}
@Override
- public boolean equals(Object that) {
- return this == that;
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ BucketEntryGroup that = (BucketEntryGroup) o;
+ return totalSize == that.totalSize && bucketSize == that.bucketSize
+ && Objects.equals(queue, that.queue);
}
+ @Override
+ public int hashCode() {
+ return Objects.hash(queue, totalSize, bucketSize);
+ }
}
/**
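The BucketEntryGroup change replaces an identity-only equals with a field-based one, and once equals compares state, hashCode must hash the same state or hash-based collections misbehave; the java.util.Objects helpers keep the pair aligned. A generic sketch of the contract (fields are illustrative):

import java.util.Objects;

final class Group {
  private final long totalSize;
  private final long bucketSize;

  Group(long totalSize, long bucketSize) {
    this.totalSize = totalSize;
    this.bucketSize = bucketSize;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Group that = (Group) o;
    return totalSize == that.totalSize && bucketSize == that.bucketSize;
  }

  @Override
  public int hashCode() {
    return Objects.hash(totalSize, bucketSize); // same fields as equals
  }
}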
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
index fa39202..29721ab 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java
@@ -59,6 +59,7 @@ public class CachedEntryQueue {
}
queue = MinMaxPriorityQueue.orderedBy(new Comparator<Map.Entry<BlockCacheKey, BucketEntry>>() {
+ @Override
public int compare(Entry<BlockCacheKey, BucketEntry> entry1,
Entry<BlockCacheKey, BucketEntry> entry2) {
return BucketEntry.COMPARATOR.compare(entry1.getValue(), entry2.getValue());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java
index c340c06..915b82d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java
@@ -101,7 +101,7 @@ class BufferChain {
try {
long ret = channel.write(buffers, bufferOffset, bufCount);
if (ret > 0) {
- remaining -= ret;
+ remaining = (int) (remaining - ret);
}
return ret;
} finally {
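remaining -= ret compiled even though ret is a long and remaining an int, because compound assignment inserts a hidden narrowing cast; error-prone's NarrowingCompoundAssignment check asks for the cast to be spelled out, as the patch does. Standalone demonstration:

class Narrowing {
  public static void main(String[] args) {
    int remaining = 10;
    long ret = 4L;
    // remaining -= ret;                 // legal, but hides an implicit (int) cast
    remaining = (int) (remaining - ret); // same arithmetic with the cast visible
    System.out.println(remaining);       // 6
  }
}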
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FastPathBalancedQueueRpcExecutor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FastPathBalancedQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FastPathBalancedQueueRpcExecutor.java
index 9a01a0a..eaea34d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FastPathBalancedQueueRpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FastPathBalancedQueueRpcExecutor.java
@@ -91,6 +91,7 @@ public class FastPathBalancedQueueRpcExecutor extends BalancedQueueRpcExecutor {
this.fastPathHandlerStack = fastPathHandlerStack;
}
+ @Override
protected CallRunner getCallRunner() throws InterruptedException {
// Get a callrunner if one in the Q.
CallRunner cr = this.q.poll();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
index 096efa3..17bb362 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
@@ -193,14 +193,15 @@ abstract class ServerRpcConnection implements Closeable {
String className = header.getCellBlockCodecClass();
if (className == null || className.length() == 0) return;
try {
- this.codec = (Codec)Class.forName(className).newInstance();
+ this.codec = (Codec)Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new UnsupportedCellCodecException(className, e);
}
if (!header.hasCellBlockCompressorClass()) return;
className = header.getCellBlockCompressorClass();
try {
- this.compressionCodec = (CompressionCodec)Class.forName(className).newInstance();
+ this.compressionCodec =
+ (CompressionCodec)Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new UnsupportedCompressionCodecException(className, e);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java
index b14c934..13a3cf7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java
@@ -548,6 +548,7 @@ public class SimpleRpcServer extends RpcServer {
* The number of open RPC connections
* @return the number of open rpc connections
*/
+ @Override
public int getNumOpenConnections() {
return connectionManager.size();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
index 6b87194..5e97204 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
@@ -118,10 +118,8 @@ public class ClusterStatusPublisher extends ScheduledChore {
this.master = master;
this.messagePeriod = conf.getInt(STATUS_PUBLISH_PERIOD, DEFAULT_STATUS_PUBLISH_PERIOD);
try {
- this.publisher = publisherClass.newInstance();
- } catch (InstantiationException e) {
- throw new IOException("Can't create publisher " + publisherClass.getName(), e);
- } catch (IllegalAccessException e) {
+ this.publisher = publisherClass.getDeclaredConstructor().newInstance();
+ } catch (Exception e) {
throw new IOException("Can't create publisher " + publisherClass.getName(), e);
}
this.publisher.connect(conf);
@@ -166,7 +164,8 @@ public class ClusterStatusPublisher extends ScheduledChore {
.build());
}
- protected void cleanup() {
+ @Override
+ protected synchronized void cleanup() {
connected = false;
publisher.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
index db04c60..116d24e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
@@ -155,6 +155,7 @@ public class DeadServer {
}
}
+ @Override
public synchronized String toString() {
StringBuilder sb = new StringBuilder();
for (ServerName sn : deadServers.keySet()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 3ec70d3..a591025 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -69,11 +69,12 @@ public class HMasterCommandLine extends ServerCommandLine {
this.masterClass = masterClass;
}
+ @Override
protected String getUsage() {
return USAGE;
}
-
+ @Override
public int run(String args[]) throws Exception {
Options opt = new Options();
opt.addOption("localRegionServers", true,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
index f1a0593..917da08 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
@@ -156,6 +156,7 @@ public interface LoadBalancer extends Configurable, Stoppable, ConfigurationObse
* Notification that config has changed
* @param conf
*/
+ @Override
void onConfigurationChange(Configuration conf);
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
index 18aab6e..f25f3bf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java
@@ -52,6 +52,7 @@ public class MasterAnnotationReadingPriorityFunction extends AnnotationReadingPr
super(rpcServices, clz);
}
+ @Override
public int getPriority(RPCProtos.RequestHeader header, Message param, User user) {
// Yes this is copy pasted from the base class but it keeps from having to look in the
// annotatedQos table twice something that could get costly since this is called for
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index 10e1d0a..8396145 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -133,6 +133,7 @@ public class MasterCoprocessorHost
* @return An instance of MasterServices, an object NOT for general user-space Coprocessor
* consumption.
*/
+ @Override
public MasterServices getMasterServices() {
return this.masterServices;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
index 8a7c4e1..6c5d677 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java
@@ -89,7 +89,7 @@ public class MobCompactionChore extends ScheduledChore {
}
@Override
- protected void cleanup() {
+ protected synchronized void cleanup() {
super.cleanup();
pool.shutdown();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
index 2f2d536..29218e2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java
@@ -50,7 +50,7 @@ import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
public class RegionServerTracker extends ZKListener {
private static final Logger LOG = LoggerFactory.getLogger(RegionServerTracker.class);
- private NavigableMap<ServerName, RegionServerInfo> regionServers = new TreeMap<>();
+ private final NavigableMap<ServerName, RegionServerInfo> regionServers = new TreeMap<>();
private ServerManager serverManager;
private MasterServices server;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
index d1c1612..2b88fb1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
@@ -637,7 +637,7 @@ public class SplitLogManager {
public enum TerminationStatus {
IN_PROGRESS("in_progress"), SUCCESS("success"), FAILURE("failure"), DELETED("deleted");
- String statusMsg;
+ final String statusMsg;
TerminationStatus(String msg) {
statusMsg = msg;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index 3810403..8c59776 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -612,7 +612,7 @@ public class MergeTableRegionsProcedure
final TableDescriptor htd = env.getMasterServices().getTableDescriptors().get(getTableName());
for (String family: regionFs.getFamilies()) {
- final ColumnFamilyDescriptor hcd = htd.getColumnFamily(family.getBytes());
+ final ColumnFamilyDescriptor hcd = htd.getColumnFamily(Bytes.toBytes(family));
final Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(family);
if (storeFiles != null && storeFiles.size() > 0) {
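The family.getBytes() replacements are charset fixes: String.getBytes() with no argument uses the JVM's default charset, so the produced bytes can differ across platforms, while HBase's Bytes.toBytes always encodes UTF-8. An equivalent standalone sketch:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

class Encoding {
  public static void main(String[] args) {
    String family = "cf";
    byte[] platform = family.getBytes();                   // depends on file.encoding
    byte[] utf8 = family.getBytes(StandardCharsets.UTF_8); // what Bytes.toBytes does
    System.out.println(Arrays.equals(platform, utf8));     // not guaranteed on every JVM
  }
}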
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
index 1cf9a54..fa94495 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java
@@ -33,6 +33,7 @@ import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
@@ -864,7 +865,7 @@ public class RegionStates {
private final RegionStateNode regionNode;
private volatile Exception exception = null;
- private volatile int retries = 0;
+ private AtomicInteger retries = new AtomicInteger();
public RegionFailedOpen(final RegionStateNode regionNode) {
this.regionNode = regionNode;
@@ -879,11 +880,11 @@ public class RegionStates {
}
public int incrementAndGetRetries() {
- return ++this.retries;
+ return this.retries.incrementAndGet();
}
public int getRetries() {
- return retries;
+ return retries.get();
}
public void setException(final Exception exception) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
index b3fbc57..a72478c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java
@@ -86,7 +86,7 @@ public class FavoredStochasticBalancer extends StochasticLoadBalancer implements
}
@Override
- public void setMasterServices(MasterServices masterServices) {
+ public synchronized void setMasterServices(MasterServices masterServices) {
super.setMasterServices(masterServices);
fnm = masterServices.getFavoredNodesManager();
}
@@ -692,7 +692,8 @@ public class FavoredStochasticBalancer extends StochasticLoadBalancer implements
* implementation. For the misplaced regions, we assign a bogus server to it and AM takes care.
*/
@Override
- public List<RegionPlan> balanceCluster(Map<ServerName, List<RegionInfo>> clusterState) {
+ public synchronized List<RegionPlan> balanceCluster(Map<ServerName,
+ List<RegionInfo>> clusterState) {
if (this.services != null) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index a9b1bb7..07e9600 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -72,21 +72,22 @@ class RegionLocationFinder {
private CacheLoader<RegionInfo, HDFSBlocksDistribution> loader =
new CacheLoader<RegionInfo, HDFSBlocksDistribution>() {
- public ListenableFuture<HDFSBlocksDistribution> reload(final RegionInfo hri,
- HDFSBlocksDistribution oldValue) throws Exception {
- return executor.submit(new Callable<HDFSBlocksDistribution>() {
- @Override
- public HDFSBlocksDistribution call() throws Exception {
- return internalGetTopBlockLocation(hri);
- }
- });
- }
-
+ @Override
+ public ListenableFuture<HDFSBlocksDistribution> reload(final RegionInfo hri,
+ HDFSBlocksDistribution oldValue) throws Exception {
+ return executor.submit(new Callable<HDFSBlocksDistribution>() {
@Override
- public HDFSBlocksDistribution load(RegionInfo key) throws Exception {
- return internalGetTopBlockLocation(key);
+ public HDFSBlocksDistribution call() throws Exception {
+ return internalGetTopBlockLocation(hri);
}
- };
+ });
+ }
+
+ @Override
+ public HDFSBlocksDistribution load(RegionInfo key) throws Exception {
+ return internalGetTopBlockLocation(key);
+ }
+ };
// The cache for where regions are located.
private LoadingCache<RegionInfo, HDFSBlocksDistribution> cache = null;
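The reindented CacheLoader preserves Guava's asynchronous-refresh idiom: load computes synchronously on first access, while reload hands recomputation to an executor so refreshAfterWrite readers keep the stale value instead of blocking. A generic, self-contained sketch of the same idiom (a toy cache of string lengths):

import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

class AsyncRefreshCache {
  private final ListeningExecutorService pool =
      MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(2));

  final LoadingCache<String, Integer> cache = CacheBuilder.newBuilder()
      .refreshAfterWrite(5, TimeUnit.MINUTES)
      .build(new CacheLoader<String, Integer>() {
        @Override
        public Integer load(String key) {      // first access: synchronous
          return key.length();
        }

        @Override
        public ListenableFuture<Integer> reload(final String key, Integer oldValue) {
          return pool.submit(() -> load(key)); // refresh: off the caller's thread
        }
      });
}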
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
index f53683f..b0d3f19 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java
@@ -106,6 +106,7 @@ public class SimpleLoadBalancer extends BaseLoadBalancer {
}
+ @Override
public void setClusterLoad(Map<TableName, Map<ServerName, List<RegionInfo>>> clusterLoad){
serverLoadList = new ArrayList<>();
float sum = 0;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
index 6b4f943..dca9cbb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java
@@ -349,8 +349,8 @@ public class StochasticLoadBalancer extends BaseLoadBalancer {
// Allow turning this feature off if the locality cost is not going to
// be used in any computations.
RegionLocationFinder finder = null;
- if (this.localityCost != null && this.localityCost.getMultiplier() > 0
- || this.rackLocalityCost != null && this.rackLocalityCost.getMultiplier() > 0) {
+ if ((this.localityCost != null && this.localityCost.getMultiplier() > 0)
+ || (this.rackLocalityCost != null && this.rackLocalityCost.getMultiplier() > 0)) {
finder = this.regionFinder;
}
@@ -1401,7 +1401,7 @@ public class StochasticLoadBalancer extends BaseLoadBalancer {
// Now if we found a region load get the type of cost that was requested.
if (regionLoadList != null) {
- cost += getRegionLoadCost(regionLoadList);
+ cost = (long) (cost + getRegionLoadCost(regionLoadList));
}
}
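The added parentheses change nothing at runtime, since && already binds tighter than ||, but error-prone's OperatorPrecedence check wants mixed-operator expressions grouped explicitly. Standalone illustration:

class Precedence {
  public static void main(String[] args) {
    boolean a = true, b = false, c = false, d = true;
    System.out.println(a && b || c && d);     // parsed as (a && b) || (c && d): true
    System.out.println((a && b) || (c && d)); // identical result, intent explicit
  }
}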
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 775d8f9..21f8251 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -205,7 +205,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
Class<? extends FileCleanerDelegate> c = Class.forName(className).asSubclass(
FileCleanerDelegate.class);
@SuppressWarnings("unchecked")
- T cleaner = (T) c.newInstance();
+ T cleaner = (T) c.getDeclaredConstructor().newInstance();
cleaner.setConf(conf);
cleaner.init(this.params);
return cleaner;
@@ -360,7 +360,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends Schedu
}
@Override
- public void cleanup() {
+ public synchronized void cleanup() {
for (T lc : this.cleanersChain) {
try {
lc.stop("Exiting");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
index 640c8f7..08640a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
@@ -182,7 +182,7 @@ public class HFileCleaner extends CleanerChore<BaseHFileCleanerDelegate> {
}
@Override
- public void cleanup() {
+ public synchronized void cleanup() {
super.cleanup();
stopHFileDeleteThreads();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
index db364ee..9beed58 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
@@ -108,7 +108,7 @@ public class LogCleaner extends CleanerChore<BaseLogCleanerDelegate> {
}
@Override
- public void cleanup() {
+ public synchronized void cleanup() {
super.cleanup();
interruptOldWALsCleaner();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
index edf7642..b4c55f4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java
@@ -202,7 +202,7 @@ public final class LockProcedure extends Procedure<MasterProcedureEnv>
* @return false, so the procedure framework doesn't mark this procedure as a failure.
*/
@Override
- protected boolean setTimeoutFailure(final MasterProcedureEnv env) {
+ protected synchronized boolean setTimeoutFailure(final MasterProcedureEnv env) {
synchronized (event) {
if (LOG.isDebugEnabled()) LOG.debug("Timeout failure " + this.event);
if (!event.isReady()) { // Maybe unlock() awakened the event.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.java
index 09d05e6..535f288 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.java
@@ -71,7 +71,9 @@ public abstract class ProcedurePrepareLatch {
}
private static class NoopLatch extends ProcedurePrepareLatch {
+ @Override
protected void countDown(final Procedure proc) {}
+ @Override
public void await() throws IOException {}
}
@@ -80,6 +82,7 @@ public abstract class ProcedurePrepareLatch {
private IOException exception = null;
+ @Override
protected void countDown(final Procedure proc) {
if (proc.hasException()) {
exception = proc.getException().unwrapRemoteIOException();
@@ -87,6 +90,7 @@ public abstract class ProcedurePrepareLatch {
latch.countDown();
}
+ @Override
public void await() throws IOException {
try {
latch.await();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
index a497bf4..59a0c31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
@@ -71,7 +71,7 @@ public class TestRegionReplicas {
private static final int NB_SERVERS = 1;
private static Table table;
- private static final byte[] row = "TestRegionReplicas".getBytes();
+ private static final byte[] row = Bytes.toBytes("TestRegionReplicas");
private static HRegionInfo hriPrimary;
private static HRegionInfo hriSecondary;
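The getBytes() to Bytes.toBytes() conversions in this commit remove a reliance on the platform default charset (findbugs flags this as DM_DEFAULT_ENCODING): the no-argument String.getBytes() encodes with whatever charset the JVM was started with, which varies by host, while HBase's Bytes.toBytes(String) always encodes UTF-8. A small sketch of the difference:

// Sketch: the no-argument String.getBytes() uses the JVM default charset,
// so the same row key can serialize differently on differently configured
// hosts; Bytes.toBytes(String) pins the encoding to UTF-8.
import java.nio.charset.StandardCharsets;

public class CharsetSketch {
  public static void main(String[] args) {
    String row = "TestRegionReplicas";
    byte[] platformDependent = row.getBytes();              // default charset
    byte[] stable = row.getBytes(StandardCharsets.UTF_8);   // what Bytes.toBytes does
    System.out.println(platformDependent.length + " vs " + stable.length);
  }
}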
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
index d1bf773..9a02a9d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java
@@ -111,7 +111,7 @@ public class TestRegionReplicasWithModifyTable {
List<HRegion> onlineRegions2 = getSecondaryRS().getRegions(tableName);
List<HRegion> onlineRegions3 = getTertiaryRS().getRegions(tableName);
int totalRegions = onlineRegions.size() + onlineRegions2.size() + onlineRegions3.size();
- assertEquals("the number of regions should be more than 1", totalRegions, 3);
+ assertEquals("the number of regions should be more than 1", 3, totalRegions);
} finally {
disableAndDeleteTable(tableName);
}
@@ -132,7 +132,7 @@ public class TestRegionReplicasWithModifyTable {
List<HRegion> onlineRegions2 = getSecondaryRS().getRegions(tableName);
List<HRegion> onlineRegions3 = getTertiaryRS().getRegions(tableName);
int totalRegions = onlineRegions.size() + onlineRegions2.size() + onlineRegions3.size();
- assertEquals("the number of regions should be equal to 30", totalRegions, 30);
+ assertEquals("the number of regions should be equal to 30", 30, totalRegions);
} finally {
disableAndDeleteTable(tableName);
}
@@ -148,7 +148,7 @@ public class TestRegionReplicasWithModifyTable {
List<HRegion> onlineRegions2 = getSecondaryRS().getRegions(tableName);
List<HRegion> onlineRegions3 = getTertiaryRS().getRegions(tableName);
int totalRegions = onlineRegions.size() + onlineRegions2.size() + onlineRegions3.size();
- assertEquals("the number of regions should be 3", totalRegions, 3);
+ assertEquals("the number of regions should be 3", 3, totalRegions);
} finally {
disableAndDeleteTable(tableName);
}
@@ -164,7 +164,7 @@ public class TestRegionReplicasWithModifyTable {
List<HRegion> onlineRegions2 = getSecondaryRS().getRegions(tableName);
List<HRegion> onlineRegions3 = getTertiaryRS().getRegions(tableName);
int totalRegions = onlineRegions.size() + onlineRegions2.size() + onlineRegions3.size();
- assertEquals("the number of regions should be reduced to 2", totalRegions, 2);
+ assertEquals("the number of regions should be reduced to 2", 2, totalRegions);
} finally {
disableAndDeleteTable(tableName);
}
@@ -181,7 +181,7 @@ public class TestRegionReplicasWithModifyTable {
List<HRegion> onlineRegions2 = getSecondaryRS().getRegions(tableName);
List<HRegion> onlineRegions3 = getTertiaryRS().getRegions(tableName);
int totalRegions = onlineRegions.size() + onlineRegions2.size() + onlineRegions3.size();
- assertEquals("the number of regions should be reduced to 40", totalRegions, 40);
+ assertEquals("the number of regions should be reduced to 40", 40, totalRegions);
} finally {
disableAndDeleteTable(tableName);
}
@@ -198,7 +198,7 @@ public class TestRegionReplicasWithModifyTable {
List<HRegion> onlineRegions2 = getSecondaryRS().getRegions(tableName);
List<HRegion> onlineRegions3 = getTertiaryRS().getRegions(tableName);
int totalRegions = onlineRegions.size() + onlineRegions2.size() + onlineRegions3.size();
- assertEquals("the number of regions should be equal to 45", totalRegions, 3 * 15);
+ assertEquals("the number of regions should be equal to 45", 3 * 15, totalRegions);
} finally {
disableAndDeleteTable(tableName);
}
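The assertEquals swaps above (and the assertArrayEquals and assertSame swaps later in this commit) are about JUnit's parameter contract, assertEquals(message, expected, actual). Reversed arguments still pass and fail at the same times, but a failure message reports the roles backwards. A sketch:

// Sketch: with the operands reversed, a failure here would print
// "expected:<3> but was:<30>" even though 30 was the expectation,
// pointing the reader at the wrong value.
import static org.junit.Assert.assertEquals;

public class AssertOrderSketch {
  public static void main(String[] args) {
    int totalRegions = 3;
    assertEquals("the number of regions should be 3", 3, totalRegions);
    // assertEquals("...", totalRegions, 30);  // reversed: misleading report
  }
}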
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAccounting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAccounting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAccounting.java
index 0122674..6172d61 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAccounting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAccounting.java
@@ -37,7 +37,7 @@ public class TestRegionServerAccounting {
// try for default cases
RegionServerAccounting regionServerAccounting = new RegionServerAccounting(conf);
MemStoreSize memstoreSize =
- new MemStoreSize((long) (3l * 1024l * 1024l * 1024l), (long) (1l * 1024l * 1024l * 1024l));
+ new MemStoreSize(3L * 1024 * 1024 * 1024, 1L * 1024 * 1024 * 1024);
regionServerAccounting.incGlobalMemStoreSize(memstoreSize);
assertEquals(FlushType.ABOVE_ONHEAP_HIGHER_MARK,
regionServerAccounting.isAboveHighWaterMark());
@@ -50,7 +50,7 @@ public class TestRegionServerAccounting {
// try for default cases
RegionServerAccounting regionServerAccounting = new RegionServerAccounting(conf);
MemStoreSize memstoreSize =
- new MemStoreSize((long) (3l * 1024l * 1024l * 1024l), (long) (1l * 1024l * 1024l * 1024l));
+ new MemStoreSize(3L * 1024 * 1024 * 1024, 1L * 1024 * 1024 * 1024);
regionServerAccounting.incGlobalMemStoreSize(memstoreSize);
assertEquals(FlushType.ABOVE_ONHEAP_LOWER_MARK,
regionServerAccounting.isAboveLowWaterMark());
@@ -60,12 +60,12 @@ public class TestRegionServerAccounting {
public void testOffheapMemstoreHigherWaterMarkLimitsDueToDataSize() {
Configuration conf = HBaseConfiguration.create();
// setting 1G as offheap data size
- conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1l * 1024l));
+ conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1L * 1024));
// try for default cases
RegionServerAccounting regionServerAccounting = new RegionServerAccounting(conf);
// this will breach offheap limit as data size is higher and not due to heap size
MemStoreSize memstoreSize =
- new MemStoreSize((long) (3l * 1024l * 1024l * 1024l), (long) (1l * 1024l * 1024l * 1024l));
+ new MemStoreSize(3L * 1024 * 1024 * 1024, 1L * 1024 * 1024 * 1024);
regionServerAccounting.incGlobalMemStoreSize(memstoreSize);
assertEquals(FlushType.ABOVE_OFFHEAP_HIGHER_MARK,
regionServerAccounting.isAboveHighWaterMark());
@@ -76,12 +76,12 @@ public class TestRegionServerAccounting {
Configuration conf = HBaseConfiguration.create();
conf.setFloat(MemorySizeUtil.MEMSTORE_SIZE_KEY, 0.2f);
// setting 1G as offheap data size
- conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1l * 1024l));
+ conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1L * 1024));
// try for default cases
RegionServerAccounting regionServerAccounting = new RegionServerAccounting(conf);
// this will breach higher limit as heap size is higher and not due to offheap size
MemStoreSize memstoreSize =
- new MemStoreSize((long) (3l * 1024l * 1024l), (long) (2l * 1024l * 1024l * 1024l));
+ new MemStoreSize(3L * 1024 * 1024, 2L * 1024 * 1024 * 1024);
regionServerAccounting.incGlobalMemStoreSize(memstoreSize);
assertEquals(FlushType.ABOVE_ONHEAP_HIGHER_MARK,
regionServerAccounting.isAboveHighWaterMark());
@@ -91,12 +91,12 @@ public class TestRegionServerAccounting {
public void testOffheapMemstoreLowerWaterMarkLimitsDueToDataSize() {
Configuration conf = HBaseConfiguration.create();
// setting 1G as offheap data size
- conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1l * 1024l));
+ conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1L * 1024));
// try for default cases
RegionServerAccounting regionServerAccounting = new RegionServerAccounting(conf);
// this will breach offheap limit as data size is higher and not due to heap size
MemStoreSize memstoreSize =
- new MemStoreSize((long) (3l * 1024l * 1024l * 1024l), (long) (1l * 1024l * 1024l * 1024l));
+ new MemStoreSize(3L * 1024 * 1024 * 1024, 1L * 1024 * 1024 * 1024);
regionServerAccounting.incGlobalMemStoreSize(memstoreSize);
assertEquals(FlushType.ABOVE_OFFHEAP_LOWER_MARK,
regionServerAccounting.isAboveLowWaterMark());
@@ -107,12 +107,12 @@ public class TestRegionServerAccounting {
Configuration conf = HBaseConfiguration.create();
conf.setFloat(MemorySizeUtil.MEMSTORE_SIZE_KEY, 0.2f);
// setting 1G as offheap data size
- conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1l * 1024l));
+ conf.setLong(MemorySizeUtil.OFFHEAP_MEMSTORE_SIZE_KEY, (1L * 1024));
// try for default cases
RegionServerAccounting regionServerAccounting = new RegionServerAccounting(conf);
// this will breach higher limit as heap size is higher and not due to offheap size
MemStoreSize memstoreSize =
- new MemStoreSize((long) (3l * 1024l * 1024l), (long) (2l * 1024l * 1024l * 1024l));
+ new MemStoreSize(3L * 1024 * 1024, 2L * 1024 * 1024 * 1024);
regionServerAccounting.incGlobalMemStoreSize(memstoreSize);
assertEquals(FlushType.ABOVE_ONHEAP_LOWER_MARK,
regionServerAccounting.isAboveLowWaterMark());
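Two things are fixed in the MemStoreSize hunks above. The lowercase long suffix 'l' becomes 'L' because 'l' is easily misread as the digit 1, and the per-factor suffixes and redundant (long) casts go away because a single leading long operand is enough to keep the whole product in 64-bit arithmetic. A sketch of the overflow that the leading L prevents:

// Sketch of both issues: prefer 'L' over 'l' for readability, and note
// that only the first factor needs to be long for the product to be
// computed in 64 bits.
public class LiteralSketch {
  public static void main(String[] args) {
    long okay = 3L * 1024 * 1024 * 1024;       // 3221225472
    long overflowed = 3 * 1024 * 1024 * 1024;  // int math first: -1073741824
    System.out.println(okay + " vs " + overflowed);
  }
}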
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index ea27ee5..b63b844 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -32,13 +32,10 @@ import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.master.HMaster;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
@@ -51,6 +48,11 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
+
/**
* Tests on the region server, without the master.
@@ -61,7 +63,7 @@ public class TestRegionServerNoMaster {
private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerNoMaster.class);
private static final int NB_SERVERS = 1;
private static Table table;
- private static final byte[] row = "ee".getBytes();
+ private static final byte[] row = Bytes.toBytes("ee");
private static HRegionInfo hri;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index c6dce67..afbb48d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -117,7 +117,8 @@ public class TestScanner {
// Increment the least significant character so we get to next row.
secondRowBytes[START_KEY_BYTES.length - 1]++;
thirdRowBytes = START_KEY_BYTES.clone();
- thirdRowBytes[START_KEY_BYTES.length - 1] += 2;
+ thirdRowBytes[START_KEY_BYTES.length - 1] =
+ (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
col1 = Bytes.toBytes("column1");
}
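The rewrite of the += on a byte above makes an implicit narrowing cast explicit (error-prone calls this NarrowingCompoundAssignment): compound assignment quietly inserts a (byte) cast that the expanded form must spell out. A sketch:

// Sketch of the compound-assignment subtlety: b += 2 on a byte compiles
// because the language inserts a hidden (byte) cast, while the equivalent
// b = b + 2 does not compile. The rewrite just makes that hidden
// narrowing cast visible.
public class NarrowingSketch {
  public static void main(String[] args) {
    byte b = 126;
    b += 2;               // hidden cast: wraps to -128 without any warning
    byte c = 126;
    c = (byte) (c + 2);   // same result, but the cast is explicit
    System.out.println(b + " " + c);
  }
}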
@@ -589,6 +590,7 @@ public class TestScanner {
if (flushIndex == count) {
LOG.info("Starting flush at flush index " + flushIndex);
Thread t = new Thread() {
+ @Override
public void run() {
try {
region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
index 0c014fd..63ea993 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java
@@ -233,6 +233,7 @@ public class TestScannerWithBulkload {
// Create a scanner and then do bulk load
final CountDownLatch latch = new CountDownLatch(1);
new Thread() {
+ @Override
public void run() {
try {
Put put1 = new Put(Bytes.toBytes("row5"));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
index 613282f..8519c3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
@@ -76,7 +76,7 @@ public class TestSplitLogWorker {
private SplitLogWorker slw;
private ExecutorService executorService;
- class DummyServer implements Server {
+ static class DummyServer implements Server {
private ZKWatcher zkw;
private Configuration conf;
private CoordinatedStateManager cm;
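Making nested helpers like DummyServer here (and RandomTestData, TrtUpdateRunnable, TestFlusher, and BulkLoadHelper later in this commit) static answers findbugs' SIC_INNER_SHOULD_BE_STATIC: a non-static inner class keeps a hidden reference to its enclosing instance. A sketch of the difference, with illustrative names:

// Sketch: a non-static inner class carries a hidden Outer.this reference,
// so every instance pins its enclosing object in memory and cannot be
// constructed without one.
public class OuterSketch {
  static class Standalone {   // no hidden reference, instantiable directly
    int value = 1;
  }

  class Bound {               // compiles as if it took an OuterSketch argument
    int value = 2;
  }

  public static void main(String[] args) {
    Standalone s = new Standalone();           // fine on its own
    Bound b = new OuterSketch().new Bound();   // needs an enclosing instance
    System.out.println(s.value + b.value);
  }
}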
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index ab2d4b4..c80fc2e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -673,7 +673,7 @@ public class TestSplitTransactionOnCluster {
FileSystem fs = TESTING_UTIL.getDFSCluster().getFileSystem();
Map<String, Path> storefiles =
FSUtils.getTableStoreFilePathMap(null, fs, rootDir, tableName);
- assertEquals("Expected nothing but found " + storefiles.toString(), storefiles.size(), 0);
+ assertEquals("Expected nothing but found " + storefiles.toString(), 0, storefiles.size());
// find a splittable region. Refresh the regions list
regions = cluster.getRegions(tableName);
@@ -696,8 +696,8 @@ public class TestSplitTransactionOnCluster {
HBaseFsck.debugLsr(conf, new Path("/"));
Map<String, Path> storefilesAfter =
FSUtils.getTableStoreFilePathMap(null, fs, rootDir, tableName);
- assertEquals("Expected nothing but found " + storefilesAfter.toString(),
- storefilesAfter.size(), 0);
+ assertEquals("Expected nothing but found " + storefilesAfter.toString(), 0,
+ storefilesAfter.size());
hri = region.getRegionInfo(); // split parent
AssignmentManager am = cluster.getMaster().getAssignmentManager();
@@ -755,7 +755,7 @@ public class TestSplitTransactionOnCluster {
region.flush(true);
HStore store = region.getStore(Bytes.toBytes("f"));
Collection<HStoreFile> storefiles = store.getStorefiles();
- assertEquals(storefiles.size(), 1);
+ assertEquals(1, storefiles.size());
assertFalse(region.hasReferences());
Path referencePath =
region.getRegionFileSystem().splitStoreFile(region.getRegionInfo(), "f",
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index 1f5db50..2c679d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -164,6 +164,7 @@ public class TestStoreScanner {
new KeyValueScanner[] { new KeyValueScanFixture(CellComparator.getInstance(), CELL_GRID) }));
}
+ @Override
protected void resetKVHeap(List<? extends KeyValueScanner> scanners,
CellComparator comparator) throws IOException {
if (count == null) {
@@ -172,6 +173,7 @@ public class TestStoreScanner {
heap = new KeyValueHeapWithCount(scanners, comparator, count);
}
+ @Override
protected boolean trySkipToNextRow(Cell cell) throws IOException {
boolean optimized = super.trySkipToNextRow(cell);
LOG.info("Cell=" + cell + ", nextIndex=" + CellUtil.toString(getNextIndexedKey(), false)
@@ -182,6 +184,7 @@ public class TestStoreScanner {
return optimized;
}
+ @Override
protected boolean trySkipToNextColumn(Cell cell) throws IOException {
boolean optimized = super.trySkipToNextColumn(cell);
LOG.info("Cell=" + cell + ", nextIndex=" + CellUtil.toString(getNextIndexedKey(), false)
@@ -227,6 +230,7 @@ public class TestStoreScanner {
new KeyValueScanFixture(CellComparator.getInstance(), CELL_WITH_VERSIONS) }));
}
+ @Override
protected boolean trySkipToNextColumn(Cell cell) throws IOException {
boolean optimized = super.trySkipToNextColumn(cell);
LOG.info("Cell=" + cell + ", nextIndex=" + CellUtil.toString(getNextIndexedKey(), false)
@@ -255,6 +259,7 @@ public class TestStoreScanner {
new KeyValueScanFixture(CellComparator.getInstance(), CELL_WITH_VERSIONS) }));
}
+ @Override
protected boolean trySkipToNextColumn(Cell cell) throws IOException {
boolean optimized = super.trySkipToNextColumn(cell);
LOG.info("Cell=" + cell + ", nextIndex=" + CellUtil.toString(getNextIndexedKey(), false)
@@ -884,6 +889,7 @@ public class TestStoreScanner {
try {
final long now = System.currentTimeMillis();
EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() {
+ @Override
public long currentTime() {
return now;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncTimeRangeTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncTimeRangeTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncTimeRangeTracker.java
index 52c31d9..273535b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncTimeRangeTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncTimeRangeTracker.java
@@ -32,10 +32,12 @@ public class TestSyncTimeRangeTracker extends TestSimpleTimeRangeTracker {
private static final int NUM_KEYS = 10000000;
private static final int NUM_OF_THREADS = 20;
+ @Override
protected TimeRangeTracker getTimeRangeTracker() {
return TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
}
+ @Override
protected TimeRangeTracker getTimeRangeTracker(long min, long max) {
return TimeRangeTracker.create(TimeRangeTracker.Type.SYNC, min, max);
}
@@ -77,7 +79,7 @@ public class TestSyncTimeRangeTracker extends TestSimpleTimeRangeTracker {
assertTrue(trr.getMin() == 0);
}
- class RandomTestData {
+ static class RandomTestData {
private long[] keys = new long[NUM_KEYS];
private long min = Long.MAX_VALUE;
private long max = 0;
@@ -107,7 +109,7 @@ public class TestSyncTimeRangeTracker extends TestSimpleTimeRangeTracker {
}
}
- class TrtUpdateRunnable implements Runnable {
+ static class TrtUpdateRunnable implements Runnable {
private TimeRangeTracker trt;
private RandomTestData data;
@@ -116,6 +118,7 @@ public class TestSyncTimeRangeTracker extends TestSimpleTimeRangeTracker {
this.data = data;
}
+ @Override
public void run() {
for (long key : data.keys) {
trt.includeTimestamp(key);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
index 62d22d2..70bdc49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java
@@ -258,6 +258,7 @@ public class TestWALLockup {
// in HBASE-14317. Flush hangs trying to get sequenceid because the ringbuffer is held up
// by the zigzaglatch waiting on syncs to come home.
Thread t = new Thread ("Flusher") {
+ @Override
public void run() {
try {
if (region.getMemStoreSize() <= 0) {
@@ -444,6 +445,7 @@ public class TestWALLockup {
dodgyWAL2.append(region.getRegionInfo(), key, edit, true);
Thread t = new Thread("Sync") {
+ @Override
public void run() {
try {
dodgyWAL2.sync();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
index f3bd7ee..290f71a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java
@@ -581,7 +581,7 @@ public class TestWalAndCompactingMemStoreFlush {
// The total memstores size should be empty
assertEquals(0, totalMemstoreSizePhaseV);
// Because there is nothing in any memstore the WAL's LSN should be -1
- assertEquals(smallestSeqInRegionCurrentMemstorePhaseV, HConstants.NO_SEQNUM);
+ assertEquals(HConstants.NO_SEQNUM, smallestSeqInRegionCurrentMemstorePhaseV);
// What happens when we hit the memstore limit, but we are not able to find
// any Column Family above the threshold?
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 7cdd24d..13c7a6b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -22,6 +22,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -110,7 +111,8 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
final Class<? extends StoreFileListGenerator> fileGenClass,
final int inMmax,
final int inMin,
- final float inRatio) throws IllegalAccessException, InstantiationException {
+ final float inRatio) throws IllegalAccessException, InstantiationException,
+ NoSuchMethodException, InvocationTargetException {
super(PerfTestCompactionPolicies.class);
this.fileGenClass = fileGenClass;
this.max = inMmax;
@@ -138,7 +140,7 @@ public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
new Class[] {Configuration.class, StoreConfigInformation.class },
new Object[] {configuration, store });
- this.generator = fileGenClass.newInstance();
+ this.generator = fileGenClass.getDeclaredConstructor().newInstance();
// Used for making paths
}
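The reflection change above swaps Class.newInstance(), deprecated since Java 9, for getDeclaredConstructor().newInstance(). The old method rethrows any checked exception from the constructor without declaring it; the replacement wraps it in InvocationTargetException, which is why the constructor above gained NoSuchMethodException and InvocationTargetException in its throws clause. A sketch:

// Sketch of the replacement pattern; StringBuilder stands in for any class
// with an accessible no-arg constructor.
import java.lang.reflect.InvocationTargetException;

public class ReflectionSketch {
  public static <T> T create(Class<T> clazz)
      throws NoSuchMethodException, InvocationTargetException,
             InstantiationException, IllegalAccessException {
    return clazz.getDeclaredConstructor().newInstance();
  }

  public static void main(String[] args) throws Exception {
    StringBuilder sb = create(StringBuilder.class);
    System.out.println(sb.append("ok"));
  }
}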
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index 932664b..f8df870 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -92,11 +92,13 @@ public class TestCompactor {
writers.add(realWriter);
StoreFileWriter writer = mock(StoreFileWriter.class);
doAnswer(new Answer<Object>() {
+ @Override
public Object answer(InvocationOnMock invocation) {
return realWriter.kvs.add((KeyValue) invocation.getArgument(0));
}
}).when(writer).append(any());
doAnswer(new Answer<Object>() {
+ @Override
public Object answer(InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
return realWriter.data.put((byte[]) args[0], (byte[]) args[1]);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
index 3ae49c0..b43a89d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
@@ -242,7 +242,7 @@ public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
}
}
- private class AlwaysIncludeAndSeekNextRowFilter extends FilterBase {
+ private static class AlwaysIncludeAndSeekNextRowFilter extends FilterBase {
@Override
public ReturnCode filterKeyValue(final Cell c) throws IOException {
@@ -287,7 +287,7 @@ public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
}
}
- private class AlwaysIncludeFilter extends FilterBase {
+ private static class AlwaysIncludeFilter extends FilterBase {
@Override
public ReturnCode filterKeyValue(final Cell c) throws IOException {
return ReturnCode.INCLUDE;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
index 093a512..009cca0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
@@ -289,7 +289,7 @@ public abstract class AbstractTestFSWAL {
addEdits(wal, hri2, t2, 2, mvcc, scopes2);
// get the regions to flush, it should still read region1.
regionsToFlush = wal.findRegionsToForceFlush();
- assertEquals(regionsToFlush.length, 1);
+ assertEquals(1, regionsToFlush.length);
assertEquals(hri1.getEncodedNameAsBytes(), regionsToFlush[0]);
// flush region 1, and roll the wal file. Only last wal which has entries for region1 should
// remain.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index ededcf3..6b55adc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -705,9 +705,8 @@ public abstract class AbstractTestWALReplay {
try {
region.flush(true);
fail("Injected exception hasn't been thrown");
- } catch (Throwable t) {
- LOG.info("Expected simulated exception when flushing region,"
- + t.getMessage());
+ } catch (IOException e) {
+ LOG.info("Expected simulated exception when flushing region, {}", e.getMessage());
// simulated to abort server
Mockito.doReturn(true).when(rsServices).isAborted();
region.setClosing(false); // region normally does not accept writes after
@@ -928,8 +927,7 @@ public abstract class AbstractTestWALReplay {
* testcase for https://issues.apache.org/jira/browse/HBASE-15252
*/
@Test
- public void testDatalossWhenInputError() throws IOException, InstantiationException,
- IllegalAccessException {
+ public void testDatalossWhenInputError() throws Exception {
final TableName tableName = TableName.valueOf("testDatalossWhenInputError");
final HRegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
final Path basedir = FSUtils.getTableDir(this.hbaseRootDir, tableName);
@@ -964,7 +962,7 @@ public abstract class AbstractTestWALReplay {
Class<? extends AbstractFSWALProvider.Reader> logReaderClass =
conf.getClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,
AbstractFSWALProvider.Reader.class);
- AbstractFSWALProvider.Reader reader = logReaderClass.newInstance();
+ AbstractFSWALProvider.Reader reader = logReaderClass.getDeclaredConstructor().newInstance();
reader.init(this.fs, editFile, conf, stream);
final long headerLength = stream.getPos();
reader.close();
@@ -1108,7 +1106,7 @@ public abstract class AbstractTestWALReplay {
// Flusher used in this test. Keep count of how often we are called and
// actually run the flush inside here.
- class TestFlusher implements FlushRequester {
+ static class TestFlusher implements FlushRequester {
private HRegion r;
@Override
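The reworked catch block near the top of the AbstractTestWALReplay diff bundles two fixes: catching IOException instead of Throwable (which would also swallow Errors, including a failing assertion), and SLF4J's {} placeholder instead of string concatenation, so the message is only rendered when the log level is enabled. A sketch:

// Sketch of both fixes in one place, illustrative names.
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CatchSketch {
  private static final Logger LOG = LoggerFactory.getLogger(CatchSketch.class);

  static void flush() throws IOException {
    throw new IOException("simulated");
  }

  public static void main(String[] args) {
    try {
      flush();
    } catch (IOException e) {  // not Throwable: Errors still propagate
      LOG.info("Expected simulated exception when flushing region, {}", e.getMessage());
    }
  }
}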
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.java
index 2aebf2b..5a61370 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.java
@@ -39,7 +39,7 @@ public class InstrumentedLogWriter extends ProtobufLogWriter {
public void append(Entry entry) throws IOException {
super.append(entry);
if (activateFailure &&
- Bytes.equals(entry.getKey().getEncodedRegionName(), "break".getBytes())) {
+ Bytes.equals(entry.getKey().getEncodedRegionName(), Bytes.toBytes("break"))) {
System.out.println(getClass().getName() + ": I will throw an exception now...");
throw(new IOException("This exception is instrumented and should only be thrown for testing"
));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
index ed71123..0f5fda0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java
@@ -220,8 +220,8 @@ public class TestNamespaceReplication extends TestReplicationBase {
if (res.isEmpty()) {
LOG.info("Row not available");
} else {
- assertEquals(res.size(), 1);
- assertArrayEquals(res.value(), val);
+ assertEquals(1, res.size());
+ assertArrayEquals(val, res.value());
break;
}
Thread.sleep(SLEEP_TIME);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
index bacda63..98b3fda 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
@@ -582,8 +582,8 @@ public class TestPerTableCFReplication {
replicatedToAll = false;
break;
} else {
- assertEquals(res.size(), 1);
- assertArrayEquals(res.value(), val);
+ assertEquals(1, res.size());
+ assertArrayEquals(val, res.value());
}
}
if (replicatedToAll) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
index afb975d..3a7a575 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
@@ -270,7 +270,7 @@ public class TestReplicationBase {
LOG.info("Row not available");
Thread.sleep(SLEEP_TIME);
} else {
- assertArrayEquals(res.value(), row);
+ assertArrayEquals(row, res.value());
break;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
index 1675496..7b9dea4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
@@ -84,7 +84,7 @@ public class TestReplicationDisableInactivePeer extends TestReplicationBase {
LOG.info("Row not available");
Thread.sleep(SLEEP_TIME * NB_RETRIES);
} else {
- assertArrayEquals(res.value(), row);
+ assertArrayEquals(row, res.value());
return;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
index 30cd860..2837045 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
@@ -111,6 +111,7 @@ public class TestReplicationKillRS extends TestReplicationBase {
private static Thread killARegionServer(final HBaseTestingUtility utility,
final long timeout, final int rs) {
Thread killer = new Thread() {
+ @Override
public void run() {
try {
Thread.sleep(timeout);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index cb47827..f46a7b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -209,7 +209,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
LOG.info("Row not available");
Thread.sleep(SLEEP_TIME);
} else {
- assertArrayEquals(res.value(), row);
+ assertArrayEquals(row, res.value());
return;
}
}
@@ -262,7 +262,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
LOG.info("Row not available");
Thread.sleep(SLEEP_TIME * i);
} else {
- assertArrayEquals(res.value(), row);
+ assertArrayEquals(row, res.value());
break;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
index 0a602ad..b473277 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
@@ -277,14 +277,14 @@ public class TestReplicationSyncUpTool extends TestReplicationBase {
// delete half of the rows
for (int i = 0; i < NB_ROWS_IN_BATCH / 2; i++) {
String rowKey = "row" + i;
- Delete del = new Delete(rowKey.getBytes());
+ Delete del = new Delete(Bytes.toBytes(rowKey));
list.add(del);
}
ht1Source.delete(list);
for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
String rowKey = "row" + i;
- Delete del = new Delete(rowKey.getBytes());
+ Delete del = new Delete(Bytes.toBytes(rowKey));
list.add(del);
}
ht2Source.delete(list);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index 98f11f7..b2ecb67 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -187,7 +187,7 @@ public class TestReplicationWithTags {
LOG.info("Row not available");
Thread.sleep(SLEEP_TIME);
} else {
- assertArrayEquals(res.value(), ROW);
+ assertArrayEquals(ROW, res.value());
assertEquals(1, TestCoprocessorForTagsAtSink.tags.size());
Tag tag = TestCoprocessorForTagsAtSink.tags.get(0);
assertEquals(TAG_TYPE, tag.getType());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
index df84515..e69d84c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java
@@ -143,8 +143,8 @@ public class TestRegionReplicaReplicationEndpoint {
assertNotNull(peerConfig);
assertEquals(peerConfig.getClusterKey(), ZKConfig.getZooKeeperClusterKey(
HTU.getConfiguration()));
- assertEquals(peerConfig.getReplicationEndpointImpl(),
- RegionReplicaReplicationEndpoint.class.getName());
+ assertEquals(RegionReplicaReplicationEndpoint.class.getName(),
+ peerConfig.getReplicationEndpointImpl());
admin.close();
}
@@ -190,8 +190,8 @@ public class TestRegionReplicaReplicationEndpoint {
assertNotNull(peerConfig);
assertEquals(peerConfig.getClusterKey(), ZKConfig.getZooKeeperClusterKey(
HTU.getConfiguration()));
- assertEquals(peerConfig.getReplicationEndpointImpl(),
- RegionReplicaReplicationEndpoint.class.getName());
+ assertEquals(RegionReplicaReplicationEndpoint.class.getName(),
+ peerConfig.getReplicationEndpointImpl());
admin.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
index d7044dc..375e64e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java
@@ -31,27 +31,61 @@ import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.ipc.RpcServer;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.*;
-import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
-import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.TestReplicationBase;
-import org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
-
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.junit.Test;
import org.junit.Ignore;
+import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
+import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearRegionBlockCacheResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse;
+
@Category(MediumTests.class)
@Ignore("Flaky, needs to be rewritten, see HBASE-19125")
@@ -88,7 +122,7 @@ public class TestReplicator extends TestReplicationBase {
// have to be replicated separately.
final byte[] valueBytes = new byte[8 *1024];
for (int i = 0; i < NUM_ROWS; i++) {
- htable1.put(new Put(("row"+Integer.toString(i)).getBytes())
+ htable1.put(new Put(Bytes.toBytes("row"+Integer.toString(i)))
.addColumn(famName, null, valueBytes)
);
}
@@ -140,7 +174,7 @@ public class TestReplicator extends TestReplicationBase {
// have to be replicated separately.
final byte[] valueBytes = new byte[8 *1024];
for (int i = 0; i < NUM_ROWS; i++) {
- htable1.put(new Put(("row"+Integer.toString(i)).getBytes())
+ htable1.put(new Put(Bytes.toBytes("row"+Integer.toString(i)))
.addColumn(famName, null, valueBytes)
);
}
@@ -409,7 +443,7 @@ public class TestReplicator extends TestReplicationBase {
@Override
public ClearRegionBlockCacheResponse clearRegionBlockCache(RpcController controller,
- ClearRegionBlockCacheRequest request) throws ServiceException {
+ ClearRegionBlockCacheRequest request) throws ServiceException {
return delegate.clearRegionBlockCache(controller, request);
}
}
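The long explicit import block above replaces a wildcard import of the AdminProtos nested types (error-prone's WildcardImport check), and the shaded/thirdparty imports move into their own trailing group, as the reordered hunks show. Explicit imports keep each name's origin visible and avoid on-demand ambiguity. A sketch of the failure mode wildcards invite, with illustrative package names:

// Sketch: two on-demand imports that both provide a class named Request
// would make every bare reference to it ambiguous.
// import com.example.a.*;
// import com.example.b.*;
// Request r;   // javac: "reference to Request is ambiguous"
import java.util.Collections;
import java.util.List;   // explicit imports: origin is always unambiguous

public class ImportSketch {
  List<String> names = Collections.emptyList();

  public static void main(String[] args) {
    System.out.println(new ImportSketch().names);
  }
}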
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java
index 43140bc..44c76f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java
@@ -363,7 +363,7 @@ public class TestWALEntryStream {
appendToLog("foo");
entryBatch = batcher.take();
assertEquals(1, entryBatch.getNbEntries());
- assertEquals(getRow(entryBatch.getWalEntries().get(0)), "foo");
+ assertEquals("foo", getRow(entryBatch.getWalEntries().get(0)));
}
private String getRow(WAL.Entry entry) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
index 2414e5a..1399b21 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java
@@ -153,7 +153,7 @@ public class TestSecureIPC {
UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser();
// check that the login user is okay:
- assertSame(ugi, ugi2);
+ assertSame(ugi2, ugi);
assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
assertEquals(krbPrincipal, ugi.getUserName());
@@ -280,6 +280,7 @@ public class TestSecureIPC {
final Throwable exception[] = new Throwable[1];
Collections.synchronizedList(new ArrayList<Throwable>());
Thread.UncaughtExceptionHandler exceptionHandler = new Thread.UncaughtExceptionHandler() {
+ @Override
public void uncaughtException(Thread th, Throwable ex) {
exception[0] = ex;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
index bfc82db..d5eed3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
@@ -120,6 +120,7 @@ public class TestUser {
Configuration conf = HBaseConfiguration.create();
final User user = User.createUserForTesting(conf, "testuser", new String[]{"foo"});
final PrivilegedExceptionAction<String> action = new PrivilegedExceptionAction<String>(){
+ @Override
public String run() throws IOException {
User u = User.getCurrent();
return u.getName();
@@ -138,6 +139,7 @@ public class TestUser {
// check the exception version
username = user.runAs(new PrivilegedExceptionAction<String>(){
+ @Override
public String run() throws Exception {
return User.getCurrent().getName();
}
@@ -146,6 +148,7 @@ public class TestUser {
// verify that nested contexts work
user2.runAs(new PrivilegedExceptionAction<Object>(){
+ @Override
public Object run() throws IOException, InterruptedException{
String nestedName = user.runAs(action);
assertEquals("Nest name should match nested user", "testuser", nestedName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
index 06389ab..8a1af20 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
@@ -123,6 +123,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
// test read
READER.runAs(new PrivilegedExceptionAction<Object>() {
+ @Override
public Object run() throws Exception {
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
// force a new RS connection
@@ -151,6 +152,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
// test read with qualifier filter
LIMITED.runAs(new PrivilegedExceptionAction<Object>() {
+ @Override
public Object run() throws Exception {
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
// force a new RS connection
@@ -178,6 +180,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
// test as user with no permission
DENIED.runAs(new PrivilegedExceptionAction<Object>(){
+ @Override
public Object run() throws Exception {
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
// force a new RS connection
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index be1b0e4..14e94be 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -306,12 +306,15 @@ public class TestAccessController extends SecureTestUtil {
grantGlobal(TEST_UTIL, toGroupEntry(GROUP_WRITE), Permission.Action.WRITE);
assertEquals(5, AccessControlLists.getTablePermissions(conf, TEST_TABLE).size());
+ int size = 0;
try {
- assertEquals(5, AccessControlClient.getUserPermissions(systemUserConnection,
- TEST_TABLE.toString()).size());
+ size = AccessControlClient.getUserPermissions(systemUserConnection, TEST_TABLE.toString())
+ .size();
} catch (Throwable e) {
LOG.error("error during call of AccessControlClient.getUserPermissions. ", e);
+ fail("error during call of AccessControlClient.getUserPermissions.");
}
+ assertEquals(5, size);
}
private static void cleanUp() throws Exception {
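The getUserPermissions rewrite above fixes a test that could never fail: an assertEquals inside try/catch(Throwable) signals failure by throwing AssertionError, which the catch then swallows and merely logs. The fix captures the value inside the try, calls fail() in the handler, and asserts outside it. A sketch of the pattern, with a stand-in for the throwing call:

// Sketch: assertions belong outside any catch (Throwable) block.
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

public class SwallowedAssertSketch {
  // Stand-in for a call like AccessControlClient.getUserPermissions().
  static int fetchPermissionCount() throws Exception {
    return 5;
  }

  public static void main(String[] args) {
    int size = 0;
    try {
      size = fetchPermissionCount();
    } catch (Throwable e) {
      fail("error during call: " + e);  // surface the exception as a failure
    }
    assertEquals(5, size);  // outside the try: AssertionError now propagates
  }
}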
@@ -992,7 +995,7 @@ public class TestAccessController extends SecureTestUtil {
}
}
- public class BulkLoadHelper {
+ public static class BulkLoadHelper {
private final FileSystem fs;
private final Path loadPath;
private final Configuration conf;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
index 1e5ea53..252de3a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
@@ -56,11 +56,13 @@ public class TestZKSecretWatcher {
private static class MockAbortable implements Abortable {
private boolean abort;
+ @Override
public void abort(String reason, Throwable e) {
LOG.info("Aborting: "+reason, e);
abort = true;
}
+ @Override
public boolean isAborted() {
return abort;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
index 391a844..9a249c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java
@@ -46,11 +46,13 @@ public class TestZKSecretWatcherRefreshKeys {
private static class MockAbortable implements Abortable {
private boolean abort;
+ @Override
public void abort(String reason, Throwable e) {
LOG.info("Aborting: "+reason, e);
abort = true;
}
+ @Override
public boolean isAborted() {
return abort;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
index 9da2531..6dacf02 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
@@ -85,6 +85,7 @@ public class TestDefaultScanLabelGeneratorStack {
// Set up for the test
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.addLabels(conn, new String[] { SECRET, CONFIDENTIAL });
@@ -102,6 +103,7 @@ public class TestDefaultScanLabelGeneratorStack {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = TEST_UTIL.createTable(tableName, CF)) {
@@ -123,15 +125,13 @@ public class TestDefaultScanLabelGeneratorStack {
// Test that super user can see all the cells.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
- Scan s = new Scan();
- ResultScanner scanner = table.getScanner(s);
- Result[] next = scanner.next(1);
+ Result[] next = getResult(table, new Scan());
// Test that super user can see all the cells.
- assertTrue(next.length == 1);
CellScanner cellScanner = next[0].cellScanner();
cellScanner.advance();
Cell current = cellScanner.current();
@@ -164,15 +164,12 @@ public class TestDefaultScanLabelGeneratorStack {
});
TESTUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
// Test scan with no auth attribute
- Scan s = new Scan();
- ResultScanner scanner = table.getScanner(s);
- Result[] next = scanner.next(1);
-
- assertTrue(next.length == 1);
+ Result[] next = getResult(table, new Scan());
CellScanner cellScanner = next[0].cellScanner();
cellScanner.advance();
Cell current = cellScanner.current();
@@ -248,6 +245,13 @@ public class TestDefaultScanLabelGeneratorStack {
});
}
+
+ private static Result [] getResult(Table table, Scan scan) throws IOException {
+ ResultScanner scanner = table.getScanner(scan);
+ Result[] next = scanner.next(1);
+ assertTrue(next.length == 1);
+ return next;
+ }
@AfterClass
public static void tearDownAfterClass() throws Exception {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
index a0703fc..c7075e3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
@@ -82,6 +82,7 @@ public class TestEnforcingScanLabelGenerator {
// Set up for the test
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.addLabels(conn, new String[] { SECRET, CONFIDENTIAL });
@@ -99,6 +100,7 @@ public class TestEnforcingScanLabelGenerator {
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = TEST_UTIL.createTable(tableName, CF)) {
@@ -120,6 +122,7 @@ public class TestEnforcingScanLabelGenerator {
// Test that super user can see all the cells.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
@@ -135,6 +138,7 @@ public class TestEnforcingScanLabelGenerator {
});
TESTUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
index 521cafe..76bba48 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java
@@ -160,6 +160,7 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit
InterruptedException {
PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf1);
Table table2 = connection.getTable(TABLE_NAME)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index ba93d19..932f63e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -302,6 +302,7 @@ public abstract class TestVisibilityLabels {
TEST_UTIL.getHBaseCluster().startRegionServer();
}
Thread t1 = new Thread() {
+ @Override
public void run() {
List<RegionServerThread> regionServerThreads = TEST_UTIL.getHBaseCluster()
.getRegionServerThreads();
@@ -320,6 +321,7 @@ public abstract class TestVisibilityLabels {
t1.start();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
Thread t = new Thread() {
+ @Override
public void run() {
try {
while (!killedRS) {
@@ -415,6 +417,7 @@ public abstract class TestVisibilityLabels {
public void testSetAndGetUserAuths() throws Throwable {
final String user = "user1";
PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
String[] auths = { SECRET, CONFIDENTIAL };
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -441,6 +444,7 @@ public abstract class TestVisibilityLabels {
}
action = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
GetAuthsResponse authsResponse = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -462,6 +466,7 @@ public abstract class TestVisibilityLabels {
// Try doing setAuths once again and there should not be any duplicates
action = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
String[] auths1 = { SECRET, CONFIDENTIAL };
GetAuthsResponse authsResponse = null;
@@ -491,7 +496,7 @@ public abstract class TestVisibilityLabels {
List<String> auths = new ArrayList<>();
for (Result result : results) {
Cell labelCell = result.getColumnLatestCell(LABELS_TABLE_FAMILY, LABEL_QUALIFIER);
- Cell userAuthCell = result.getColumnLatestCell(LABELS_TABLE_FAMILY, user.getBytes());
+ Cell userAuthCell = result.getColumnLatestCell(LABELS_TABLE_FAMILY, Bytes.toBytes(user));
if (userAuthCell != null) {
auths.add(Bytes.toString(labelCell.getValueArray(), labelCell.getValueOffset(),
labelCell.getValueLength()));
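[Editor's note: the user.getBytes() to Bytes.toBytes(user) change in the hunk
above removes a dependency on the JVM's default charset: String.getBytes()
encodes with the platform's file.encoding, while HBase's Bytes.toBytes(String)
always encodes UTF-8, so the stored bytes are the same on every machine. A
small standalone illustration:

    import java.nio.charset.StandardCharsets;

    public class CharsetExample {
      public static void main(String[] args) {
        String user = "müller";
        byte[] platform = user.getBytes();                    // varies with -Dfile.encoding
        byte[] utf8 = user.getBytes(StandardCharsets.UTF_8);  // always 7 bytes here
        System.out.println(platform.length + " vs " + utf8.length);
      }
    }
]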
@@ -503,6 +508,7 @@ public abstract class TestVisibilityLabels {
@Test
public void testClearUserAuths() throws Throwable {
PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
String[] auths = { SECRET, CONFIDENTIAL, PRIVATE };
String user = "testUser";
@@ -700,32 +706,32 @@ public abstract class TestVisibilityLabels {
TEST_UTIL.getAdmin().createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(r1);
- put.addColumn(fam, qual, 3l, v1);
- put.addColumn(fam, qual2, 3l, v1);
- put.addColumn(fam2, qual, 3l, v1);
- put.addColumn(fam2, qual2, 3l, v1);
+ put.addColumn(fam, qual, 3L, v1);
+ put.addColumn(fam, qual2, 3L, v1);
+ put.addColumn(fam2, qual, 3L, v1);
+ put.addColumn(fam2, qual2, 3L, v1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(r1);
- put.addColumn(fam, qual, 4l, v2);
- put.addColumn(fam, qual2, 4l, v2);
- put.addColumn(fam2, qual, 4l, v2);
- put.addColumn(fam2, qual2, 4l, v2);
+ put.addColumn(fam, qual, 4L, v2);
+ put.addColumn(fam, qual2, 4L, v2);
+ put.addColumn(fam2, qual, 4L, v2);
+ put.addColumn(fam2, qual2, 4L, v2);
put.setCellVisibility(new CellVisibility(PRIVATE));
table.put(put);
put = new Put(r2);
- put.addColumn(fam, qual, 3l, v1);
- put.addColumn(fam, qual2, 3l, v1);
- put.addColumn(fam2, qual, 3l, v1);
- put.addColumn(fam2, qual2, 3l, v1);
+ put.addColumn(fam, qual, 3L, v1);
+ put.addColumn(fam, qual2, 3L, v1);
+ put.addColumn(fam2, qual, 3L, v1);
+ put.addColumn(fam2, qual2, 3L, v1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(r2);
- put.addColumn(fam, qual, 4l, v2);
- put.addColumn(fam, qual2, 4l, v2);
- put.addColumn(fam2, qual, 4l, v2);
- put.addColumn(fam2, qual2, 4l, v2);
+ put.addColumn(fam, qual, 4L, v2);
+ put.addColumn(fam, qual2, 4L, v2);
+ put.addColumn(fam2, qual, 4L, v2);
+ put.addColumn(fam2, qual2, 4L, v2);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
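[Editor's note: the 3l to 3L rewrites above address error-prone's
LongLiteralLowerCaseSuffix check: a lowercase l suffix is easily misread as the
digit 1. Both forms denote the same long value:

    public class LongSuffixExample {
      public static void main(String[] args) {
        long misreadable = 3l;   // easily read as 31 in many fonts
        long clear = 3L;
        System.out.println(misreadable == clear);   // true
      }
    }
]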
@@ -860,6 +866,7 @@ public abstract class TestVisibilityLabels {
public static void addLabels() throws Exception {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE, COPYRIGHT, ACCENT,
UNICODE_VIS_TAG, UC1, UC2 };
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java
index 843ca99..c14438e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOnNewVersionBehaviorTable.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.client.Table;
public class TestVisibilityLabelsOnNewVersionBehaviorTable extends TestVisibilityLabelsWithDeletes {
+ @Override
protected Table createTable(HColumnDescriptor fam) throws IOException {
fam.setNewVersionBehavior(true);
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
index a3c926e..8d75156 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
@@ -85,6 +85,7 @@ public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
public void testLabelsTableOpsWithDifferentUsers() throws Throwable {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL, PRIVATE }, "user1");
@@ -99,6 +100,7 @@ public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
// Ideally this should not be allowed. this operation should fail or do nothing.
action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL, PRIVATE }, "user3");
@@ -115,6 +117,7 @@ public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
PrivilegedExceptionAction<GetAuthsResponse> action1 =
new PrivilegedExceptionAction<GetAuthsResponse>() {
+ @Override
public GetAuthsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.getAuths(conn, "user1");
@@ -138,6 +141,7 @@ public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
PrivilegedExceptionAction<VisibilityLabelsResponse> action2 =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.clearAuths(conn, new String[] {
@@ -162,6 +166,7 @@ public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
private static void addLabels() throws Exception {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
String[] labels = { SECRET, CONFIDENTIAL, PRIVATE };
try (Connection conn = ConnectionFactory.createConnection(conf)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index 072a385..dce8591 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -301,6 +301,7 @@ public class TestVisibilityLabelsReplication {
final boolean nullExpected, final String... auths) throws IOException,
InterruptedException {
PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf1);
Table table2 = connection.getTable(TABLE_NAME)) {
@@ -346,6 +347,7 @@ public class TestVisibilityLabelsReplication {
public static void addLabels() throws Exception {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE, UNICODE_VIS_TAG };
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -362,6 +364,7 @@ public class TestVisibilityLabelsReplication {
public static void setAuths(final Configuration conf) throws Exception {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.setAuths(conn, new String[] { SECRET,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
index f6ff640..ef1ae98 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
@@ -122,6 +122,7 @@ public class TestVisibilityLabelsWithACL {
SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER2.getShortName(), tableName,
null, null, Permission.Action.READ);
PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
Scan s = new Scan();
s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -151,6 +152,7 @@ public class TestVisibilityLabelsWithACL {
final Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL
+ "&!" + PRIVATE, SECRET + "&!" + PRIVATE);
PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
Scan s = new Scan();
s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -175,6 +177,7 @@ public class TestVisibilityLabelsWithACL {
final Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL
+ "&!" + PRIVATE, SECRET + "&!" + PRIVATE);
PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
Get g = new Get(row1);
g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -204,6 +207,7 @@ public class TestVisibilityLabelsWithACL {
SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER2.getShortName(), tableName,
null, null, Permission.Action.READ);
PrivilegedExceptionAction<Void> getAction = new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
Get g = new Get(row1);
g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -222,6 +226,7 @@ public class TestVisibilityLabelsWithACL {
public void testLabelsTableOpsWithDifferentUsers() throws Throwable {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.addLabels(conn, new String[] { "l1", "l2" });
@@ -237,6 +242,7 @@ public class TestVisibilityLabelsWithACL {
.getResult(1).getException().getName());
action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL, PRIVATE }, "user1");
@@ -252,6 +258,7 @@ public class TestVisibilityLabelsWithACL {
.getResult(1).getException().getName());
action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL, PRIVATE }, "user1");
@@ -265,6 +272,7 @@ public class TestVisibilityLabelsWithACL {
assertTrue(response.getResult(1).getException().getValue().isEmpty());
action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.clearAuths(conn, new String[] {
@@ -289,6 +297,7 @@ public class TestVisibilityLabelsWithACL {
"user3");
PrivilegedExceptionAction<GetAuthsResponse> action1 =
new PrivilegedExceptionAction<GetAuthsResponse>() {
+ @Override
public GetAuthsResponse run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
return VisibilityClient.getAuths(conn, "user3");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
index 2d3f607..ea1ed10 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithCustomVisLabService.java
@@ -56,6 +56,7 @@ public class TestVisibilityLabelsWithCustomVisLabService extends TestVisibilityL
}
// Extending this test from super as we don't verify predefined labels in ExpAsStringVisibilityLabelServiceImpl
+ @Override
@Test
public void testVisibilityLabelsInPutsThatDoesNotMatchAnyDefinedLabels() throws Exception {
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
@@ -63,6 +64,7 @@ public class TestVisibilityLabelsWithCustomVisLabService extends TestVisibilityL
createTableAndWriteDataWithLabels(tableName, "SAMPLE_LABEL", "TEST");
}
+ @Override
protected List<String> extractAuths(String user, List<Result> results) {
List<String> auths = new ArrayList<>();
for (Result result : results) {
[6/9] hbase git commit: HBASE-19811 Fix findbugs and error-prone warnings in hbase-server (branch-2)
Posted by st...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index dcccfd1..91f7971 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -204,14 +203,14 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
rs -> {
ServerName serverName = rs.getServerName();
try {
- Assert.assertEquals(admin.getRegions(serverName).get().size(), rs
+ assertEquals(admin.getRegions(serverName).get().size(), rs
.getRegions().size());
} catch (Exception e) {
fail("admin.getOnlineRegions() method throws a exception: " + e.getMessage());
}
regionServerCount.incrementAndGet();
});
- Assert.assertEquals(regionServerCount.get(), 2);
+ assertEquals(2, regionServerCount.get());
}
@Test
@@ -229,7 +228,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
ASYNC_CONN.getTable(tableName)
.put(new Put(hri.getStartKey()).addColumn(FAMILY, FAMILY_0, Bytes.toBytes("value-1")))
.join();
- Assert.assertTrue(regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize() > 0);
+ assertTrue(regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize() > 0);
// flush region and wait flush operation finished.
LOG.info("flushing region: " + Bytes.toStringBinary(hri.getRegionName()));
admin.flushRegion(hri.getRegionName()).get();
@@ -239,20 +238,20 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
Threads.sleep(50);
}
// check the memstore.
- Assert.assertEquals(regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize(), 0);
+ assertEquals(0, regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize());
// write another put into the specific region
ASYNC_CONN.getTable(tableName)
.put(new Put(hri.getStartKey()).addColumn(FAMILY, FAMILY_0, Bytes.toBytes("value-2")))
.join();
- Assert.assertTrue(regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize() > 0);
+ assertTrue(regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize() > 0);
admin.flush(tableName).get();
Threads.sleepWithoutInterrupt(500);
while (regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize() > 0) {
Threads.sleep(50);
}
// check the memstore.
- Assert.assertEquals(regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize(), 0);
+ assertEquals(0, regionServer.getOnlineRegion(hri.getRegionName()).getMemStoreSize());
}
@Test
@@ -421,7 +420,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
LOG.error(e.toString(), e);
}
}
- assertEquals(count, 2);
+ assertEquals(2, count);
}
private void waitUntilMobCompactionFinished(TableName tableName)
@@ -471,23 +470,23 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
.map(rsThread -> rsThread.getRegionServer()).collect(Collectors.toList());
List<Region> regions = new ArrayList<>();
rsList.forEach(rs -> regions.addAll(rs.getRegions(tableName)));
- Assert.assertEquals(regions.size(), 1);
+ assertEquals(1, regions.size());
int countBefore = countStoreFilesInFamilies(regions, families);
- Assert.assertTrue(countBefore > 0);
+ assertTrue(countBefore > 0);
// Minor compaction for all region servers.
for (HRegionServer rs : rsList)
admin.compactRegionServer(rs.getServerName()).get();
Thread.sleep(5000);
int countAfterMinorCompaction = countStoreFilesInFamilies(regions, families);
- Assert.assertTrue(countAfterMinorCompaction < countBefore);
+ assertTrue(countAfterMinorCompaction < countBefore);
// Major compaction for all region servers.
for (HRegionServer rs : rsList)
admin.majorCompactRegionServer(rs.getServerName()).get();
Thread.sleep(5000);
int countAfterMajorCompaction = countStoreFilesInFamilies(regions, families);
- Assert.assertEquals(countAfterMajorCompaction, 3);
+ assertEquals(3, countAfterMajorCompaction);
}
@Test
@@ -512,7 +511,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
.getHBaseCluster()
.getLiveRegionServerThreads()
.forEach(rsThread -> regions.addAll(rsThread.getRegionServer().getRegions(tableName)));
- Assert.assertEquals(regions.size(), 1);
+ assertEquals(1, regions.size());
int countBefore = countStoreFilesInFamilies(regions, families);
int countBeforeSingleFamily = countStoreFilesInFamily(regions, family);
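[Editor's note: the Assert.assertEquals rewrites in this file do two things:
drop the Assert. prefix in favor of the static imports the class already uses,
and put the expected value first. JUnit's contract is
assertEquals(expected, actual), and the failure message is built from that
order, so swapped arguments report the roles backwards. A small sketch:

    import static org.junit.Assert.assertEquals;

    public class AssertOrderExample {
      public static void main(String[] args) {
        int regionCount = 2;   // hypothetical actual value
        // On failure JUnit prints "expected:<e> but was:<a>"; with the
        // literal passed second, that message would be inverted and misleading.
        assertEquals(2, regionCount);
      }
    }
]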
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
index 9ceb172..efea20e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
@@ -81,6 +81,7 @@ public class TestAsyncReplicationAdminApiWithClusters extends TestAsyncAdminBase
ASYNC_CONN.getAdmin().addReplicationPeer(ID_SECOND, rpc).join();
}
+ @Override
@After
public void tearDown() throws Exception {
Pattern pattern = Pattern.compile(tableName.getNameAsString() + ".*");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
index 5014e96..7501192 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
@@ -18,22 +18,25 @@
package org.apache.hadoop.hbase.client;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Pattern;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.regex.Pattern;
-
@RunWith(Parameterized.class)
@Category({ LargeTests.class, ClientTests.class })
public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
@@ -65,19 +68,19 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
admin.snapshot(snapshotName2, tableName).get();
List<SnapshotDescription> snapshots = syncAdmin.listSnapshots();
Collections.sort(snapshots, (snap1, snap2) -> {
- Assert.assertNotNull(snap1);
- Assert.assertNotNull(snap1.getName());
- Assert.assertNotNull(snap2);
- Assert.assertNotNull(snap2.getName());
+ assertNotNull(snap1);
+ assertNotNull(snap1.getName());
+ assertNotNull(snap2);
+ assertNotNull(snap2.getName());
return snap1.getName().compareTo(snap2.getName());
});
- Assert.assertEquals(snapshotName1, snapshots.get(0).getName());
- Assert.assertEquals(tableName, snapshots.get(0).getTableName());
- Assert.assertEquals(SnapshotType.FLUSH, snapshots.get(0).getType());
- Assert.assertEquals(snapshotName2, snapshots.get(1).getName());
- Assert.assertEquals(tableName, snapshots.get(1).getTableName());
- Assert.assertEquals(SnapshotType.FLUSH, snapshots.get(1).getType());
+ assertEquals(snapshotName1, snapshots.get(0).getName());
+ assertEquals(tableName, snapshots.get(0).getTableName());
+ assertEquals(SnapshotType.FLUSH, snapshots.get(0).getType());
+ assertEquals(snapshotName2, snapshots.get(1).getName());
+ assertEquals(tableName, snapshots.get(1).getTableName());
+ assertEquals(SnapshotType.FLUSH, snapshots.get(1).getType());
}
@Test
@@ -93,10 +96,10 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
admin.snapshot(snapshotName1, tableName).get();
List<SnapshotDescription> snapshots = syncAdmin.listSnapshots();
- Assert.assertEquals(snapshots.size(), 1);
- Assert.assertEquals(snapshotName1, snapshots.get(0).getName());
- Assert.assertEquals(tableName, snapshots.get(0).getTableName());
- Assert.assertEquals(SnapshotType.FLUSH, snapshots.get(0).getType());
+ assertEquals(1, snapshots.size());
+ assertEquals(snapshotName1, snapshots.get(0).getName());
+ assertEquals(tableName, snapshots.get(0).getTableName());
+ assertEquals(SnapshotType.FLUSH, snapshots.get(0).getType());
// cloneSnapshot into a existed table.
boolean failed = false;
@@ -105,10 +108,10 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
} catch (Exception e) {
failed = true;
}
- Assert.assertTrue(failed);
+ assertTrue(failed);
// cloneSnapshot into a new table.
- Assert.assertTrue(!syncAdmin.tableExists(tableName2));
+ assertTrue(!syncAdmin.tableExists(tableName2));
admin.cloneSnapshot(snapshotName1, tableName2).get();
syncAdmin.tableExists(tableName2);
}
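[Editor's note: the cloneSnapshot-into-existing-table check above records the
failure in a boolean flag set from a catch block. An equivalent, slightly
tighter JUnit 4 idiom is try/fail/catch; a hypothetical sketch (assertThrows
only arrived in JUnit 4.13, so it is not assumed here):

    import static org.junit.Assert.fail;

    public class ExpectedFailureExample {
      private void cloneIntoExistingTable() throws Exception {   // hypothetical
        throw new IllegalStateException("table exists");
      }

      public void testCloneFails() throws Exception {
        try {
          cloneIntoExistingTable();
          fail("clone into an existing table should fail");
        } catch (Exception expected) {
          // expected path: fail() throws AssertionError, which is not an
          // Exception, so a passing call cannot be masked by this catch
        }
      }
    }
]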
@@ -120,12 +123,12 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
Result result;
int rowCount = 0;
while ((result = scanner.next()) != null) {
- Assert.assertArrayEquals(result.getRow(), Bytes.toBytes(rowCount));
- Assert.assertArrayEquals(result.getValue(Bytes.toBytes("f1"), Bytes.toBytes("cq")),
+ assertArrayEquals(result.getRow(), Bytes.toBytes(rowCount));
+ assertArrayEquals(result.getValue(Bytes.toBytes("f1"), Bytes.toBytes("cq")),
Bytes.toBytes(rowCount));
rowCount += 1;
}
- Assert.assertEquals(rowCount, expectedRowCount);
+ assertEquals(rowCount, expectedRowCount);
}
}
}
@@ -137,11 +140,11 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
table.put(new Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("f1"), Bytes.toBytes("cq"),
Bytes.toBytes(i)));
}
- Assert.assertEquals(admin.listSnapshots().get().size(), 0);
+ assertEquals(0, admin.listSnapshots().get().size());
admin.snapshot(snapshotName1, tableName).get();
admin.snapshot(snapshotName2, tableName).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 2);
+ assertEquals(2, admin.listSnapshots().get().size());
admin.disableTable(tableName).get();
admin.restoreSnapshot(snapshotName1, true).get();
@@ -161,29 +164,23 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
table.put(new Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("f1"), Bytes.toBytes("cq"),
Bytes.toBytes(i)));
}
- Assert.assertEquals(admin.listSnapshots().get().size(), 0);
+ assertEquals(0, admin.listSnapshots().get().size());
admin.snapshot(snapshotName1, tableName).get();
admin.snapshot(snapshotName2, tableName).get();
admin.snapshot(snapshotName3, tableName).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 3);
-
- Assert.assertEquals(admin.listSnapshots(Pattern.compile("(.*)")).get().size(), 3);
- Assert.assertEquals(admin.listSnapshots(Pattern.compile("snapshotName(\\d+)")).get().size(), 3);
- Assert.assertEquals(admin.listSnapshots(Pattern.compile("snapshotName[1|3]")).get().size(), 2);
- Assert.assertEquals(admin.listSnapshots(Pattern.compile("snapshot(.*)")).get().size(), 3);
- Assert.assertEquals(
- admin.listTableSnapshots(Pattern.compile("testListSnapshots"), Pattern.compile("s(.*)")).get()
- .size(),
- 3);
- Assert.assertEquals(
- admin.listTableSnapshots(Pattern.compile("fakeTableName"), Pattern.compile("snap(.*)")).get()
- .size(),
- 0);
- Assert.assertEquals(
- admin.listTableSnapshots(Pattern.compile("test(.*)"), Pattern.compile("snap(.*)[1|3]")).get()
- .size(),
- 2);
+ assertEquals(3, admin.listSnapshots().get().size());
+
+ assertEquals(3, admin.listSnapshots(Pattern.compile("(.*)")).get().size());
+ assertEquals(3, admin.listSnapshots(Pattern.compile("snapshotName(\\d+)")).get().size());
+ assertEquals(2, admin.listSnapshots(Pattern.compile("snapshotName[1|3]")).get().size());
+ assertEquals(3, admin.listSnapshots(Pattern.compile("snapshot(.*)")).get().size());
+ assertEquals(3, admin.listTableSnapshots(Pattern.compile("testListSnapshots"),
+ Pattern.compile("s(.*)")).get().size());
+ assertEquals(0, admin.listTableSnapshots(Pattern.compile("fakeTableName"),
+ Pattern.compile("snap(.*)")).get().size());
+ assertEquals(2, admin.listTableSnapshots(Pattern.compile("test(.*)"),
+ Pattern.compile("snap(.*)[1|3]")).get().size());
}
@Test
@@ -193,29 +190,29 @@ public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
table.put(new Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("f1"), Bytes.toBytes("cq"),
Bytes.toBytes(i)));
}
- Assert.assertEquals(admin.listSnapshots().get().size(), 0);
+ assertEquals(0, admin.listSnapshots().get().size());
admin.snapshot(snapshotName1, tableName).get();
admin.snapshot(snapshotName2, tableName).get();
admin.snapshot(snapshotName3, tableName).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 3);
+ assertEquals(3, admin.listSnapshots().get().size());
admin.deleteSnapshot(snapshotName1).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 2);
+ assertEquals(2, admin.listSnapshots().get().size());
admin.deleteSnapshots(Pattern.compile("(.*)abc")).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 2);
+ assertEquals(2, admin.listSnapshots().get().size());
admin.deleteSnapshots(Pattern.compile("(.*)1")).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 2);
+ assertEquals(2, admin.listSnapshots().get().size());
admin.deleteTableSnapshots(Pattern.compile("(.*)"), Pattern.compile("(.*)1")).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 2);
+ assertEquals(2, admin.listSnapshots().get().size());
admin.deleteTableSnapshots(Pattern.compile("(.*)"), Pattern.compile("(.*)2")).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 1);
+ assertEquals(1, admin.listSnapshots().get().size());
admin.deleteTableSnapshots(Pattern.compile("(.*)"), Pattern.compile("(.*)3")).get();
- Assert.assertEquals(admin.listSnapshots().get().size(), 0);
+ assertEquals(0, admin.listSnapshots().get().size());
}
}
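[Editor's note: a side note on the listSnapshots patterns above. Inside a
character class, | is a literal, so snapshotName[1|3] matches a trailing 1, |,
or 3 rather than expressing alternation; the assertions still hold because the
snapshot names end in digits, and [13] would say the same thing unambiguously.
A quick demonstration:

    import java.util.regex.Pattern;

    public class CharClassExample {
      public static void main(String[] args) {
        Pattern p = Pattern.compile("snapshotName[1|3]");
        System.out.println(p.matcher("snapshotName1").matches());   // true
        System.out.println(p.matcher("snapshotName|").matches());   // true: '|' is literal here
        System.out.println(p.matcher("snapshotName2").matches());   // false
      }
    }
]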
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
index 529346f..8b2dce3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
@@ -135,7 +135,7 @@ public class TestAsyncTableAdminApi extends TestAsyncAdminBase {
admin.createTable(desc).join();
ModifyableTableDescriptor modifyableDesc = ((ModifyableTableDescriptor) desc);
TableDescriptor confirmedHtd = admin.getDescriptor(tableName).get();
- assertEquals(modifyableDesc.compareTo((ModifyableTableDescriptor) confirmedHtd), 0);
+ assertEquals(0, modifyableDesc.compareTo((ModifyableTableDescriptor) confirmedHtd));
}
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
index f47e6e9..7848251 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
@@ -212,13 +212,13 @@ public class TestAsyncTableBatch {
.collect(Collectors.toList())).get();
List<Row> actions = new ArrayList<>();
actions.add(new Get(Bytes.toBytes(0)));
- actions.add(new Put(Bytes.toBytes(1)).addColumn(FAMILY, CQ, Bytes.toBytes((long) 2)));
+ actions.add(new Put(Bytes.toBytes(1)).addColumn(FAMILY, CQ, Bytes.toBytes(2L)));
actions.add(new Delete(Bytes.toBytes(2)));
actions.add(new Increment(Bytes.toBytes(3)).addColumn(FAMILY, CQ, 1));
actions.add(new Append(Bytes.toBytes(4)).addColumn(FAMILY, CQ, Bytes.toBytes(4)));
RowMutations rm = new RowMutations(Bytes.toBytes(5));
- rm.add(new Put(Bytes.toBytes(5)).addColumn(FAMILY, CQ, Bytes.toBytes((long) 100)));
- rm.add(new Put(Bytes.toBytes(5)).addColumn(FAMILY, CQ1, Bytes.toBytes((long) 200)));
+ rm.add(new Put(Bytes.toBytes(5)).addColumn(FAMILY, CQ, Bytes.toBytes(100L)));
+ rm.add(new Put(Bytes.toBytes(5)).addColumn(FAMILY, CQ1, Bytes.toBytes(200L)));
actions.add(rm);
actions.add(new Get(Bytes.toBytes(6)));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
index 521d2f5..ba61ab4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAvoidCellReferencesIntoShippedBlocks.java
@@ -189,7 +189,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
try (ResultScanner scanner = table.getScanner(s)) {
count = Iterables.size(scanner);
}
- assertEquals("Count all the rows ", count, 6);
+ assertEquals("Count all the rows ", 6, count);
// all the cache is loaded
// trigger a major compaction
ScannerThread scannerThread = new ScannerThread(table, cache);
@@ -200,7 +200,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
try (ResultScanner scanner = table.getScanner(s)) {
count = Iterables.size(scanner);
}
- assertEquals("Count all the rows ", count, 6);
+ assertEquals("Count all the rows ", 6, count);
} finally {
table.close();
}
@@ -215,6 +215,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
this.cache = cache;
}
+ @Override
public void run() {
Scan s = new Scan().withStartRow(ROW4).withStopRow(ROW5).setCaching(1);
try {
@@ -346,7 +347,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
try (ResultScanner scanner = table.getScanner(s)) {
count = Iterables.size(scanner);
}
- assertEquals("Count all the rows ", count, 6);
+ assertEquals("Count all the rows ", 6, count);
// Scan from cache
s = new Scan();
@@ -379,7 +380,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
iterator.next();
refBlockCount++;
}
- assertEquals("One block should be there ", refBlockCount, 1);
+ assertEquals("One block should be there ", 1, refBlockCount);
// Rescan to prepopulate the data
// cache this row.
Scan s1 = new Scan();
@@ -392,7 +393,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
try {
scanner = table.getScanner(s1);
int count = Iterables.size(scanner);
- assertEquals("Count the rows", count, 2);
+ assertEquals("Count the rows", 2, count);
iterator = cache.iterator();
List<BlockCacheKey> newCacheList = new ArrayList<>();
while (iterator.hasNext()) {
@@ -407,7 +408,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
}
}
- assertEquals("old blocks should still be found ", newBlockRefCount, 6);
+ assertEquals("old blocks should still be found ", 6, newBlockRefCount);
latch.countDown();
} catch (IOException e) {
@@ -423,7 +424,7 @@ public class TestAvoidCellReferencesIntoShippedBlocks {
}
}
}
- assertEquals("Count should give all rows ", count, 10);
+ assertEquals("Count should give all rows ", 10, count);
} finally {
table.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
index 62eb316..9103c51 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java
@@ -127,7 +127,7 @@ public class TestClientPushback {
regionStats.getMemStoreLoadPercent());
// check that the load reported produces a nonzero delay
long backoffTime = backoffPolicy.getBackoffTime(server, regionName, serverStats);
- assertNotEquals("Reported load does not produce a backoff", backoffTime, 0);
+ assertNotEquals("Reported load does not produce a backoff", 0, backoffTime);
LOG.debug("Backoff calculated for " + region.getRegionInfo().getRegionNameAsString() + " @ " +
server + " is " + backoffTime);
@@ -166,13 +166,13 @@ public class TestClientPushback {
MetricsConnection.RunnerStats runnerStats = conn.getConnectionMetrics().runnerStats;
- assertEquals(runnerStats.delayRunners.getCount(), 1);
- assertEquals(runnerStats.normalRunners.getCount(), 1);
+ assertEquals(1, runnerStats.delayRunners.getCount());
+ assertEquals(1, runnerStats.normalRunners.getCount());
assertEquals("", runnerStats.delayIntevalHist.getSnapshot().getMean(),
(double)backoffTime, 0.1);
latch.await(backoffTime * 2, TimeUnit.MILLISECONDS);
- assertNotEquals("AsyncProcess did not submit the work time", endTime.get(), 0);
+ assertNotEquals("AsyncProcess did not submit the work time", 0, endTime.get());
assertTrue("AsyncProcess did not delay long enough", endTime.get() - startTime >= backoffTime);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionImplementation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionImplementation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionImplementation.java
index 2ad49f8..f061fc9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionImplementation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestConnectionImplementation.java
@@ -120,6 +120,7 @@ public class TestConnectionImplementation {
TEST_UTIL.shutdownMiniCluster();
}
+ @Test
public void testClusterConnection() throws IOException {
ThreadPoolExecutor otherPool = new ThreadPoolExecutor(1, 1,
5, TimeUnit.SECONDS,
@@ -636,7 +637,7 @@ public class TestConnectionImplementation {
LOG.info("Put done, exception caught: " + e.getClass());
Assert.assertEquals(1, e.getNumExceptions());
Assert.assertEquals(1, e.getCauses().size());
- Assert.assertArrayEquals(e.getRow(0).getRow(), ROW);
+ Assert.assertArrayEquals(ROW, e.getRow(0).getRow());
// Check that we unserialized the exception as expected
Throwable cause = ClientExceptionsUtil.findException(e.getCause(0));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 47516ec..b2eae85 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -171,6 +171,7 @@ public class TestFastFail {
* will follow the killing of a regionserver so that we make sure that
* some of threads go into PreemptiveFastFailExcception
*/
+ @Override
public Boolean call() throws Exception {
try (Table table = connection.getTable(TableName.valueOf(tableName))) {
Thread.sleep(Math.abs(random.nextInt()) % 250); // Add some jitter here
@@ -275,7 +276,7 @@ public class TestFastFail {
"All the failures should be coming from the secondput failure",
numFailedThreads.get(), numThreadsReturnedFalse);
assertEquals("Number of threads that threw execution exceptions "
- + "otherwise should be 0", numThreadsThrewExceptions, 0);
+ + "otherwise should be 0", 0, numThreadsThrewExceptions);
assertEquals("The regionservers that returned true should equal to the"
+ " number of successful threads", numThreadsReturnedTrue,
numSuccessfullThreads.get());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 3af245f..952905a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -203,125 +203,123 @@ public class TestFromClientSide {
/**
* Basic client side validation of HBASE-4536
*/
- @Test
- public void testKeepDeletedCells() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName());
- final byte[] FAMILY = Bytes.toBytes("family");
- final byte[] C0 = Bytes.toBytes("c0");
-
- final byte[] T1 = Bytes.toBytes("T1");
- final byte[] T2 = Bytes.toBytes("T2");
- final byte[] T3 = Bytes.toBytes("T3");
- HColumnDescriptor hcd = new HColumnDescriptor(FAMILY)
- .setKeepDeletedCells(KeepDeletedCells.TRUE)
- .setMaxVersions(3);
-
- HTableDescriptor desc = new HTableDescriptor(tableName);
- desc.addFamily(hcd);
- TEST_UTIL.getAdmin().createTable(desc);
- Table h = TEST_UTIL.getConnection().getTable(tableName);
-
- long ts = System.currentTimeMillis();
- Put p = new Put(T1, ts);
- p.addColumn(FAMILY, C0, T1);
- h.put(p);
- p = new Put(T1, ts+2);
- p.addColumn(FAMILY, C0, T2);
- h.put(p);
- p = new Put(T1, ts+4);
- p.addColumn(FAMILY, C0, T3);
- h.put(p);
-
- Delete d = new Delete(T1, ts+3);
- h.delete(d);
-
- d = new Delete(T1, ts+3);
- d.addColumns(FAMILY, C0, ts+3);
- h.delete(d);
-
- Get g = new Get(T1);
- // does *not* include the delete
- g.setTimeRange(0, ts+3);
- Result r = h.get(g);
- assertArrayEquals(T2, r.getValue(FAMILY, C0));
-
- Scan s = new Scan(T1);
- s.setTimeRange(0, ts+3);
- s.setMaxVersions();
- ResultScanner scanner = h.getScanner(s);
- Cell[] kvs = scanner.next().rawCells();
- assertArrayEquals(T2, CellUtil.cloneValue(kvs[0]));
- assertArrayEquals(T1, CellUtil.cloneValue(kvs[1]));
- scanner.close();
-
- s = new Scan(T1);
- s.setRaw(true);
- s.setMaxVersions();
- scanner = h.getScanner(s);
- kvs = scanner.next().rawCells();
- assertTrue(PrivateCellUtil.isDeleteFamily(kvs[0]));
- assertArrayEquals(T3, CellUtil.cloneValue(kvs[1]));
- assertTrue(CellUtil.isDelete(kvs[2]));
- assertArrayEquals(T2, CellUtil.cloneValue(kvs[3]));
- assertArrayEquals(T1, CellUtil.cloneValue(kvs[4]));
- scanner.close();
- h.close();
- }
-
- /**
- * Basic client side validation of HBASE-10118
- */
- @Test
- public void testPurgeFutureDeletes() throws Exception {
- final TableName tableName = TableName.valueOf(name.getMethodName());
- final byte[] ROW = Bytes.toBytes("row");
- final byte[] FAMILY = Bytes.toBytes("family");
- final byte[] COLUMN = Bytes.toBytes("column");
- final byte[] VALUE = Bytes.toBytes("value");
-
- Table table = TEST_UTIL.createTable(tableName, FAMILY);
-
- // future timestamp
- long ts = System.currentTimeMillis() * 2;
- Put put = new Put(ROW, ts);
- put.addColumn(FAMILY, COLUMN, VALUE);
- table.put(put);
-
- Get get = new Get(ROW);
- Result result = table.get(get);
- assertArrayEquals(VALUE, result.getValue(FAMILY, COLUMN));
-
- Delete del = new Delete(ROW);
- del.addColumn(FAMILY, COLUMN, ts);
- table.delete(del);
-
- get = new Get(ROW);
- result = table.get(get);
- assertNull(result.getValue(FAMILY, COLUMN));
-
- // major compaction, purged future deletes
- TEST_UTIL.getAdmin().flush(tableName);
- TEST_UTIL.getAdmin().majorCompact(tableName);
-
- // waiting for the major compaction to complete
- TEST_UTIL.waitFor(6000, new Waiter.Predicate<IOException>() {
- @Override
- public boolean evaluate() throws IOException {
- return TEST_UTIL.getAdmin().getCompactionState(tableName) ==
- CompactionState.NONE;
- }
- });
-
- put = new Put(ROW, ts);
- put.addColumn(FAMILY, COLUMN, VALUE);
- table.put(put);
-
- get = new Get(ROW);
- result = table.get(get);
- assertArrayEquals(VALUE, result.getValue(FAMILY, COLUMN));
-
- table.close();
- }
+ @Test
+ public void testKeepDeletedCells() throws Exception {
+ final TableName tableName = TableName.valueOf(name.getMethodName());
+ final byte[] FAMILY = Bytes.toBytes("family");
+ final byte[] C0 = Bytes.toBytes("c0");
+
+ final byte[] T1 = Bytes.toBytes("T1");
+ final byte[] T2 = Bytes.toBytes("T2");
+ final byte[] T3 = Bytes.toBytes("T3");
+ HColumnDescriptor hcd =
+ new HColumnDescriptor(FAMILY).setKeepDeletedCells(KeepDeletedCells.TRUE).setMaxVersions(3);
+
+ HTableDescriptor desc = new HTableDescriptor(tableName);
+ desc.addFamily(hcd);
+ TEST_UTIL.getAdmin().createTable(desc);
+ Table h = TEST_UTIL.getConnection().getTable(tableName);
+
+ long ts = System.currentTimeMillis();
+ Put p = new Put(T1, ts);
+ p.addColumn(FAMILY, C0, T1);
+ h.put(p);
+ p = new Put(T1, ts + 2);
+ p.addColumn(FAMILY, C0, T2);
+ h.put(p);
+ p = new Put(T1, ts + 4);
+ p.addColumn(FAMILY, C0, T3);
+ h.put(p);
+
+ Delete d = new Delete(T1, ts + 3);
+ h.delete(d);
+
+ d = new Delete(T1, ts + 3);
+ d.addColumns(FAMILY, C0, ts + 3);
+ h.delete(d);
+
+ Get g = new Get(T1);
+ // does *not* include the delete
+ g.setTimeRange(0, ts + 3);
+ Result r = h.get(g);
+ assertArrayEquals(T2, r.getValue(FAMILY, C0));
+
+ Scan s = new Scan(T1);
+ s.setTimeRange(0, ts + 3);
+ s.setMaxVersions();
+ ResultScanner scanner = h.getScanner(s);
+ Cell[] kvs = scanner.next().rawCells();
+ assertArrayEquals(T2, CellUtil.cloneValue(kvs[0]));
+ assertArrayEquals(T1, CellUtil.cloneValue(kvs[1]));
+ scanner.close();
+
+ s = new Scan(T1);
+ s.setRaw(true);
+ s.setMaxVersions();
+ scanner = h.getScanner(s);
+ kvs = scanner.next().rawCells();
+ assertTrue(PrivateCellUtil.isDeleteFamily(kvs[0]));
+ assertArrayEquals(T3, CellUtil.cloneValue(kvs[1]));
+ assertTrue(CellUtil.isDelete(kvs[2]));
+ assertArrayEquals(T2, CellUtil.cloneValue(kvs[3]));
+ assertArrayEquals(T1, CellUtil.cloneValue(kvs[4]));
+ scanner.close();
+ h.close();
+ }
+
+ /**
+ * Basic client side validation of HBASE-10118
+ */
+ @Test
+ public void testPurgeFutureDeletes() throws Exception {
+ final TableName tableName = TableName.valueOf(name.getMethodName());
+ final byte[] ROW = Bytes.toBytes("row");
+ final byte[] FAMILY = Bytes.toBytes("family");
+ final byte[] COLUMN = Bytes.toBytes("column");
+ final byte[] VALUE = Bytes.toBytes("value");
+
+ Table table = TEST_UTIL.createTable(tableName, FAMILY);
+
+ // future timestamp
+ long ts = System.currentTimeMillis() * 2;
+ Put put = new Put(ROW, ts);
+ put.addColumn(FAMILY, COLUMN, VALUE);
+ table.put(put);
+
+ Get get = new Get(ROW);
+ Result result = table.get(get);
+ assertArrayEquals(VALUE, result.getValue(FAMILY, COLUMN));
+
+ Delete del = new Delete(ROW);
+ del.addColumn(FAMILY, COLUMN, ts);
+ table.delete(del);
+
+ get = new Get(ROW);
+ result = table.get(get);
+ assertNull(result.getValue(FAMILY, COLUMN));
+
+ // major compaction, purged future deletes
+ TEST_UTIL.getAdmin().flush(tableName);
+ TEST_UTIL.getAdmin().majorCompact(tableName);
+
+ // waiting for the major compaction to complete
+ TEST_UTIL.waitFor(6000, new Waiter.Predicate<IOException>() {
+ @Override
+ public boolean evaluate() throws IOException {
+ return TEST_UTIL.getAdmin().getCompactionState(tableName) == CompactionState.NONE;
+ }
+ });
+
+ put = new Put(ROW, ts);
+ put.addColumn(FAMILY, COLUMN, VALUE);
+ table.put(put);
+
+ get = new Get(ROW);
+ result = table.get(get);
+ assertArrayEquals(VALUE, result.getValue(FAMILY, COLUMN));
+
+ table.close();
+ }
/**
* Verifies that getConfiguration returns the same Configuration object used
@@ -660,13 +658,13 @@ public class TestFromClientSide {
ResultScanner scanner = ht.getScanner(scan);
int expectedIndex = 1;
for(Result result : ht.getScanner(scan)) {
- assertEquals(result.size(), 1);
+ assertEquals(1, result.size());
assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[expectedIndex]));
assertTrue(Bytes.equals(CellUtil.cloneQualifier(result.rawCells()[0]),
QUALIFIERS[expectedIndex]));
expectedIndex++;
}
- assertEquals(expectedIndex, 6);
+ assertEquals(6, expectedIndex);
scanner.close();
}
@@ -693,11 +691,11 @@ public class TestFromClientSide {
ResultScanner scanner = ht.getScanner(scan);
int expectedIndex = 0;
for(Result result : ht.getScanner(scan)) {
- assertEquals(result.size(), 1);
+ assertEquals(1, result.size());
assertTrue(Bytes.toLong(result.getValue(FAMILY, QUALIFIER)) > 500);
expectedIndex++;
}
- assertEquals(expectedIndex, 4);
+ assertEquals(4, expectedIndex);
scanner.close();
}
@@ -726,12 +724,12 @@ public class TestFromClientSide {
ResultScanner scanner = ht.getScanner(scan);
int count = 0;
for(Result result : ht.getScanner(scan)) {
- assertEquals(result.size(), 1);
- assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT);
- assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length);
+ assertEquals(1, result.size());
+ assertEquals(Bytes.SIZEOF_INT, result.rawCells()[0].getValueLength());
+ assertEquals(VALUE.length, Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])));
count++;
}
- assertEquals(count, 10);
+ assertEquals(10, count);
scanner.close();
}
@@ -3499,7 +3497,9 @@ public class TestFromClientSide {
private long [] makeStamps(int n) {
long [] stamps = new long[n];
- for(int i=0;i<n;i++) stamps[i] = i+1;
+ for (int i = 0; i < n; i++) {
+ stamps[i] = i+1L;
+ }
return stamps;
}
@@ -3969,8 +3969,8 @@ public class TestFromClientSide {
put = new Put(row2);
put.addColumn(CONTENTS_FAMILY, null, value);
- assertEquals(put.size(), 1);
- assertEquals(put.getFamilyCellMap().get(CONTENTS_FAMILY).size(), 1);
+ assertEquals(1, put.size());
+ assertEquals(1, put.getFamilyCellMap().get(CONTENTS_FAMILY).size());
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0);
@@ -4794,22 +4794,22 @@ public class TestFromClientSide {
// row doesn't exist, so using non-null value should be considered "not match".
boolean ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifEquals(VALUE).thenPut(put1);
- assertEquals(ok, false);
+ assertFalse(ok);
// row doesn't exist, so using "ifNotExists" should be considered "match".
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER).ifNotExists().thenPut(put1);
- assertEquals(ok, true);
+ assertTrue(ok);
// row now exists, so using "ifNotExists" should be considered "not match".
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER).ifNotExists().thenPut(put1);
- assertEquals(ok, false);
+ assertFalse(ok);
Put put2 = new Put(ROW);
put2.addColumn(FAMILY, QUALIFIER, value2);
// row now exists, use the matching value to check
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER).ifEquals(VALUE).thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
Put put3 = new Put(anotherrow);
put3.addColumn(FAMILY, QUALIFIER, VALUE);
@@ -4839,70 +4839,70 @@ public class TestFromClientSide {
// row doesn't exist, so using "ifNotExists" should be considered "match".
boolean ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER).ifNotExists().thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
// cell = "bbbb", using "aaaa" to compare only LESS/LESS_OR_EQUAL/NOT_EQUAL
// turns out "match"
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER, value1).thenPut(put2);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.EQUAL, value1).thenPut(put2);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER_OR_EQUAL, value1).thenPut(put2);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS, value1).thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS_OR_EQUAL, value1).thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.NOT_EQUAL, value1).thenPut(put3);
- assertEquals(ok, true);
+ assertTrue(ok);
// cell = "cccc", using "dddd" to compare only LARGER/LARGER_OR_EQUAL/NOT_EQUAL
// turns out "match"
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS, value4).thenPut(put3);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS_OR_EQUAL, value4).thenPut(put3);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.EQUAL, value4).thenPut(put3);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER, value4).thenPut(put3);
- assertEquals(ok, true);
+ assertTrue(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER_OR_EQUAL, value4).thenPut(put3);
- assertEquals(ok, true);
+ assertTrue(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.NOT_EQUAL, value4).thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
// cell = "bbbb", using "bbbb" to compare only GREATER_OR_EQUAL/LESS_OR_EQUAL/EQUAL
// turns out "match"
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER, value2).thenPut(put2);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.NOT_EQUAL, value2).thenPut(put2);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS, value2).thenPut(put2);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER_OR_EQUAL, value2).thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS_OR_EQUAL, value2).thenPut(put2);
- assertEquals(ok, true);
+ assertTrue(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.EQUAL, value2).thenPut(put3);
- assertEquals(ok, true);
+ assertTrue(ok);
}
@Test
@@ -4921,7 +4921,7 @@ public class TestFromClientSide {
boolean ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifEquals(value1).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
}
@Test
@@ -4948,72 +4948,72 @@ public class TestFromClientSide {
// turns out "match"
boolean ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER, value1).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.EQUAL, value1).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER_OR_EQUAL, value1).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS, value1).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
table.put(put2);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS_OR_EQUAL, value1).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
table.put(put2);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.NOT_EQUAL, value1).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
// cell = "cccc", using "dddd" to compare only LARGER/LARGER_OR_EQUAL/NOT_EQUAL
// turns out "match"
table.put(put3);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS, value4).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS_OR_EQUAL, value4).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.EQUAL, value4).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER, value4).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
table.put(put3);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER_OR_EQUAL, value4).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
table.put(put3);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.NOT_EQUAL, value4).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
// cell = "bbbb", using "bbbb" to compare only GREATER_OR_EQUAL/LESS_OR_EQUAL/EQUAL
// turns out "match"
table.put(put2);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER, value2).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.NOT_EQUAL, value2).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS, value2).thenDelete(delete);
- assertEquals(ok, false);
+ assertFalse(ok);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.GREATER_OR_EQUAL, value2).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
table.put(put2);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.LESS_OR_EQUAL, value2).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
table.put(put2);
ok = table.checkAndMutate(ROW, FAMILY).qualifier(QUALIFIER)
.ifMatches(CompareOperator.EQUAL, value2).thenDelete(delete);
- assertEquals(ok, true);
+ assertTrue(ok);
}
/**
@@ -5726,7 +5726,7 @@ public class TestFromClientSide {
ResultScanner scanner = ht.getScanner(scan);
int expectedIndex = 5;
for (Result result : scanner) {
- assertEquals(result.size(), 1);
+ assertEquals(1, result.size());
Cell c = result.rawCells()[0];
assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
ROWS[expectedIndex], 0, ROWS[expectedIndex].length));
@@ -5734,7 +5734,7 @@ public class TestFromClientSide {
c.getQualifierLength(), QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length));
expectedIndex--;
}
- assertEquals(expectedIndex, 0);
+ assertEquals(0, expectedIndex);
scanner.close();
ht.close();
}
@@ -5767,12 +5767,12 @@ public class TestFromClientSide {
ResultScanner scanner = ht.getScanner(scan);
int count = 0;
for (Result result : ht.getScanner(scan)) {
- assertEquals(result.size(), 1);
- assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT);
- assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length);
+ assertEquals(1, result.size());
+ assertEquals(Bytes.SIZEOF_INT, result.rawCells()[0].getValueLength());
+ assertEquals(VALUE.length, Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])));
count++;
}
- assertEquals(count, 10);
+ assertEquals(10, count);
scanner.close();
ht.close();
}
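
A note on the assertEquals swaps in this file: JUnit's signature is assertEquals(expected, actual), and the failure message is built from that order, so reversed arguments produce a misleading "expected X but was Y" report. A minimal sketch of the convention (class and values are illustrative, not part of this patch):

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class AssertOrderExample {
      private int countRows() {
        return 10; // stand-in for a scan that counts rows
      }

      @Test
      public void expectedComesFirst() {
        // Expected value first, actual second. If countRows() ever
        // returned 7, JUnit would report "expected:<10> but was:<7>";
        // with the arguments reversed it would misleadingly print
        // "expected:<7> but was:<10>".
        assertEquals(10, countRows());
      }
    }
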
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 2d67b3e..6b031d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -19,6 +19,12 @@
*/
package org.apache.hadoop.hbase.client;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -35,37 +41,30 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.RegionObserver;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos;
import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.After;
import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -75,6 +74,9 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+
@Category({LargeTests.class, ClientTests.class})
public class TestFromClientSide3 {
private static final Logger LOG = LoggerFactory.getLogger(TestFromClientSide3.class);
@@ -497,12 +499,12 @@ public class TestFromClientSide3 {
Get get = new Get(ROW);
boolean exist = table.exists(get);
- assertEquals(exist, false);
+ assertFalse(exist);
table.put(put);
exist = table.exists(get);
- assertEquals(exist, true);
+ assertTrue(exist);
}
@Test
@@ -589,12 +591,12 @@ public class TestFromClientSide3 {
Get get = new Get(ROW);
boolean exist = table.exists(get);
- assertEquals(exist, false);
+ assertFalse(exist);
table.put(put);
exist = table.exists(get);
- assertEquals(exist, true);
+ assertTrue(exist);
}
@Test
@@ -614,10 +616,10 @@ public class TestFromClientSide3 {
LOG.info("Calling exists");
boolean[] results = table.existsAll(gets);
- assertEquals(results[0], false);
- assertEquals(results[1], false);
- assertEquals(results[2], true);
- assertEquals(results[3], false);
+ assertFalse(results[0]);
+ assertFalse(results[1]);
+ assertTrue(results[2]);
+ assertFalse(results[3]);
// Test with the first region.
put = new Put(new byte[] { 0x00 });
@@ -628,8 +630,8 @@ public class TestFromClientSide3 {
gets.add(new Get(new byte[] { 0x00 }));
gets.add(new Get(new byte[] { 0x00, 0x00 }));
results = table.existsAll(gets);
- assertEquals(results[0], true);
- assertEquals(results[1], false);
+ assertTrue(results[0]);
+ assertFalse(results[1]);
// Test with the last region
put = new Put(new byte[] { (byte) 0xff, (byte) 0xff });
@@ -641,9 +643,9 @@ public class TestFromClientSide3 {
gets.add(new Get(new byte[] { (byte) 0xff, (byte) 0xff }));
gets.add(new Get(new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xff }));
results = table.existsAll(gets);
- assertEquals(results[0], false);
- assertEquals(results[1], true);
- assertEquals(results[2], false);
+ assertFalse(results[0]);
+ assertTrue(results[1]);
+ assertFalse(results[2]);
}
@Test
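
The assertEquals(exist, false) to assertFalse(exist) rewrites above go one step further: for booleans, the dedicated assertions sidestep the expected/actual question entirely and read as plain statements. A small sketch with stand-in values (not this test's fixture):

    import static org.junit.Assert.assertFalse;
    import static org.junit.Assert.assertTrue;

    import org.junit.Test;

    public class BooleanAssertExample {
      @Test
      public void preferBooleanAsserts() {
        boolean present = "row-1".startsWith("row"); // stand-in for table.exists(get)
        boolean missing = "row-1".startsWith("xyz");
        assertTrue(present);  // clearer than assertEquals(present, true)
        assertFalse(missing); // clearer than assertEquals(missing, false)
      }
    }
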
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
index 093400e..3f618a1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
@@ -279,16 +279,19 @@ public class TestMetaCache {
private int expCount = -1;
private List<Throwable> metaCachePreservingExceptions = metaCachePreservingExceptions();
+ @Override
public void throwOnGet(FakeRSRpcServices rpcServices, ClientProtos.GetRequest request)
throws ServiceException {
throwSomeExceptions(rpcServices, request.getRegion());
}
+ @Override
public void throwOnMutate(FakeRSRpcServices rpcServices, ClientProtos.MutateRequest request)
throws ServiceException {
throwSomeExceptions(rpcServices, request.getRegion());
}
+ @Override
public void throwOnScan(FakeRSRpcServices rpcServices, ClientProtos.ScanRequest request)
throws ServiceException {
if (!request.hasScannerId()) {
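
The @Override annotations added above are not just documentation: they make the compiler check that the method actually overrides or implements a supertype method, so a signature typo becomes a build failure instead of a silently unused extra method. A sketch with invented names:

    interface ExceptionInjector {
      void throwOnGet(String request);
    }

    class CountingInjector implements ExceptionInjector {
      @Override
      public void throwOnGet(String request) {
        // Misspelling this as throwOnget(...) while keeping @Override
        // fails compilation with "method does not override or implement
        // a method from a supertype"; without the annotation the typo
        // would just declare a second, never-called method.
      }
    }
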
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
index ee39a83..d73ebc0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
@@ -180,7 +180,7 @@ public class TestMultipleTimestamps {
Integer[] scanRows = new Integer[] {5, 7};
Integer[] scanColumns = new Integer[] {3, 4, 5};
- Long[] scanTimestamps = new Long[] {2l, 3L};
+ Long[] scanTimestamps = new Long[] { 2L, 3L};
int scanMaxVersions = 2;
put(ht, FAMILY, putRows, putColumns, putTimestamps);
@@ -241,7 +241,7 @@ public class TestMultipleTimestamps {
Integer[] scanRows = new Integer[] {3, 5, 7};
Integer[] scanColumns = new Integer[] {3, 4, 5};
- Long[] scanTimestamps = new Long[] {2l, 4L};
+ Long[] scanTimestamps = new Long[] { 2L, 4L};
int scanMaxVersions = 5;
put(ht, FAMILY, putRows1, putColumns1, putTimestamps1);
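
The 2l to 2L changes fix the kind of readability hazard error-prone flags: a lowercase long suffix is nearly indistinguishable from the digit 1 in many fonts. The value is unchanged; only the spelling is. For instance:

    public class LongSuffixExample {
      public static void main(String[] args) {
        long a = 11l; // easily misread as the number 111
        long b = 11L; // unambiguous
        System.out.println(a == b); // true; same value, clearer spelling
      }
    }
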
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index a06055d..f5ea10e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -261,9 +261,9 @@ public class TestReplicasClient {
AdminProtos.OpenRegionRequest orr = RequestConverter.buildOpenRegionRequest(
getRS().getServerName(), hri, null);
AdminProtos.OpenRegionResponse responseOpen = getRS().getRSRpcServices().openRegion(null, orr);
- Assert.assertEquals(responseOpen.getOpeningStateCount(), 1);
- Assert.assertEquals(responseOpen.getOpeningState(0),
- AdminProtos.OpenRegionResponse.RegionOpeningState.OPENED);
+ Assert.assertEquals(1, responseOpen.getOpeningStateCount());
+ Assert.assertEquals(AdminProtos.OpenRegionResponse.RegionOpeningState.OPENED,
+ responseOpen.getOpeningState(0));
checkRegionIsOpened(hri);
}
@@ -583,8 +583,8 @@ public class TestReplicasClient {
r = table.get(g);
Assert.assertFalse(r.isStale());
Assert.assertFalse(r.getColumnCells(f, b1).isEmpty());
- Assert.assertEquals(hedgedReadOps.getCount(), 1);
- Assert.assertEquals(hedgedReadWin.getCount(), 0);
+ Assert.assertEquals(1, hedgedReadOps.getCount());
+ Assert.assertEquals(0, hedgedReadWin.getCount());
SlowMeCopro.sleepTime.set(0);
SlowMeCopro.getSecondaryCdl().get().countDown();
LOG.info("hedged read occurred but not faster");
@@ -597,8 +597,8 @@ public class TestReplicasClient {
r = table.get(g);
Assert.assertTrue(r.isStale());
Assert.assertTrue(r.getColumnCells(f, b1).isEmpty());
- Assert.assertEquals(hedgedReadOps.getCount(), 2);
- Assert.assertEquals(hedgedReadWin.getCount(), 1);
+ Assert.assertEquals(2, hedgedReadOps.getCount());
+ Assert.assertEquals(1, hedgedReadWin.getCount());
SlowMeCopro.getPrimaryCdl().get().countDown();
LOG.info("hedged read occurred and faster");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
index 3190fb9..fe1cc9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
@@ -205,7 +205,7 @@ public class TestRestoreSnapshotFromClient {
HTableDescriptor htd = admin.getTableDescriptor(tableName);
assertEquals(2, htd.getFamilies().size());
SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 500, TEST_FAMILY2);
- long snapshot2Rows = snapshot1Rows + 500;
+ long snapshot2Rows = snapshot1Rows + 500L;
assertEquals(snapshot2Rows, countRows(table));
assertEquals(500, countRows(table, TEST_FAMILY2));
Set<String> fsFamilies = getFamiliesFromFS(tableName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
index 3335298..b494895 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
@@ -194,7 +194,7 @@ public class TestResult extends TestCase {
loadValueBuffer.clear();
r.loadValue(family, qf, loadValueBuffer);
loadValueBuffer.flip();
- assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))), loadValueBuffer);
+ assertEquals(loadValueBuffer, ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))));
assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))),
r.getValueAsByteBuffer(family, qf));
}
@@ -219,7 +219,7 @@ public class TestResult extends TestCase {
loadValueBuffer.clear();
r.loadValue(family, qf, loadValueBuffer);
loadValueBuffer.flip();
- assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))), loadValueBuffer);
+ assertEquals(loadValueBuffer, ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))));
assertEquals(ByteBuffer.wrap(Bytes.add(value, Bytes.toBytes(i))),
r.getValueAsByteBuffer(family, qf));
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerBusyException.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerBusyException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerBusyException.java
index b1126e5..387253c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerBusyException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestServerBusyException.java
@@ -142,7 +142,7 @@ public class TestServerBusyException {
TEST_UTIL.shutdownMiniCluster();
}
- private class TestPutThread extends Thread {
+ private static class TestPutThread extends Thread {
Table table;
int getServerBusyException = 0;
@@ -163,7 +163,7 @@ public class TestServerBusyException {
}
}
- private class TestGetThread extends Thread {
+ private static class TestGetThread extends Thread {
Table table;
int getServerBusyException = 0;
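
Making TestPutThread and TestGetThread static is the usual fix for findbugs' "inner class could be static" (SIC) warning: a non-static inner class carries a hidden reference to its enclosing instance, which costs memory and can keep the outer object reachable longer than intended. A minimal sketch:

    public class Outer {
      class Inner {}           // holds an implicit Outer.this reference
      static class Nested {}   // no hidden reference to Outer

      void demo() {
        Inner a = new Inner();   // only constructible with an enclosing Outer
        Nested b = new Nested(); // independent of any Outer instance
        System.out.println(a + " " + b);
      }
    }
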
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
index 84bfa76..41e3034 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java
@@ -160,8 +160,8 @@ public class TestSizeFailures {
* @return An entry where the first item is rows observed and the second is entries observed.
*/
private Entry<Long,Long> sumTable(ResultScanner scanner) {
- long rowsObserved = 0l;
- long entriesObserved = 0l;
+ long rowsObserved = 0L;
+ long entriesObserved = 0L;
// Read all the records in the table
for (Result result : scanner) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
index b050397..4de6a76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
@@ -105,7 +105,7 @@ public class TestSmallReversedScanner {
Assert.assertArrayEquals(r.getRow(), Bytes.toBytes(inputRowKeys[value]));
}
- Assert.assertEquals(value, 0);
+ Assert.assertEquals(0, value);
}
/**
@@ -133,6 +133,6 @@ public class TestSmallReversedScanner {
Assert.assertArrayEquals(r.getRow(), new byte[] { (char) 0x00 });
Assert.assertTrue(--count >= 0);
}
- Assert.assertEquals(count, 0);
+ Assert.assertEquals(0, count);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
index 1127a5e..b077d64 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
@@ -185,7 +185,7 @@ public class TestSnapshotFromClient {
admin.deleteSnapshots(Pattern.compile("TableSnapshot.*"));
List<SnapshotDescription> snapshots = admin.listSnapshots();
assertEquals(1, snapshots.size());
- assertEquals(snapshots.get(0).getName(), snapshot3);
+ assertEquals(snapshot3, snapshots.get(0).getName());
admin.deleteSnapshot(snapshot3);
admin.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
index 49c6560..fa3715d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
@@ -212,8 +212,8 @@ public class TestSnapshotMetadata {
cloneHtd.getValues().size());
assertEquals(originalTableDescriptor.getConfiguration().size(),
cloneHtd.getConfiguration().size());
- assertEquals(cloneHtd.getValue(TEST_CUSTOM_VALUE), TEST_CUSTOM_VALUE);
- assertEquals(cloneHtd.getConfigurationValue(TEST_CONF_CUSTOM_VALUE), TEST_CONF_CUSTOM_VALUE);
+ assertEquals(TEST_CUSTOM_VALUE, cloneHtd.getValue(TEST_CUSTOM_VALUE));
+ assertEquals(TEST_CONF_CUSTOM_VALUE, cloneHtd.getConfigurationValue(TEST_CONF_CUSTOM_VALUE));
assertEquals(originalTableDescriptor.getValues(), cloneHtd.getValues());
assertEquals(originalTableDescriptor.getConfiguration(), cloneHtd.getConfiguration());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
index df2a6e8..9e543dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
@@ -177,7 +177,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
byte[] value = result.getValue(TEST_FAMILY, TEST_QUALIFIER);
Assert.assertArrayEquals(value, Bytes.toBytes(rowCount++));
}
- Assert.assertEquals(rowCount, ROW_COUNT);
+ Assert.assertEquals(ROW_COUNT, rowCount);
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
index 678134b..d239eb8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
@@ -80,13 +80,13 @@ public class TestSplitOrMergeStatus {
Admin admin = TEST_UTIL.getAdmin();
initSwitchStatus(admin);
boolean[] results = admin.setSplitOrMergeEnabled(false, false, MasterSwitchType.SPLIT);
- assertEquals(results.length, 1);
+ assertEquals(1, results.length);
assertTrue(results[0]);
admin.split(t.getName());
int count = admin.getTableRegions(tableName).size();
assertTrue(originalCount == count);
results = admin.setSplitOrMergeEnabled(true, false, MasterSwitchType.SPLIT);
- assertEquals(results.length, 1);
+ assertEquals(1, results.length);
assertFalse(results[0]);
admin.split(t.getName());
while ((count = admin.getTableRegions(tableName).size()) == originalCount) {
@@ -117,7 +117,7 @@ public class TestSplitOrMergeStatus {
// Merge switch is off so merge should NOT succeed.
boolean[] results = admin.setSplitOrMergeEnabled(false, false, MasterSwitchType.MERGE);
- assertEquals(results.length, 1);
+ assertEquals(1, results.length);
assertTrue(results[0]);
List<HRegionInfo> regions = admin.getTableRegions(t.getName());
assertTrue(regions.size() > 1);
@@ -134,7 +134,7 @@ public class TestSplitOrMergeStatus {
results = admin.setSplitOrMergeEnabled(true, false, MasterSwitchType.MERGE);
regions = admin.getTableRegions(t.getName());
- assertEquals(results.length, 1);
+ assertEquals(1, results.length);
assertFalse(results[0]);
f = admin.mergeRegionsAsync(regions.get(0).getEncodedNameAsBytes(),
regions.get(1).getEncodedNameAsBytes(), true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 89af5de..ea60ec2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -181,15 +181,15 @@ public class TestTimestampsFilter {
Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE);
Put p = new Put(Bytes.toBytes("row"));
- p.addColumn(FAMILY, Bytes.toBytes("column0"), (long) 3, Bytes.toBytes("value0-3"));
- p.addColumn(FAMILY, Bytes.toBytes("column1"), (long) 3, Bytes.toBytes("value1-3"));
- p.addColumn(FAMILY, Bytes.toBytes("column2"), (long) 1, Bytes.toBytes("value2-1"));
- p.addColumn(FAMILY, Bytes.toBytes("column2"), (long) 2, Bytes.toBytes("value2-2"));
- p.addColumn(FAMILY, Bytes.toBytes("column2"), (long) 3, Bytes.toBytes("value2-3"));
- p.addColumn(FAMILY, Bytes.toBytes("column3"), (long) 2, Bytes.toBytes("value3-2"));
- p.addColumn(FAMILY, Bytes.toBytes("column4"), (long) 1, Bytes.toBytes("value4-1"));
- p.addColumn(FAMILY, Bytes.toBytes("column4"), (long) 2, Bytes.toBytes("value4-2"));
- p.addColumn(FAMILY, Bytes.toBytes("column4"), (long) 3, Bytes.toBytes("value4-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column0"), 3L, Bytes.toBytes("value0-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column1"), 3L, Bytes.toBytes("value1-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column2"), 1L, Bytes.toBytes("value2-1"));
+ p.addColumn(FAMILY, Bytes.toBytes("column2"), 2L, Bytes.toBytes("value2-2"));
+ p.addColumn(FAMILY, Bytes.toBytes("column2"), 3L, Bytes.toBytes("value2-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column3"), 2L, Bytes.toBytes("value3-2"));
+ p.addColumn(FAMILY, Bytes.toBytes("column4"), 1L, Bytes.toBytes("value4-1"));
+ p.addColumn(FAMILY, Bytes.toBytes("column4"), 2L, Bytes.toBytes("value4-2"));
+ p.addColumn(FAMILY, Bytes.toBytes("column4"), 3L, Bytes.toBytes("value4-3"));
ht.put(p);
ArrayList<Long> timestamps = new ArrayList<>();
@@ -209,7 +209,7 @@ public class TestTimestampsFilter {
+ Bytes.toString(CellUtil.cloneValue(kv)));
}
- assertEquals(result.listCells().size(), 2);
+ assertEquals(2, result.listCells().size());
assertTrue(CellUtil.matchingValue(result.listCells().get(0), Bytes.toBytes("value2-3")));
assertTrue(CellUtil.matchingValue(result.listCells().get(1), Bytes.toBytes("value4-3")));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
index 6511a42..3f4029a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java
@@ -70,7 +70,7 @@ public class TestUpdateConfiguration {
admin.updateConfiguration(server);
Configuration conf = TEST_UTIL.getMiniHBaseCluster().getMaster().getConfiguration();
int custom = conf.getInt("hbase.custom.config", 0);
- assertEquals(custom, 1000);
+ assertEquals(1000, custom);
// restore hbase-site.xml
Files.copy(cnf3Path, cnfPath, StandardCopyOption.REPLACE_EXISTING);
}
@@ -97,17 +97,20 @@ public class TestUpdateConfiguration {
admin.updateConfiguration();
// Check the configuration of the Masters
- Configuration masterConfiguration = TEST_UTIL.getMiniHBaseCluster().getMaster(0).getConfiguration();
+ Configuration masterConfiguration =
+ TEST_UTIL.getMiniHBaseCluster().getMaster(0).getConfiguration();
int custom = masterConfiguration.getInt("hbase.custom.config", 0);
- assertEquals(custom, 1000);
- Configuration backupMasterConfiguration = TEST_UTIL.getMiniHBaseCluster().getMaster(1).getConfiguration();
+ assertEquals(1000, custom);
+ Configuration backupMasterConfiguration =
+ TEST_UTIL.getMiniHBaseCluster().getMaster(1).getConfiguration();
custom = backupMasterConfiguration.getInt("hbase.custom.config", 0);
- assertEquals(custom, 1000);
+ assertEquals(1000, custom);
// Check the configuration of the RegionServer
- Configuration regionServerConfiguration = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0).getConfiguration();
+ Configuration regionServerConfiguration =
+ TEST_UTIL.getMiniHBaseCluster().getRegionServer(0).getConfiguration();
custom = regionServerConfiguration.getInt("hbase.custom.config", 0);
- assertEquals(custom, 1000);
+ assertEquals(1000, custom);
// restore hbase-site.xml
Files.copy(cnf3Path, cnfPath, StandardCopyOption.REPLACE_EXISTING);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
index 3b7fd84..82eda2a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
@@ -88,7 +88,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
admin1.disableTableReplication(tableName);
table = admin1.getTableDescriptor(tableName);
for (HColumnDescriptor fam : table.getColumnFamilies()) {
- assertEquals(fam.getScope(), HConstants.REPLICATION_SCOPE_LOCAL);
+ assertEquals(HConstants.REPLICATION_SCOPE_LOCAL, fam.getScope());
}
}
@@ -119,7 +119,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
admin1.enableTableReplication(tableName);
table = admin1.getTableDescriptor(tableName);
for (HColumnDescriptor fam : table.getColumnFamilies()) {
- assertEquals(fam.getScope(), HConstants.REPLICATION_SCOPE_GLOBAL);
+ assertEquals(HConstants.REPLICATION_SCOPE_GLOBAL, fam.getScope());
}
}
@@ -144,7 +144,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
admin1.enableTableReplication(tableName);
table = admin1.getTableDescriptor(tableName);
for (HColumnDescriptor fam : table.getColumnFamilies()) {
- assertEquals(fam.getScope(), HConstants.REPLICATION_SCOPE_GLOBAL);
+ assertEquals(HConstants.REPLICATION_SCOPE_GLOBAL, fam.getScope());
}
}
@@ -153,12 +153,12 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
admin1.disableTableReplication(tableName);
HTableDescriptor table = admin1.getTableDescriptor(tableName);
for (HColumnDescriptor fam : table.getColumnFamilies()) {
- assertEquals(fam.getScope(), HConstants.REPLICATION_SCOPE_LOCAL);
+ assertEquals(HConstants.REPLICATION_SCOPE_LOCAL, fam.getScope());
}
admin1.enableTableReplication(tableName);
table = admin1.getTableDescriptor(tableName);
for (HColumnDescriptor fam : table.getColumnFamilies()) {
- assertEquals(fam.getScope(), HConstants.REPLICATION_SCOPE_GLOBAL);
+ assertEquals(HConstants.REPLICATION_SCOPE_GLOBAL, fam.getScope());
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
index 1f8dbc4..06e1a35 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java
@@ -42,6 +42,7 @@ public class TestConfigurationManager {
register();
}
+ @Override
public void onConfigurationChange(Configuration conf) {
notifiedOnChange = true;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
index 35bcd77..d7c70da 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
@@ -85,7 +85,7 @@ public class TestConstraint {
try {
// test that we don't fail on a valid put
Put put = new Put(row1);
- byte[] value = Integer.toString(10).getBytes();
+ byte[] value = Bytes.toBytes(Integer.toString(10));
byte[] qualifier = new byte[0];
put.addColumn(dummy, qualifier, value);
table.put(put);
@@ -119,7 +119,7 @@ public class TestConstraint {
// test that we do fail on violation
Put put = new Put(row1);
byte[] qualifier = new byte[0];
- put.addColumn(dummy, qualifier, "fail".getBytes());
+ put.addColumn(dummy, qualifier, Bytes.toBytes("fail"));
LOG.warn("Doing put in table");
try {
table.put(put);
@@ -158,7 +158,7 @@ public class TestConstraint {
// test that we don't fail because its disabled
Put put = new Put(row1);
byte[] qualifier = new byte[0];
- put.addColumn(dummy, qualifier, "pass".getBytes());
+ put.addColumn(dummy, qualifier, Bytes.toBytes("pass"));
table.put(put);
} finally {
table.close();
@@ -191,7 +191,7 @@ public class TestConstraint {
// test that we do fail on violation
Put put = new Put(row1);
byte[] qualifier = new byte[0];
- put.addColumn(dummy, qualifier, "pass".getBytes());
+ put.addColumn(dummy, qualifier, Bytes.toBytes("pass"));
LOG.warn("Doing put in table");
table.put(put);
} finally {
@@ -224,7 +224,7 @@ public class TestConstraint {
// test that we do fail on violation
Put put = new Put(row1);
byte[] qualifier = new byte[0];
- put.addColumn(dummy, qualifier, "pass".getBytes());
+ put.addColumn(dummy, qualifier, Bytes.toBytes("pass"));
try{
table.put(put);
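
The "fail".getBytes() to Bytes.toBytes("fail") changes in this file are about determinism: String.getBytes() with no argument uses the JVM's default charset, which varies by platform and locale, while org.apache.hadoop.hbase.util.Bytes.toBytes always encodes UTF-8. A sketch of the portable alternative using only the JDK:

    import java.nio.charset.StandardCharsets;

    public class CharsetExample {
      public static void main(String[] args) {
        String s = "fail";
        byte[] platformDependent = s.getBytes(); // default charset; varies by JVM config
        byte[] alwaysUtf8 = s.getBytes(StandardCharsets.UTF_8); // what Bytes.toBytes does
        System.out.println(platformDependent.length + " vs " + alwaysUtf8.length);
      }
    }
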
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
index f3d90f6..1da31da 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java
@@ -123,7 +123,8 @@ public class SampleRegionWALCoprocessor implements WALCoprocessor, RegionCoproce
if (Arrays.equals(family, changedFamily) &&
Arrays.equals(qulifier, changedQualifier)) {
LOG.debug("Found the KeyValue from WALEdit which should be changed.");
- cell.getValueArray()[cell.getValueOffset()] += 1;
+ cell.getValueArray()[cell.getValueOffset()] =
+ (byte) (cell.getValueArray()[cell.getValueOffset()] + 1);
}
}
if (null != row) {
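
The rewrite of cell.getValueArray()[offset] += 1 spells out what the compound assignment was hiding: b += 1 on a byte compiles to b = (byte)(b + 1), an int addition followed by a silent narrowing cast, which error-prone's NarrowingCompoundAssignment check flags. The explicit form makes the wrap-around visible:

    public class NarrowingExample {
      public static void main(String[] args) {
        byte[] value = { 127 };
        // Equivalent to the old 'value[0] += 1': int math, then narrowing.
        value[0] = (byte) (value[0] + 1);
        System.out.println(value[0]); // -128: byte overflow wraps around
      }
    }
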
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
index 1394dbd..0d864b6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
@@ -558,7 +558,7 @@ public class SimpleRegionObserver implements RegionCoprocessor, RegionObserver {
TestRegionObserverInterface.TEST_TABLE)) {
assertNotNull(familyPaths);
assertEquals(1,familyPaths.size());
- assertArrayEquals(familyPaths.get(0).getFirst(), TestRegionObserverInterface.A);
+ assertArrayEquals(TestRegionObserverInterface.A, familyPaths.get(0).getFirst());
String familyPath = familyPaths.get(0).getSecond();
String familyName = Bytes.toString(TestRegionObserverInterface.A);
assertEquals(familyPath.substring(familyPath.length()-familyName.length()-1),"/"+familyName);
@@ -577,7 +577,7 @@ public class SimpleRegionObserver implements RegionCoprocessor, RegionObserver {
TestRegionObserverInterface.TEST_TABLE)) {
assertNotNull(familyPaths);
assertEquals(1,familyPaths.size());
- assertArrayEquals(familyPaths.get(0).getFirst(), TestRegionObserverInterface.A);
+ assertArrayEquals(TestRegionObserverInterface.A, familyPaths.get(0).getFirst());
String familyPath = familyPaths.get(0).getSecond();
String familyName = Bytes.toString(TestRegionObserverInterface.A);
assertEquals(familyPath.substring(familyPath.length()-familyName.length()-1),"/"+familyName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
index 27865f7..6e6c34b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorConfiguration.java
@@ -108,12 +108,10 @@ public class TestCoprocessorConfiguration {
tableCoprocessorLoaded.set(false);
new RegionCoprocessorHost(region, rsServices, conf);
assertEquals("System coprocessors loading default was not honored",
- systemCoprocessorLoaded.get(),
- CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED);
+ CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED, systemCoprocessorLoaded.get());
assertEquals("Table coprocessors loading default was not honored",
- tableCoprocessorLoaded.get(),
- CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED &&
- CoprocessorHost.DEFAULT_USER_COPROCESSORS_ENABLED);
+ CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED &&
+ CoprocessorHost.DEFAULT_USER_COPROCESSORS_ENABLED, tableCoprocessorLoaded.get());
}
@Test
@@ -123,8 +121,7 @@ public class TestCoprocessorConfiguration {
systemCoprocessorLoaded.set(false);
new RegionServerCoprocessorHost(rsServices, conf);
assertEquals("System coprocessors loading default was not honored",
- systemCoprocessorLoaded.get(),
- CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED);
+ CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED, systemCoprocessorLoaded.get());
}
@Test
@@ -134,8 +131,7 @@ public class TestCoprocessorConfiguration {
systemCoprocessorLoaded.set(false);
new MasterCoprocessorHost(masterServices, conf);
assertEquals("System coprocessors loading default was not honored",
- systemCoprocessorLoaded.get(),
- CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED);
+ CoprocessorHost.DEFAULT_COPROCESSORS_ENABLED, systemCoprocessorLoaded.get());
}
@Test
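
The three-argument overload reordered above follows the same contract with a message prepended: assertEquals(String message, expected, actual). Getting the order wrong here is easier to miss because the custom message prints either way. A sketch with a stand-in flag (not this test's fixture):

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class MessageAssertExample {
      @Test
      public void messageThenExpectedThenActual() {
        boolean defaultEnabled = true; // stand-in for a loaded default
        assertEquals("System coprocessors loading default was not honored",
            true, defaultEnabled);
      }
    }
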
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
index 33a488f..de697f1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.java
@@ -34,7 +34,7 @@ public class TestCoprocessorHost {
/**
* An {@link Abortable} implementation for tests.
*/
- private class TestAbortable implements Abortable {
+ private static class TestAbortable implements Abortable {
private volatile boolean aborted = false;
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
index 2a35365..bd0efd8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
@@ -93,7 +93,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
fail("BuggyMasterObserver failed to throw an exception.");
} catch (IOException e) {
assertEquals("HBaseAdmin threw an interrupted IOException as expected.",
- e.getClass().getName(), "java.io.InterruptedIOException");
+ "java.io.InterruptedIOException", e.getClass().getName());
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 431e73e..ea817ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -932,6 +932,7 @@ public class TestMasterObserver {
return preModifyTableActionCalled && !postCompletedModifyTableActionCalled;
}
+ @Override
public void preEnableTableAction(
final ObserverContext<MasterCoprocessorEnvironment> ctx, final TableName tableName)
throws IOException {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 166dfdd..09aa4ff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -341,6 +341,7 @@ public class TestWALObserver {
User user = HBaseTestingUtility.getDifferentUser(newConf,
".replay.wal.secondtime");
user.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
Path p = runWALSplit(newConf);
LOG.info("WALSplit path == " + p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
index b0b17f9..a3f2f1c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
@@ -145,7 +145,7 @@ public class TestExecutorService {
}
public static class TestEventHandler extends EventHandler {
- private AtomicBoolean lock;
+ private final AtomicBoolean lock;
private AtomicInteger counter;
public TestEventHandler(Server server, EventType eventType,
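
Marking the AtomicBoolean field final is a small but real improvement: the reference can no longer be reassigned, and final fields get safe-publication guarantees from the Java memory model when the handler is shared across threads. A sketch with invented names:

    import java.util.concurrent.atomic.AtomicBoolean;

    class StoppableTask {
      // final: reference fixed at construction and safely published.
      private final AtomicBoolean running = new AtomicBoolean(true);

      void stop() { running.set(false); }
      boolean isRunning() { return running.get(); }
    }
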
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
index c31eebf..0e5fdb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
@@ -103,13 +103,13 @@ public class TestBitComparator {
private void testOperation(byte[] data, byte[] comparatorBytes, BitComparator.BitwiseOp operator,
int expected) {
BitComparator comparator = new BitComparator(comparatorBytes, operator);
- assertEquals(comparator.compareTo(data), expected);
+ assertEquals(expected, comparator.compareTo(data));
}
private void testOperation(ByteBuffer data, byte[] comparatorBytes,
BitComparator.BitwiseOp operator, int expected) {
BitComparator comparator = new BitComparator(comparatorBytes, operator);
- assertEquals(comparator.compareTo(data, 0, data.capacity()), expected);
+ assertEquals(expected, comparator.compareTo(data, 0, data.capacity()));
}
@Test
@@ -142,13 +142,13 @@ public class TestBitComparator {
private void testOperationWithOffset(byte[] data, byte[] comparatorBytes,
BitComparator.BitwiseOp operator, int expected) {
BitComparator comparator = new BitComparator(comparatorBytes, operator);
- assertEquals(comparator.compareTo(data, 1, comparatorBytes.length), expected);
+ assertEquals(expected, comparator.compareTo(data, 1, comparatorBytes.length));
}
private void testOperationWithOffset(ByteBuffer data, byte[] comparatorBytes,
BitComparator.BitwiseOp operator, int expected) {
BitComparator comparator = new BitComparator(comparatorBytes, operator);
- assertEquals(comparator.compareTo(data, 1, comparatorBytes.length), expected);
+ assertEquals(expected, comparator.compareTo(data, 1, comparatorBytes.length));
}
}
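Many hunks in this patch swap assertEquals arguments into the (expected, actual) order JUnit defines. The order only matters when the assertion fails: the failure message labels the first argument "expected", so swapped arguments report the values backwards. A small illustration (values hypothetical):

  import static org.junit.Assert.assertEquals;

  public class ArgumentOrderExample {
    public static void main(String[] args) {
      int actual = 7;
      // Swapped order would fail with "expected:<7> but was:<1>",
      // misreporting which value the test actually expected.
      // assertEquals(actual, 1);
      assertEquals(1, actual);  // fails with "expected:<1> but was:<7>" -- the true story
    }
  }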
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index ca2c88b..ec11ce0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -136,8 +136,8 @@ public class TestFilter {
Bytes.toBytes("f1"), Bytes.toBytes("f2")
};
- private long numRows = ROWS_ONE.length + ROWS_TWO.length;
- private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
+ private long numRows = (long) ROWS_ONE.length + ROWS_TWO.length;
+ private long colsPerRow = (long) FAMILIES.length * QUALIFIERS_ONE.length;
@Before
public void setUp() throws Exception {
@@ -1756,15 +1756,14 @@ public class TestFilter {
assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx]));
assertFalse("Should not have returned whole value", CellUtil.matchingValue(kv, kvs[idx]));
if (useLen) {
- assertEquals("Value in result is not SIZEOF_INT",
- kv.getValueLength(), Bytes.SIZEOF_INT);
+ assertEquals("Value in result is not SIZEOF_INT", Bytes.SIZEOF_INT, kv.getValueLength());
LOG.info("idx = " + idx + ", len=" + kvs[idx].getValueLength()
+ ", actual=" + Bytes.toInt(CellUtil.cloneValue(kv)));
assertEquals("Scan value should be the length of the actual value. ",
kvs[idx].getValueLength(), Bytes.toInt(CellUtil.cloneValue(kv)) );
LOG.info("good");
} else {
- assertEquals("Value in result is not empty", kv.getValueLength(), 0);
+ assertEquals("Value in result is not empty", 0, kv.getValueLength());
}
idx++;
}
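The (long) casts above are not cosmetic: ROWS_ONE.length + ROWS_TWO.length and FAMILIES.length * QUALIFIERS_ONE.length are computed in 32-bit int arithmetic and only widened to long after any overflow has already happened. The same reasoning drives the 1024L literal further down in TestFuzzyRowFilterEndToEnd. A self-contained sketch of the hazard:

  public class OverflowExample {
    public static void main(String[] args) {
      long wrong = 1024 * 1024 * 1024 * 10;   // int math wraps first: prints -2147483648
      long right = 1024L * 1024 * 1024 * 10;  // long from the first multiply: 10737418240
      System.out.println(wrong + " vs " + right);
    }
  }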
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
index fdd7e77..ad5ee99 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java
@@ -136,6 +136,7 @@ public class TestFilterFromRegionSide {
public static class FirstSeveralCellsFilter extends FilterBase{
private int count = 0;
+ @Override
public void reset() {
count = 0;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 6574d04..2d2a425 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -768,7 +768,7 @@ public class TestFilterList {
MockFilter filter5 = new MockFilter(ReturnCode.SKIP);
MockFilter filter6 = new MockFilter(ReturnCode.SEEK_NEXT_USING_HINT);
FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, filter1, filter2);
- assertEquals(filterList.filterCell(kv1), ReturnCode.INCLUDE);
+ assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv1));
filterList = new FilterList(Operator.MUST_PASS_ONE, filter2, filter3);
assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv1));
@@ -936,6 +936,7 @@ public class TestFilterList {
private static class MockNextRowFilter extends FilterBase {
private int hitCount = 0;
+ @Override
public ReturnCode filterCell(final Cell v) throws IOException {
hitCount++;
return ReturnCode.NEXT_ROW;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
index 8fa41e3..d470fac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
@@ -301,8 +301,8 @@ public class TestFilterSerialization {
// Non-empty timestamp list
LinkedList<Long> list = new LinkedList<>();
- list.add(new Long(System.currentTimeMillis()));
- list.add(new Long(System.currentTimeMillis()));
+ list.add(System.currentTimeMillis());
+ list.add(System.currentTimeMillis());
timestampsFilter = new TimestampsFilter(list);
assertTrue(timestampsFilter.areSerializedFieldsEqual(
ProtobufUtil.toFilter(ProtobufUtil.toFilter(timestampsFilter))));
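Replacing new Long(...) with plain autoboxing is both a deprecation fix (the boxed-primitive constructors are deprecated in newer JDKs) and a minor efficiency win, since autoboxing compiles to Long.valueOf(long), which may return cached instances instead of always allocating. A minimal sketch:

  import java.util.LinkedList;

  public class BoxingExample {
    public static void main(String[] args) {
      LinkedList<Long> list = new LinkedList<>();
      list.add(System.currentTimeMillis());  // autoboxed via Long.valueOf(long)
      list.add(Long.valueOf(42L));           // the explicit equivalent
      // list.add(new Long(42L));            // always allocates; deprecated since JDK 9
      System.out.println(list);
    }
  }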
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
index 5f25b49..25ea358 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
@@ -86,7 +86,7 @@ public class TestFuzzyRowFilterEndToEnd {
conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
ConstantSizeRegionSplitPolicy.class.getName());
// set no splits
- conf.setLong(HConstants.HREGION_MAX_FILESIZE, ((long) 1024) * 1024 * 1024 * 10);
+ conf.setLong(HConstants.HREGION_MAX_FILESIZE, (1024L) * 1024 * 1024 * 10);
TEST_UTIL.startMiniCluster();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
index ebccc34..768ab7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
@@ -162,20 +162,24 @@ public class TestInvocationRecordFilter {
private List<Cell> visitedKeyValues = new ArrayList<>();
+ @Override
public void reset() {
visitedKeyValues.clear();
}
+ @Override
public ReturnCode filterCell(final Cell ignored) {
visitedKeyValues.add(ignored);
return ReturnCode.INCLUDE;
}
+ @Override
public void filterRowCells(List<Cell> kvs) {
kvs.clear();
kvs.addAll(visitedKeyValues);
}
+ @Override
public boolean hasFilterRow() {
return true;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
index bcd239d..c5200f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
@@ -18,9 +18,12 @@
*/
package org.apache.hadoop.hbase.filter;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@@ -89,7 +92,7 @@ public class TestParseFilter {
String filterString = " PrefixFilter('row' ) ";
PrefixFilter prefixFilter = doTestFilter(filterString, PrefixFilter.class);
byte [] prefix = prefixFilter.getPrefix();
- assertEquals(new String(prefix), "row");
+ assertEquals("row", new String(prefix, StandardCharsets.UTF_8));
filterString = " PrefixFilter(row)";
@@ -107,7 +110,7 @@ public class TestParseFilter {
ColumnPrefixFilter columnPrefixFilter =
doTestFilter(filterString, ColumnPrefixFilter.class);
byte [] columnPrefix = columnPrefixFilter.getPrefix();
- assertEquals(new String(columnPrefix), "qualifier");
+ assertEquals("qualifier", new String(columnPrefix, StandardCharsets.UTF_8));
}
@Test
@@ -116,8 +119,8 @@ public class TestParseFilter {
MultipleColumnPrefixFilter multipleColumnPrefixFilter =
doTestFilter(filterString, MultipleColumnPrefixFilter.class);
byte [][] prefixes = multipleColumnPrefixFilter.getPrefix();
- assertEquals(new String(prefixes[0]), "qualifier1");
- assertEquals(new String(prefixes[1]), "qualifier2");
+ assertEquals("qualifier1", new String(prefixes[0], StandardCharsets.UTF_8));
+ assertEquals("qualifier2", new String(prefixes[1], StandardCharsets.UTF_8));
}
@Test
@@ -126,7 +129,7 @@ public class TestParseFilter {
ColumnCountGetFilter columnCountGetFilter =
doTestFilter(filterString, ColumnCountGetFilter.class);
int limit = columnCountGetFilter.getLimit();
- assertEquals(limit, 4);
+ assertEquals(4, limit);
filterString = " ColumnCountGetFilter('abc')";
try {
@@ -151,7 +154,7 @@ public class TestParseFilter {
PageFilter pageFilter =
doTestFilter(filterString, PageFilter.class);
long pageSize = pageFilter.getPageSize();
- assertEquals(pageSize, 4);
+ assertEquals(4, pageSize);
filterString = " PageFilter('123')";
try {
@@ -168,9 +171,9 @@ public class TestParseFilter {
ColumnPaginationFilter columnPaginationFilter =
doTestFilter(filterString, ColumnPaginationFilter.class);
int limit = columnPaginationFilter.getLimit();
- assertEquals(limit, 4);
+ assertEquals(4, limit);
int offset = columnPaginationFilter.getOffset();
- assertEquals(offset, 6);
+ assertEquals(6, offset);
filterString = " ColumnPaginationFilter('124')";
try {
@@ -203,7 +206,7 @@ public class TestParseFilter {
InclusiveStopFilter inclusiveStopFilter =
doTestFilter(filterString, InclusiveStopFilter.class);
byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
- assertEquals(new String(stopRowKey), "row 3");
+ assertEquals("row 3", new String(stopRowKey, StandardCharsets.UTF_8));
}
@@ -213,13 +216,13 @@ public class TestParseFilter {
TimestampsFilter timestampsFilter =
doTestFilter(filterString, TimestampsFilter.class);
List<Long> timestamps = timestampsFilter.getTimestamps();
- assertEquals(timestamps.size(), 2);
- assertEquals(timestamps.get(0), new Long(6));
+ assertEquals(2, timestamps.size());
+ assertEquals(Long.valueOf(6), timestamps.get(0));
filterString = "TimestampsFilter()";
timestampsFilter = doTestFilter(filterString, TimestampsFilter.class);
timestamps = timestampsFilter.getTimestamps();
- assertEquals(timestamps.size(), 0);
+ assertEquals(0, timestamps.size());
filterString = "TimestampsFilter(9223372036854775808, 6)";
try {
@@ -246,7 +249,7 @@ public class TestParseFilter {
assertEquals(CompareOperator.EQUAL, rowFilter.getCompareOperator());
assertTrue(rowFilter.getComparator() instanceof BinaryComparator);
BinaryComparator binaryComparator = (BinaryComparator) rowFilter.getComparator();
- assertEquals("regionse", new String(binaryComparator.getValue()));
+ assertEquals("regionse", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -258,7 +261,7 @@ public class TestParseFilter {
assertTrue(familyFilter.getComparator() instanceof BinaryPrefixComparator);
BinaryPrefixComparator binaryPrefixComparator =
(BinaryPrefixComparator) familyFilter.getComparator();
- assertEquals("pre", new String(binaryPrefixComparator.getValue()));
+ assertEquals("pre", new String(binaryPrefixComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -270,7 +273,7 @@ public class TestParseFilter {
assertTrue(qualifierFilter.getComparator() instanceof RegexStringComparator);
RegexStringComparator regexStringComparator =
(RegexStringComparator) qualifierFilter.getComparator();
- assertEquals("pre*", new String(regexStringComparator.getValue()));
+ assertEquals("pre*", new String(regexStringComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -282,7 +285,7 @@ public class TestParseFilter {
assertTrue(valueFilter.getComparator() instanceof SubstringComparator);
SubstringComparator substringComparator =
(SubstringComparator) valueFilter.getComparator();
- assertEquals("pre", new String(substringComparator.getValue()));
+ assertEquals("pre", new String(substringComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -290,8 +293,8 @@ public class TestParseFilter {
String filterString = "ColumnRangeFilter('abc', true, 'xyz', false)";
ColumnRangeFilter columnRangeFilter =
doTestFilter(filterString, ColumnRangeFilter.class);
- assertEquals("abc", new String(columnRangeFilter.getMinColumn()));
- assertEquals("xyz", new String(columnRangeFilter.getMaxColumn()));
+ assertEquals("abc", new String(columnRangeFilter.getMinColumn(), StandardCharsets.UTF_8));
+ assertEquals("xyz", new String(columnRangeFilter.getMaxColumn(), StandardCharsets.UTF_8));
assertTrue(columnRangeFilter.isMinColumnInclusive());
assertFalse(columnRangeFilter.isMaxColumnInclusive());
}
@@ -301,13 +304,14 @@ public class TestParseFilter {
String filterString = "DependentColumnFilter('family', 'qualifier', true, =, 'binary:abc')";
DependentColumnFilter dependentColumnFilter =
doTestFilter(filterString, DependentColumnFilter.class);
- assertEquals("family", new String(dependentColumnFilter.getFamily()));
- assertEquals("qualifier", new String(dependentColumnFilter.getQualifier()));
+ assertEquals("family", new String(dependentColumnFilter.getFamily(), StandardCharsets.UTF_8));
+ assertEquals("qualifier",
+ new String(dependentColumnFilter.getQualifier(), StandardCharsets.UTF_8));
assertTrue(dependentColumnFilter.getDropDependentColumn());
assertEquals(CompareOperator.EQUAL, dependentColumnFilter.getCompareOperator());
assertTrue(dependentColumnFilter.getComparator() instanceof BinaryComparator);
BinaryComparator binaryComparator = (BinaryComparator)dependentColumnFilter.getComparator();
- assertEquals("abc", new String(binaryComparator.getValue()));
+ assertEquals("abc", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -316,25 +320,27 @@ public class TestParseFilter {
"('family', 'qualifier', >=, 'binary:a', true, false)";
SingleColumnValueFilter singleColumnValueFilter =
doTestFilter(filterString, SingleColumnValueFilter.class);
- assertEquals("family", new String(singleColumnValueFilter.getFamily()));
- assertEquals("qualifier", new String(singleColumnValueFilter.getQualifier()));
- assertEquals(singleColumnValueFilter.getCompareOperator(), CompareOperator.GREATER_OR_EQUAL);
+ assertEquals("family", new String(singleColumnValueFilter.getFamily(), StandardCharsets.UTF_8));
+ assertEquals("qualifier",
+ new String(singleColumnValueFilter.getQualifier(), StandardCharsets.UTF_8));
+ assertEquals(CompareOperator.GREATER_OR_EQUAL, singleColumnValueFilter.getCompareOperator());
assertTrue(singleColumnValueFilter.getComparator() instanceof BinaryComparator);
BinaryComparator binaryComparator = (BinaryComparator) singleColumnValueFilter.getComparator();
- assertEquals(new String(binaryComparator.getValue()), "a");
+ assertEquals("a", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
assertTrue(singleColumnValueFilter.getFilterIfMissing());
assertFalse(singleColumnValueFilter.getLatestVersionOnly());
filterString = "SingleColumnValueFilter ('family', 'qualifier', >, 'binaryprefix:a')";
singleColumnValueFilter = doTestFilter(filterString, SingleColumnValueFilter.class);
- assertEquals("family", new String(singleColumnValueFilter.getFamily()));
- assertEquals("qualifier", new String(singleColumnValueFilter.getQualifier()));
- assertEquals(singleColumnValueFilter.getCompareOperator(), CompareOperator.GREATER);
+ assertEquals("family", new String(singleColumnValueFilter.getFamily(), StandardCharsets.UTF_8));
+ assertEquals("qualifier",
+ new String(singleColumnValueFilter.getQualifier(), StandardCharsets.UTF_8));
+ assertEquals(CompareOperator.GREATER, singleColumnValueFilter.getCompareOperator());
assertTrue(singleColumnValueFilter.getComparator() instanceof BinaryPrefixComparator);
BinaryPrefixComparator binaryPrefixComparator =
(BinaryPrefixComparator) singleColumnValueFilter.getComparator();
- assertEquals(new String(binaryPrefixComparator.getValue()), "a");
+ assertEquals("a", new String(binaryPrefixComparator.getValue(), StandardCharsets.UTF_8));
assertFalse(singleColumnValueFilter.getFilterIfMissing());
assertTrue(singleColumnValueFilter.getLatestVersionOnly());
}
@@ -345,10 +351,13 @@ public class TestParseFilter {
"SingleColumnValueExcludeFilter ('family', 'qualifier', <, 'binaryprefix:a')";
SingleColumnValueExcludeFilter singleColumnValueExcludeFilter =
doTestFilter(filterString, SingleColumnValueExcludeFilter.class);
- assertEquals(singleColumnValueExcludeFilter.getCompareOperator(), CompareOperator.LESS);
- assertEquals("family", new String(singleColumnValueExcludeFilter.getFamily()));
- assertEquals("qualifier", new String(singleColumnValueExcludeFilter.getQualifier()));
- assertEquals(new String(singleColumnValueExcludeFilter.getComparator().getValue()), "a");
+ assertEquals(CompareOperator.LESS, singleColumnValueExcludeFilter.getCompareOperator());
+ assertEquals("family",
+ new String(singleColumnValueExcludeFilter.getFamily(), StandardCharsets.UTF_8));
+ assertEquals("qualifier",
+ new String(singleColumnValueExcludeFilter.getQualifier(), StandardCharsets.UTF_8));
+ assertEquals("a", new String(singleColumnValueExcludeFilter.getComparator().getValue(),
+ StandardCharsets.UTF_8));
assertFalse(singleColumnValueExcludeFilter.getFilterIfMissing());
assertTrue(singleColumnValueExcludeFilter.getLatestVersionOnly());
@@ -356,14 +365,16 @@ public class TestParseFilter {
"('family', 'qualifier', <=, 'binaryprefix:a', true, false)";
singleColumnValueExcludeFilter =
doTestFilter(filterString, SingleColumnValueExcludeFilter.class);
- assertEquals("family", new String(singleColumnValueExcludeFilter.getFamily()));
- assertEquals("qualifier", new String(singleColumnValueExcludeFilter.getQualifier()));
- assertEquals(singleColumnValueExcludeFilter.getCompareOperator(),
- CompareOperator.LESS_OR_EQUAL);
+ assertEquals("family",
+ new String(singleColumnValueExcludeFilter.getFamily(), StandardCharsets.UTF_8));
+ assertEquals("qualifier",
+ new String(singleColumnValueExcludeFilter.getQualifier(), StandardCharsets.UTF_8));
+ assertEquals(CompareOperator.LESS_OR_EQUAL,
+ singleColumnValueExcludeFilter.getCompareOperator());
assertTrue(singleColumnValueExcludeFilter.getComparator() instanceof BinaryPrefixComparator);
BinaryPrefixComparator binaryPrefixComparator =
(BinaryPrefixComparator) singleColumnValueExcludeFilter.getComparator();
- assertEquals(new String(binaryPrefixComparator.getValue()), "a");
+ assertEquals("a", new String(binaryPrefixComparator.getValue(), StandardCharsets.UTF_8));
assertTrue(singleColumnValueExcludeFilter.getFilterIfMissing());
assertFalse(singleColumnValueExcludeFilter.getLatestVersionOnly());
}
@@ -379,7 +390,7 @@ public class TestParseFilter {
assertEquals(CompareOperator.EQUAL, valueFilter.getCompareOperator());
assertTrue(valueFilter.getComparator() instanceof BinaryComparator);
BinaryComparator binaryComparator = (BinaryComparator) valueFilter.getComparator();
- assertEquals("0", new String(binaryComparator.getValue()));
+ assertEquals("0", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -393,7 +404,7 @@ public class TestParseFilter {
assertEquals(CompareOperator.NOT_EQUAL, rowFilter.getCompareOperator());
assertTrue(rowFilter.getComparator() instanceof BinaryComparator);
BinaryComparator binaryComparator = (BinaryComparator) rowFilter.getComparator();
- assertEquals("row1", new String(binaryComparator.getValue()));
+ assertEquals("row1", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -407,7 +418,7 @@ public class TestParseFilter {
assertTrue(filters.get(1) instanceof FirstKeyOnlyFilter);
PrefixFilter PrefixFilter = (PrefixFilter) filters.get(0);
byte [] prefix = PrefixFilter.getPrefix();
- assertEquals(new String(prefix), "realtime");
+ assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
FirstKeyOnlyFilter firstKeyOnlyFilter = (FirstKeyOnlyFilter) filters.get(1);
}
@@ -420,7 +431,7 @@ public class TestParseFilter {
ArrayList<Filter> filterListFilters = (ArrayList<Filter>) filterList.getFilters();
assertTrue(filterListFilters.get(0) instanceof FilterList);
assertTrue(filterListFilters.get(1) instanceof FamilyFilter);
- assertEquals(filterList.getOperator(), FilterList.Operator.MUST_PASS_ONE);
+ assertEquals(FilterList.Operator.MUST_PASS_ONE, filterList.getOperator());
filterList = (FilterList) filterListFilters.get(0);
FamilyFilter familyFilter = (FamilyFilter) filterListFilters.get(1);
@@ -428,22 +439,22 @@ public class TestParseFilter {
filterListFilters = (ArrayList<Filter>)filterList.getFilters();
assertTrue(filterListFilters.get(0) instanceof PrefixFilter);
assertTrue(filterListFilters.get(1) instanceof QualifierFilter);
- assertEquals(filterList.getOperator(), FilterList.Operator.MUST_PASS_ALL);
+ assertEquals(FilterList.Operator.MUST_PASS_ALL, filterList.getOperator());
assertEquals(CompareOperator.EQUAL, familyFilter.getCompareOperator());
assertTrue(familyFilter.getComparator() instanceof BinaryComparator);
BinaryComparator binaryComparator = (BinaryComparator) familyFilter.getComparator();
- assertEquals("qualifier", new String(binaryComparator.getValue()));
+ assertEquals("qualifier", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
PrefixFilter prefixFilter = (PrefixFilter) filterListFilters.get(0);
byte [] prefix = prefixFilter.getPrefix();
- assertEquals(new String(prefix), "realtime");
+ assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
QualifierFilter qualifierFilter = (QualifierFilter) filterListFilters.get(1);
assertEquals(CompareOperator.GREATER_OR_EQUAL, qualifierFilter.getCompareOperator());
assertTrue(qualifierFilter.getComparator() instanceof BinaryComparator);
binaryComparator = (BinaryComparator) qualifierFilter.getComparator();
- assertEquals("e", new String(binaryComparator.getValue()));
+ assertEquals("e", new String(binaryComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -466,7 +477,7 @@ public class TestParseFilter {
ColumnPrefixFilter columnPrefixFilter = (ColumnPrefixFilter) filters.get(0);
byte [] columnPrefix = columnPrefixFilter.getPrefix();
- assertEquals(new String(columnPrefix), "realtime");
+ assertEquals("realtime", new String(columnPrefix, StandardCharsets.UTF_8));
FirstKeyOnlyFilter firstKeyOnlyFilter = (FirstKeyOnlyFilter) filters.get(1);
@@ -477,7 +488,7 @@ public class TestParseFilter {
assertTrue(familyFilter.getComparator() instanceof SubstringComparator);
SubstringComparator substringComparator =
(SubstringComparator) familyFilter.getComparator();
- assertEquals("hihi", new String(substringComparator.getValue()));
+ assertEquals("hihi", new String(substringComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -497,7 +508,7 @@ public class TestParseFilter {
SkipFilter skipFilter = (SkipFilter) filters.get(2);
byte [] columnPrefix = columnPrefixFilter.getPrefix();
- assertEquals(new String(columnPrefix), "realtime");
+ assertEquals("realtime", new String(columnPrefix, StandardCharsets.UTF_8));
assertTrue(skipFilter.getFilter() instanceof FamilyFilter);
FamilyFilter familyFilter = (FamilyFilter) skipFilter.getFilter();
@@ -506,7 +517,7 @@ public class TestParseFilter {
assertTrue(familyFilter.getComparator() instanceof SubstringComparator);
SubstringComparator substringComparator =
(SubstringComparator) familyFilter.getComparator();
- assertEquals("hihi", new String(substringComparator.getValue()));
+ assertEquals("hihi", new String(substringComparator.getValue(), StandardCharsets.UTF_8));
}
@Test
@@ -537,7 +548,7 @@ public class TestParseFilter {
}
@Test
- public void testIncorrectComparatorType () throws IOException {
+ public void testIncorrectComparatorType() throws IOException {
String filterString = "RowFilter ('>=' , 'binaryoperator:region')";
try {
doTestFilter(filterString, RowFilter.class);
@@ -584,7 +595,7 @@ public class TestParseFilter {
PrefixFilter prefixFilter = (PrefixFilter)filters.get(0);
byte [] prefix = prefixFilter.getPrefix();
- assertEquals(new String(prefix), "realtime");
+ assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
}
@Test
@@ -606,41 +617,40 @@ public class TestParseFilter {
PrefixFilter prefixFilter = (PrefixFilter)filters.get(0);
byte [] prefix = prefixFilter.getPrefix();
- assertEquals(new String(prefix), "realtime");
+ assertEquals("realtime", new String(prefix, StandardCharsets.UTF_8));
SkipFilter skipFilter = (SkipFilter)filters.get(1);
assertTrue(skipFilter.getFilter() instanceof FirstKeyOnlyFilter);
}
@Test
- public void testUnescapedQuote1 () throws IOException {
+ public void testUnescapedQuote1() throws IOException {
String filterString = "InclusiveStopFilter ('row''3')";
InclusiveStopFilter inclusiveStopFilter =
doTestFilter(filterString, InclusiveStopFilter.class);
byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
- assertEquals(new String(stopRowKey), "row'3");
+ assertEquals("row'3", new String(stopRowKey, StandardCharsets.UTF_8));
}
@Test
- public void testUnescapedQuote2 () throws IOException {
+ public void testUnescapedQuote2() throws IOException {
String filterString = "InclusiveStopFilter ('row''3''')";
InclusiveStopFilter inclusiveStopFilter =
doTestFilter(filterString, InclusiveStopFilter.class);
byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
- assertEquals(new String(stopRowKey), "row'3'");
+ assertEquals("row'3'", new String(stopRowKey, StandardCharsets.UTF_8));
}
@Test
- public void testUnescapedQuote3 () throws IOException {
+ public void testUnescapedQuote3() throws IOException {
String filterString = " InclusiveStopFilter ('''')";
- InclusiveStopFilter inclusiveStopFilter =
- doTestFilter(filterString, InclusiveStopFilter.class);
+ InclusiveStopFilter inclusiveStopFilter = doTestFilter(filterString, InclusiveStopFilter.class);
byte [] stopRowKey = inclusiveStopFilter.getStopRowKey();
- assertEquals(new String(stopRowKey), "'");
+ assertEquals("'", new String(stopRowKey, StandardCharsets.UTF_8));
}
@Test
- public void testIncorrectFilterString () throws IOException {
+ public void testIncorrectFilterString() throws IOException {
String filterString = "()";
byte [] filterStringAsByteArray = Bytes.toBytes(filterString);
try {
@@ -652,10 +662,9 @@ public class TestParseFilter {
}
@Test
- public void testCorrectFilterString () throws IOException {
+ public void testCorrectFilterString() throws IOException {
String filterString = "(FirstKeyOnlyFilter())";
- FirstKeyOnlyFilter firstKeyOnlyFilter =
- doTestFilter(filterString, FirstKeyOnlyFilter.class);
+ FirstKeyOnlyFilter firstKeyOnlyFilter = doTestFilter(filterString, FirstKeyOnlyFilter.class);
}
@Test
@@ -665,7 +674,8 @@ public class TestParseFilter {
assertTrue(f.getSupportedFilters().contains("MyFilter"));
}
- private <T extends Filter> T doTestFilter(String filterString, Class<T> clazz) throws IOException {
+ private <T extends Filter> T doTestFilter(String filterString, Class<T> clazz)
+ throws IOException {
byte [] filterStringAsByteArray = Bytes.toBytes(filterString);
filter = f.parseFilterString(filterStringAsByteArray);
assertEquals(clazz, filter.getClass());
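The StandardCharsets.UTF_8 changes that dominate this file share one rationale: new String(byte[]) decodes with the JVM's default charset, so a test that passes on a UTF-8 machine can fail where the default is, say, windows-1252. Passing the charset explicitly makes the decode deterministic and matches Bytes.toBytes, which always encodes UTF-8. A small sketch:

  import java.nio.charset.StandardCharsets;

  public class CharsetExample {
    public static void main(String[] args) {
      byte[] raw = "qualifier".getBytes(StandardCharsets.UTF_8);
      String implicit = new String(raw);                          // platform-dependent decode
      String explicit = new String(raw, StandardCharsets.UTF_8);  // deterministic decode
      // Equal only where the JVM default charset agrees with UTF-8 on these bytes.
      System.out.println(implicit.equals(explicit));
    }
  }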
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
index 764d033..c6b1b5f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
@@ -68,7 +68,7 @@ public class TestSingleColumnValueExcludeFilter {
filter.filterRowCells(kvs);
- assertEquals("resultSize", kvs.size(), 2);
+ assertEquals("resultSize", 2, kvs.size());
assertTrue("leftKV1", CellComparatorImpl.COMPARATOR.compare(kvs.get(0), c) == 0);
assertTrue("leftKV2", CellComparatorImpl.COMPARATOR.compare(kvs.get(1), c) == 0);
assertFalse("allRemainingWhenMatch", filter.filterAllRemaining());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index b24d30b..5ba7dfa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hdfs.DFSClient;
@@ -248,7 +249,7 @@ public class TestBlockReorder {
*/
@Test()
public void testHBaseCluster() throws Exception {
- byte[] sb = "sb".getBytes();
+ byte[] sb = Bytes.toBytes("sb");
htu.startMiniZKCluster();
MiniHBaseCluster hbm = htu.startMiniHBaseCluster(1, 1);
@@ -442,7 +443,7 @@ public class TestBlockReorder {
do {
l = getNamenode(dfs.getClient()).getBlockLocations(fileName, 0, 1);
Assert.assertNotNull(l.getLocatedBlocks());
- Assert.assertEquals(l.getLocatedBlocks().size(), 1);
+ Assert.assertEquals(1, l.getLocatedBlocks().size());
Assert.assertTrue("Expecting " + repCount + " , got " + l.get(0).getLocations().length,
System.currentTimeMillis() < max);
} while (l.get(0).getLocations().length != repCount);
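The same charset concern applies on the encoding side: "sb".getBytes() uses the platform default, while HBase's Bytes.toBytes(String) always encodes UTF-8, which is the convention throughout the codebase. A sketch (assumes hbase-common on the classpath):

  import org.apache.hadoop.hbase.util.Bytes;

  public class EncodeExample {
    public static void main(String[] args) {
      byte[] portable = Bytes.toBytes("sb");  // always UTF-8
      byte[] platform = "sb".getBytes();      // JVM default charset, whatever that is
      System.out.println(Bytes.equals(portable, platform));  // not guaranteed on every platform
    }
  }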
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
index 33bac39..7747bdb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHFileLink.java
@@ -125,7 +125,7 @@ public class TestHFileLink {
HFileLink.parseBackReferenceName(encodedRegion+"."+
tableName.getNameAsString().replace(TableName.NAMESPACE_DELIM, '='));
assertEquals(parsedRef.getFirst(), tableName);
- assertEquals(parsedRef.getSecond(), encodedRegion);
+ assertEquals(encodedRegion, parsedRef.getSecond());
//verify resolving back reference
Path storeFileDir = new Path(refLinkDir, encodedRegion+"."+
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 94df090..b3148c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -546,9 +546,9 @@ public class TestHeapSize {
assertTrue(ClassSize.OBJECT == 12 || ClassSize.OBJECT == 16); // depending on CompressedOops
}
if (ClassSize.useUnsafeLayout()) {
- assertEquals(ClassSize.OBJECT + 4, ClassSize.ARRAY);
+ assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 4);
} else {
- assertEquals(ClassSize.OBJECT + 8, ClassSize.ARRAY);
+ assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
index f43f147..82a50c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
@@ -72,16 +72,16 @@ public class TestBufferedDataBlockEncoder {
@Test
public void testCommonPrefixComparators() {
- KeyValue kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put);
- KeyValue kv2 = new KeyValue(row1, fam_1_2, qual1, 1l, Type.Maximum);
+ KeyValue kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
+ KeyValue kv2 = new KeyValue(row1, fam_1_2, qual1, 1L, Type.Maximum);
assertTrue((BufferedDataBlockEncoder.compareCommonFamilyPrefix(kv1, kv2, 4) < 0));
- kv1 = new KeyValue(row1, fam1, qual1, 1l, Type.Put);
- kv2 = new KeyValue(row_1_0, fam_1_2, qual1, 1l, Type.Maximum);
+ kv1 = new KeyValue(row1, fam1, qual1, 1L, Type.Put);
+ kv2 = new KeyValue(row_1_0, fam_1_2, qual1, 1L, Type.Maximum);
assertTrue((BufferedDataBlockEncoder.compareCommonRowPrefix(kv1, kv2, 4) < 0));
- kv1 = new KeyValue(row1, fam1, qual2, 1l, Type.Put);
- kv2 = new KeyValue(row1, fam1, qual1, 1l, Type.Maximum);
+ kv1 = new KeyValue(row1, fam1, qual2, 1L, Type.Put);
+ kv2 = new KeyValue(row1, fam1, qual1, 1L, Type.Maximum);
assertTrue((BufferedDataBlockEncoder.compareCommonQualifierPrefix(kv1, kv2, 4) > 0));
}
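The 1l -> 1L changes here and in the next two files are pure readability fixes: a lowercase l suffix is easily mistaken for the digit 1, which is why error-prone flags it (LongLiteralLowerCaseSuffix). A trivial sketch:

  public class LiteralExample {
    public static void main(String[] args) {
      long a = 11l;  // reads like 111 at a glance
      long b = 11L;  // unambiguous
      System.out.println(a == b);  // true -- only the spelling differs
    }
  }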
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
index f41db93..cbbc9dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
@@ -82,7 +82,7 @@ public class TestDataBlockEncoders {
static final byte[] HFILEBLOCK_DUMMY_HEADER = new byte[HConstants.HFILEBLOCK_HEADER_SIZE];
private RedundantKVGenerator generator = new RedundantKVGenerator();
- private Random randomizer = new Random(42l);
+ private Random randomizer = new Random(42L);
private final boolean includesMemstoreTS;
private final boolean includesTags;
@@ -129,14 +129,14 @@ public class TestDataBlockEncoders {
byte[] qualifier = new byte[0];
byte[] value = new byte[0];
if (!includesTags) {
- kvList.add(new KeyValue(row, family, qualifier, 0l, value));
- kvList.add(new KeyValue(row, family, qualifier, 0l, value));
+ kvList.add(new KeyValue(row, family, qualifier, 0L, value));
+ kvList.add(new KeyValue(row, family, qualifier, 0L, value));
} else {
byte[] metaValue1 = Bytes.toBytes("metaValue1");
byte[] metaValue2 = Bytes.toBytes("metaValue2");
- kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+ kvList.add(new KeyValue(row, family, qualifier, 0L, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) }));
- kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+ kvList.add(new KeyValue(row, family, qualifier, 0L, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) }));
}
testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
@@ -158,13 +158,13 @@ public class TestDataBlockEncoders {
if (includesTags) {
byte[] metaValue1 = Bytes.toBytes("metaValue1");
byte[] metaValue2 = Bytes.toBytes("metaValue2");
- kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+ kvList.add(new KeyValue(row, family, qualifier, 0L, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) }));
- kvList.add(new KeyValue(row, family, qualifier, 0l, value,
+ kvList.add(new KeyValue(row, family, qualifier, 0L, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) }));
} else {
- kvList.add(new KeyValue(row, family, qualifier, -1l, Type.Put, value));
- kvList.add(new KeyValue(row, family, qualifier, -2l, Type.Put, value));
+ kvList.add(new KeyValue(row, family, qualifier, -1L, Type.Put, value));
+ kvList.add(new KeyValue(row, family, qualifier, -2L, Type.Put, value));
}
testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
index e62af9e..d46a553 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
@@ -61,10 +61,12 @@ public class TestLoadAndSwitchEncodeOnDisk extends
conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
}
+ @Override
protected int numKeys() {
return 3000;
}
+ @Override
@Test(timeout=TIMEOUT_MS)
public void loadTest() throws Exception {
Admin admin = TEST_UTIL.getAdmin();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
index e0d2a9b..d304e74 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestSeekBeforeWithReverseScan.java
@@ -97,12 +97,12 @@ public class TestSeekBeforeWithReverseScan {
while (scanner.next(res)) {
count++;
}
- assertEquals(Bytes.toString(res.get(0).getRowArray(), res.get(0).getRowOffset(), res.get(0)
- .getRowLength()), "b");
- assertEquals(Bytes.toString(res.get(1).getRowArray(), res.get(1).getRowOffset(), res.get(1)
- .getRowLength()), "ab");
- assertEquals(Bytes.toString(res.get(2).getRowArray(), res.get(2).getRowOffset(), res.get(2)
- .getRowLength()), "a");
+ assertEquals("b", Bytes.toString(res.get(0).getRowArray(), res.get(0).getRowOffset(),
+ res.get(0).getRowLength()));
+ assertEquals("ab", Bytes.toString(res.get(1).getRowArray(), res.get(1).getRowOffset(),
+ res.get(1).getRowLength()));
+ assertEquals("a", Bytes.toString(res.get(2).getRowArray(), res.get(2).getRowOffset(),
+ res.get(2).getRowLength()));
assertEquals(3, count);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
index 462f77a..4300387 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
@@ -302,7 +302,7 @@ public class CacheTestUtils {
@Override
public long heapSize() {
- return 4 + buf.length;
+ return 4L + buf.length;
}
@Override
@@ -373,9 +373,10 @@ public class CacheTestUtils {
String strKey;
/* No conflicting keys */
- for (strKey = new Long(rand.nextLong()).toString(); !usedStrings
- .add(strKey); strKey = new Long(rand.nextLong()).toString())
- ;
+ strKey = Long.toString(rand.nextLong());
+ while (!usedStrings.add(strKey)) {
+ strKey = Long.toString(rand.nextLong());
+ }
returnedBlocks[i] = new HFileBlockPair();
returnedBlocks[i].blockName = new BlockCacheKey(strKey, 0);
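The rewritten key-generation loop above replaces a for statement whose body was empty (all the work buried in the init and update clauses) with a while loop, and drops new Long(rand.nextLong()).toString() in favor of Long.toString(long). Behavior is unchanged: draw random keys until Set.add reports one not seen before. A reduced sketch of the shape (names hypothetical):

  import java.util.HashSet;
  import java.util.Random;
  import java.util.Set;

  public class UniqueKeyExample {
    public static void main(String[] args) {
      Random rand = new Random();
      Set<String> used = new HashSet<>();
      String key = Long.toString(rand.nextLong());
      while (!used.add(key)) {  // add() returns false on a duplicate
        key = Long.toString(rand.nextLong());
      }
      System.out.println("unique key: " + key);
    }
  }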
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
index a133cb4..aaf1711 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
@@ -98,6 +98,7 @@ public class NanoTimer {
*
* Note: If timer is never started, "ERR" will be returned.
*/
+ @Override
public String toString() {
if (!readable()) {
return "ERR";
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 611c524..6d3d4ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -297,7 +297,7 @@ public class TestCacheOnWrite {
// block we cached at write-time and block read from file should be identical
assertEquals(block.getChecksumType(), fromCache.getChecksumType());
assertEquals(block.getBlockType(), fromCache.getBlockType());
- assertNotEquals(block.getBlockType(), BlockType.ENCODED_DATA);
+ assertNotEquals(BlockType.ENCODED_DATA, block.getBlockType());
assertEquals(block.getOnDiskSizeWithHeader(), fromCache.getOnDiskSizeWithHeader());
assertEquals(block.getOnDiskSizeWithoutHeader(), fromCache.getOnDiskSizeWithoutHeader());
assertEquals(
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 9c36788..a8b7d1f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -124,7 +124,7 @@ public class TestHFileBlock {
static int writeTestKeyValues(HFileBlock.Writer hbw, int seed, boolean includesMemstoreTS,
boolean useTag) throws IOException {
List<KeyValue> keyValues = new ArrayList<>();
- Random randomizer = new Random(42l + seed); // just any fixed number
+ Random randomizer = new Random(42L + seed); // just any fixed number
// generate keyValues
for (int i = 0; i < NUM_KEYVALUES; ++i) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index bec774e..a049b329 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -262,7 +262,7 @@ public class TestHFileEncryption {
assertTrue("Initial seekTo failed", scanner.seekTo());
for (i = 0; i < 100; i++) {
KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
- assertEquals("Unable to find KV as expected: " + kv, scanner.seekTo(kv), 0);
+ assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
}
} finally {
scanner.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
index af169f5..8429ee8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
@@ -280,10 +280,10 @@ public class TestLruBlockCache {
}
// A single eviction run should have occurred
- assertEquals(cache.getStats().getEvictionCount(), 1);
+ assertEquals(1, cache.getStats().getEvictionCount());
// We expect two entries evicted
- assertEquals(cache.getStats().getEvictedCount(), 2);
+ assertEquals(2, cache.getStats().getEvictedCount());
// Our expected size overruns acceptable limit
assertTrue(expectedCacheSize >
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index 9b2602f..3873a6c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -93,7 +93,7 @@ public class TestBucketCache {
String ioEngineName = "offheap";
String persistencePath = null;
- private class MockedBucketCache extends BucketCache {
+ private static class MockedBucketCache extends BucketCache {
public MockedBucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
int writerThreads, int writerQLen, String persistencePath) throws FileNotFoundException,
@@ -314,12 +314,18 @@ public class TestBucketCache {
BucketCache cache = new BucketCache(ioEngineName, capacitySize, constructedBlockSize,
constructedBlockSizes, writeThreads, writerQLen, persistencePath, 100, conf);
- assertEquals(BucketCache.ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getAcceptableFactor(), 0.9f, 0);
- assertEquals(BucketCache.MIN_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMinFactor(), 0.5f, 0);
- assertEquals(BucketCache.EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getExtraFreeFactor(), 0.5f, 0);
- assertEquals(BucketCache.SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getSingleFactor(), 0.1f, 0);
- assertEquals(BucketCache.MULTI_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMultiFactor(), 0.7f, 0);
- assertEquals(BucketCache.MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.", cache.getMemoryFactor(), 0.2f, 0);
+ assertEquals(BucketCache.ACCEPT_FACTOR_CONFIG_NAME + " failed to propagate.", 0.9f,
+ cache.getAcceptableFactor(), 0);
+ assertEquals(BucketCache.MIN_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f,
+ cache.getMinFactor(), 0);
+ assertEquals(BucketCache.EXTRA_FREE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.5f,
+ cache.getExtraFreeFactor(), 0);
+ assertEquals(BucketCache.SINGLE_FACTOR_CONFIG_NAME + " failed to propagate.", 0.1f,
+ cache.getSingleFactor(), 0);
+ assertEquals(BucketCache.MULTI_FACTOR_CONFIG_NAME + " failed to propagate.", 0.7f,
+ cache.getMultiFactor(), 0);
+ assertEquals(BucketCache.MEMORY_FACTOR_CONFIG_NAME + " failed to propagate.", 0.2f,
+ cache.getMemoryFactor(), 0);
}
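These assertions use the four-argument overload assertEquals(message, expected, actual, delta), so the reordering here is the same expected-first fix with the tolerance kept as the last argument; a delta of 0 demands exact equality. A minimal sketch:

  import static org.junit.Assert.assertEquals;

  public class DeltaExample {
    public static void main(String[] args) {
      float measured = 0.9f;
      // message, expected, actual, delta
      assertEquals("factor failed to propagate.", 0.9f, measured, 0);
    }
  }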
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
index 09429dd..0a168ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyIPC.java
@@ -148,7 +148,7 @@ public class TestNettyIPC extends AbstractTestIPC {
super(server, name, services, bindAddress, conf, scheduler, true);
}
- final class FailingConnection extends NettyServerRpcConnection {
+ static final class FailingConnection extends NettyServerRpcConnection {
private FailingConnection(TestFailingRpcServer rpcServer, Channel channel) {
super(rpcServer, channel);
}
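Declaring nested helper classes static, as with FailingConnection here and MockedBucketCache above, removes the implicit reference to the enclosing instance that every inner class carries even when it never touches outer state; findbugs reports this pattern (SIC_INNER_SHOULD_BE_STATIC) because the hidden reference can keep the outer object reachable longer than intended. A sketch:

  public class Outer {
    class Inner { }          // carries a hidden Outer.this reference
    static class Nested { }  // no implicit reference to Outer

    public static void main(String[] args) {
      Nested n = new Nested();            // no enclosing instance required
      Inner i = new Outer().new Inner();  // must be tied to an Outer
      System.out.println(n + " " + i);
    }
  }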
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 83a7acc..f6f6fc5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -112,7 +112,7 @@ public class TestProtoBufRpc {
// Test echo method
EchoRequestProto echoRequest = EchoRequestProto.newBuilder().setMessage("hello").build();
EchoResponseProto echoResponse = stub.echo(null, echoRequest);
- assertEquals(echoResponse.getMessage(), "hello");
+ assertEquals("hello", echoResponse.getMessage());
stub.error(null, emptyRequest);
fail("Expected exception is not thrown");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
index 9d2fd91..9e70c93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.ipc;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.junit.Assert.assertTrue;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-
import java.io.IOException;
import java.net.Socket;
import java.net.SocketAddress;
@@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.client.MetricsConnection;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -50,6 +49,8 @@ import org.junit.rules.TestRule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
@Category(MediumTests.class)
public class TestRpcClientLeaks {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
@@ -112,7 +113,7 @@ public class TestRpcClientLeaks {
conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(TableName.valueOf(name.getMethodName()));
- table.get(new Get("asd".getBytes()));
+ table.get(new Get(Bytes.toBytes("asd")));
connection.close();
for (Socket socket : MyRpcClientImpl.savedSockets) {
assertTrue("Socket + " + socket + " is not closed", socket.isClosed());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index e4a8767..e646c14 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -166,7 +166,8 @@ public class TestSimpleRpcScheduler {
for (String callQueueName:callQueueInfo.getCallQueueNames()) {
for (String calledMethod: callQueueInfo.getCalledMethodNames(callQueueName)) {
- assertEquals(callQueueInfo.getCallMethodCount(callQueueName, calledMethod), totalCallMethods);
+ assertEquals(totalCallMethods,
+ callQueueInfo.getCallMethodCount(callQueueName, calledMethod));
}
}
@@ -327,7 +328,7 @@ public class TestSimpleRpcScheduler {
RpcScheduler scheduler = new SimpleRpcScheduler(schedConf, 2, 1, 1, priority,
HConstants.QOS_THRESHOLD);
- assertNotEquals(scheduler, null);
+ assertNotEquals(null, scheduler);
}
@Test
@@ -571,6 +572,7 @@ public class TestSimpleRpcScheduler {
};
CallRunner cr = new CallRunner(null, putCall) {
+ @Override
public void run() {
if (sleepTime <= 0) return;
try {
@@ -581,10 +583,12 @@ public class TestSimpleRpcScheduler {
}
}
+ @Override
public RpcCall getRpcCall() {
return putCall;
}
+ @Override
public void drop() {
}
};
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
index b080d7f..6b6f0de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
@@ -77,6 +77,7 @@ abstract public class MapreduceTestingShim {
}
private static class MapreduceV1Shim extends MapreduceTestingShim {
+ @Override
public JobContext newJobContext(Configuration jobConf) throws IOException {
// Implementing:
// return new JobContext(jobConf, new JobID());
@@ -105,6 +106,7 @@ abstract public class MapreduceTestingShim {
}
}
+ @Override
public JobConf obtainJobConf(MiniMRCluster cluster) {
if (cluster == null) return null;
try {
@@ -129,6 +131,7 @@ abstract public class MapreduceTestingShim {
};
private static class MapreduceV2Shim extends MapreduceTestingShim {
+ @Override
public JobContext newJobContext(Configuration jobConf) {
return newJob(jobConf);
}
@@ -147,6 +150,7 @@ abstract public class MapreduceTestingShim {
}
}
+ @Override
public JobConf obtainJobConf(MiniMRCluster cluster) {
try {
Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
index 16f98a0..53e80f3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
@@ -365,10 +365,8 @@ public class TestAssignmentListener {
drainingServerTracker.start();
// Confirm our ServerManager lists are empty.
- Assert.assertEquals(serverManager.getOnlineServers(),
- new HashMap<ServerName, ServerLoad>());
- Assert.assertEquals(serverManager.getDrainingServersList(),
- new ArrayList<ServerName>());
+ Assert.assertEquals(new HashMap<ServerName, ServerLoad>(), serverManager.getOnlineServers());
+ Assert.assertEquals(new ArrayList<ServerName>(), serverManager.getDrainingServersList());
// checkAndRecordNewServer() is how servers are added to the ServerManager.
ArrayList<ServerName> onlineDrainingServers = new ArrayList<>();
@@ -381,8 +379,7 @@ public class TestAssignmentListener {
}
// Verify the ServerManager lists are correctly updated.
- Assert.assertEquals(serverManager.getOnlineServers(), onlineServers);
- Assert.assertEquals(serverManager.getDrainingServersList(),
- onlineDrainingServers);
+ Assert.assertEquals(onlineServers, serverManager.getOnlineServers());
+ Assert.assertEquals(onlineDrainingServers, serverManager.getDrainingServersList());
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
index e8aa755..aa5ad1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
@@ -190,7 +190,7 @@ public class TestMasterFailover {
RegionState metaState = MetaTableLocator.getMetaRegionState(hrs.getZooKeeper());
assertEquals("hbase:meta should be online on RS",
metaState.getServerName(), metaServerName);
- assertEquals("hbase:meta should be online on RS", metaState.getState(), State.OPEN);
+ assertEquals("hbase:meta should be online on RS", State.OPEN, metaState.getState());
// Start up a new master
LOG.info("Starting up a new master");
@@ -203,7 +203,7 @@ public class TestMasterFailover {
metaState = MetaTableLocator.getMetaRegionState(activeMaster.getZooKeeper());
assertEquals("hbase:meta should be online on RS",
metaState.getServerName(), metaServerName);
- assertEquals("hbase:meta should be online on RS", metaState.getState(), State.OPEN);
+ assertEquals("hbase:meta should be online on RS", State.OPEN, metaState.getState());
// Done, shutdown the cluster
} finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
index fd44c89..15c8b6a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
@@ -189,6 +189,7 @@ public class TestMasterNoCluster {
TESTUTIL.getConfiguration(), rs0, rs0, rs0.getServerName(),
HRegionInfo.FIRST_META_REGIONINFO);
HMaster master = new HMaster(conf) {
+ @Override
InetAddress getRemoteInetAddress(final int port, final long serverStartCode)
throws UnknownHostException {
// Return different address dependent on port passed.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index cd7af50..e399f2e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
@@ -242,7 +243,7 @@ public class TestMasterOperationsForRegionReplicas {
}
assert(defaultReplicas.size() == numRegions);
Collection<Integer> counts = new HashSet<>(defaultReplicas.values());
- assert(counts.size() == 1 && counts.contains(new Integer(numReplica)));
+ assert(counts.size() == 1 && counts.contains(numReplica));
} finally {
ADMIN.disableTable(tableName);
ADMIN.deleteTable(tableName);
@@ -336,7 +337,7 @@ public class TestMasterOperationsForRegionReplicas {
byte[] startKey = region.getStartKey();
if (region.getTable().equals(table)) {
setOfStartKeys.add(startKey); //ignore other tables
- LOG.info("--STARTKEY " + new String(startKey)+"--");
+ LOG.info("--STARTKEY {}--", new String(startKey, StandardCharsets.UTF_8));
}
}
// the number of startkeys will be equal to the number of regions hosted in each server
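Three small fixes land in this file: the deprecated new Integer(...) constructor gives way to autoboxing, new String(byte[]) gains an explicit charset instead of the platform default, and the log call switches to SLF4J's {} placeholder so the message is only formatted when INFO is enabled. A sketch of the first two (the byte[] literal is invented for the example):

import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

public class BoxingAndCharsetExample {
  public static void main(String[] args) {
    // Autoboxing goes through Integer.valueOf, which caches small values;
    // new Integer(n) always allocates and is deprecated since Java 9.
    Set<Integer> counts = new HashSet<>();
    counts.add(3);
    System.out.println(counts.contains(3)); // true, no explicit boxing needed

    // new String(bytes) decodes with the JVM's default charset, which can
    // differ across machines; naming the charset makes the result stable.
    byte[] startKey = {(byte) 'a', (byte) 'b'};
    System.out.println(new String(startKey, StandardCharsets.UTF_8)); // "ab"
  }
}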
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
index 29c24f7..648ea64 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
@@ -121,6 +121,7 @@ public class TestMasterShutdown {
master.start();
LOG.info("Called master start on " + master.getName());
Thread shutdownThread = new Thread("Shutdown-Thread") {
+ @Override
public void run() {
LOG.info("Before call to shutdown master");
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
index ebfec22..2e6c699 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMetaShutdownHandler.java
@@ -88,8 +88,7 @@ public class TestMetaShutdownHandler {
}
RegionState metaState =
MetaTableLocator.getMetaRegionState(master.getZooKeeper());
- assertEquals("Meta should be not in transition",
- metaState.getState(), RegionState.State.OPEN);
+ assertEquals("Meta should be not in transition", RegionState.State.OPEN, metaState.getState());
assertNotEquals("Meta should be moved off master",
metaServerName, master.getServerName());
@@ -115,8 +114,7 @@ public class TestMetaShutdownHandler {
regionStates.isRegionOnline(HRegionInfo.FIRST_META_REGIONINFO));
// Now, make sure meta is registered in zk
metaState = MetaTableLocator.getMetaRegionState(master.getZooKeeper());
- assertEquals("Meta should be not in transition",
- metaState.getState(), RegionState.State.OPEN);
+ assertEquals("Meta should be not in transition", RegionState.State.OPEN, metaState.getState());
assertEquals("Meta should be assigned", metaState.getServerName(),
regionStates.getRegionServerOfRegion(HRegionInfo.FIRST_META_REGIONINFO));
assertNotEquals("Meta should be assigned on a different server",
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index e99d533..2794952 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -389,8 +389,7 @@ public class TestRegionPlacement {
lastRegionOpenedCount = currentRegionOpened;
assertEquals("There are only " + regionMovement + " instead of "
- + expected + " region movement for " + attempt + " attempts",
- regionMovement, expected);
+ + expected + " region movement for " + attempt + " attempts", expected, regionMovement);
}
/**
@@ -469,6 +468,7 @@ public class TestRegionPlacement {
final AtomicInteger totalRegionNum = new AtomicInteger(0);
LOG.info("The start of region placement verification");
MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {
+ @Override
public boolean visit(Result result) throws IOException {
try {
@SuppressWarnings("deprecation")
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
index cd5239e..128d7ee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
@@ -384,23 +384,23 @@ public class TestSplitLogManager {
@Test (timeout=180000)
public void testTaskResigned() throws Exception {
LOG.info("TestTaskResigned - resubmit task node once in RESIGNED state");
- assertEquals(tot_mgr_resubmit.sum(), 0);
+ assertEquals(0, tot_mgr_resubmit.sum());
slm = new SplitLogManager(master, conf);
- assertEquals(tot_mgr_resubmit.sum(), 0);
+ assertEquals(0, tot_mgr_resubmit.sum());
TaskBatch batch = new TaskBatch();
String tasknode = submitTaskAndWait(batch, "foo/1");
- assertEquals(tot_mgr_resubmit.sum(), 0);
+ assertEquals(0, tot_mgr_resubmit.sum());
final ServerName worker1 = ServerName.valueOf("worker1,1,1");
- assertEquals(tot_mgr_resubmit.sum(), 0);
+ assertEquals(0, tot_mgr_resubmit.sum());
SplitLogTask slt = new SplitLogTask.Resigned(worker1);
- assertEquals(tot_mgr_resubmit.sum(), 0);
+ assertEquals(0, tot_mgr_resubmit.sum());
ZKUtil.setData(zkw, tasknode, slt.toByteArray());
ZKUtil.checkExists(zkw, tasknode);
// Could be small race here.
if (tot_mgr_resubmit.sum() == 0) {
waitForCounter(tot_mgr_resubmit, 0, 1, to/2);
}
- assertEquals(tot_mgr_resubmit.sum(), 1);
+ assertEquals(1, tot_mgr_resubmit.sum());
byte[] taskstate = ZKUtil.getData(zkw, tasknode);
slt = SplitLogTask.parseFrom(taskstate);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
index 1f61ee7..5a75297 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
@@ -65,9 +65,8 @@ public class TestTableStateManager {
TEST_UTIL.restartHBaseCluster(1);
HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
- Assert.assertEquals(
- master.getTableStateManager().getTableState(tableName),
- TableState.State.DISABLED);
+ Assert.assertEquals(TableState.State.DISABLED,
+ master.getTableStateManager().getTableState(tableName));
}
private void setTableStateInZK(ZKWatcher watcher, final TableName tableName,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
index 83fafff..9bd4443 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
@@ -107,10 +107,12 @@ public class MockMasterServices extends MockNoopMasterServices {
this.walManager = new MasterWalManager(this);
// Mock an AM.
this.assignmentManager = new AssignmentManager(this, new MockRegionStateStore(this)) {
+ @Override
public boolean isTableEnabled(final TableName tableName) {
return true;
}
+ @Override
public boolean isTableDisabled(final TableName tableName) {
return false;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
index 37d9820..d2a4020 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
@@ -501,8 +501,8 @@ public class TestSplitTableRegionProcedure {
daughters.get(i),
startRow,
numRows,
- ColumnFamilyName1.getBytes(),
- ColumnFamilyName2.getBytes());
+ Bytes.toBytes(ColumnFamilyName1),
+ Bytes.toBytes(ColumnFamilyName2));
}
}
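Replacing String.getBytes() with Bytes.toBytes(...) — the same substitution made throughout this patch — pins the encoding to UTF-8; the no-argument getBytes() uses the platform default charset, which is what findbugs flags as DM_DEFAULT_ENCODING. A minimal sketch, assuming hbase-common on the classpath:

import java.util.Arrays;

import org.apache.hadoop.hbase.util.Bytes;

public class ToBytesExample {
  public static void main(String[] args) {
    byte[] portable = Bytes.toBytes("f1");   // always UTF-8
    byte[] platform = "f1".getBytes();       // whatever file.encoding says
    // Equal on a UTF-8 JVM, but only the first form is guaranteed to be.
    System.out.println(Arrays.equals(portable, platform));
  }
}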
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
index adf56b8..e180fb5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java
@@ -159,6 +159,7 @@ public class BalancerTestBase {
public MockMapping(Configuration conf) {
}
+ @Override
public List<String> resolve(List<String> names) {
List<String> ret = new ArrayList<>(names.size());
for (String name : names) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
index 08b27ec..644de6a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
@@ -392,6 +392,7 @@ public class TestLogsCleaner {
.when(zk).getData("/hbase/replication/rs", null, new Stat());
}
+ @Override
public RecoverableZooKeeper getRecoverableZooKeeper() {
return zk;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
index 2948701..85f0d1f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
@@ -351,6 +351,7 @@ public class TestReplicationHFileCleaner {
.when(zk).getData("/hbase/replication/hfile-refs", null, new Stat());
}
+ @Override
public RecoverableZooKeeper getRecoverableZooKeeper() {
return zk;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 6069041..6a71df3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -183,7 +183,7 @@ public class TestSnapshotFromMaster {
DisabledTableSnapshotHandler mockHandler = Mockito.mock(DisabledTableSnapshotHandler.class);
Mockito.when(mockHandler.getException()).thenReturn(null);
Mockito.when(mockHandler.getSnapshot()).thenReturn(desc);
- Mockito.when(mockHandler.isFinished()).thenReturn(new Boolean(true));
+ Mockito.when(mockHandler.isFinished()).thenReturn(Boolean.TRUE);
Mockito.when(mockHandler.getCompletionTimestamp())
.thenReturn(EnvironmentEdgeManager.currentTime());
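new Boolean(true), deprecated since Java 9, allocates a fresh object for a type with only two meaningful values; Boolean.TRUE — or Boolean.valueOf(...) when the value is computed — reuses the canonical instances. A short sketch:

public class BooleanBoxingExample {
  public static void main(String[] args) {
    Boolean a = Boolean.TRUE;
    Boolean b = Boolean.valueOf(true);
    System.out.println(a == b); // true: both refer to the cached instance
    @SuppressWarnings("deprecation")
    Boolean c = new Boolean(true);
    System.out.println(a == c); // false: a needless extra allocation
  }
}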
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
index 7f031cc..1f743db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
import org.hamcrest.core.IsInstanceOf;
import org.hamcrest.core.StringStartsWith;
import org.junit.After;
@@ -60,6 +61,7 @@ import org.junit.rules.TestName;
import org.junit.rules.TestRule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
@@ -105,8 +107,10 @@ public class TestLockProcedure {
setupConf(UTIL.getConfiguration());
UTIL.startMiniCluster(1);
UTIL.getAdmin().createNamespace(NamespaceDescriptor.create(namespace).build());
- UTIL.createTable(tableName1, new byte[][]{"fam".getBytes()}, new byte[][] {"1".getBytes()});
- UTIL.createTable(tableName2, new byte[][]{"fam".getBytes()}, new byte[][] {"1".getBytes()});
+ UTIL.createTable(tableName1,
+ new byte[][]{ Bytes.toBytes("fam")}, new byte[][] {Bytes.toBytes("1")});
+ UTIL.createTable(tableName2,
+ new byte[][]{Bytes.toBytes("fam")}, new byte[][] {Bytes.toBytes("1")});
masterRpcService = UTIL.getHBaseCluster().getMaster().getMasterRpcServices();
procExec = UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor();
tableRegions1 = UTIL.getAdmin().getRegions(tableName1);
@@ -195,7 +199,7 @@ public class TestLockProcedure {
LockHeartbeatResponse response = masterRpcService.lockHeartbeat(null,
LockHeartbeatRequest.newBuilder().setProcId(procId).build());
if (response.getLockStatus() == LockHeartbeatResponse.LockStatus.LOCKED) {
- assertEquals(response.getTimeoutMs(), HEARTBEAT_TIMEOUT);
+ assertEquals(HEARTBEAT_TIMEOUT, response.getTimeoutMs());
LOG.debug(String.format("Proc id %s acquired lock.", procId));
return true;
}
@@ -349,7 +353,8 @@ public class TestLockProcedure {
CountDownLatch latch = new CountDownLatch(1);
// MasterRpcServices don't set latch with LockProcedure, so create one and submit it directly.
LockProcedure lockProc = new LockProcedure(UTIL.getConfiguration(),
- TableName.valueOf("table"), org.apache.hadoop.hbase.procedure2.LockType.EXCLUSIVE, "desc", latch);
+ TableName.valueOf("table"),
+ org.apache.hadoop.hbase.procedure2.LockType.EXCLUSIVE, "desc", latch);
procExec.submitProcedure(lockProc);
assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
releaseLock(lockProc.getProcId());

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
index 189b37f..2adcf9e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
@@ -82,6 +82,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
public void testAddLabels() throws Throwable {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
String[] labels = { "L1", SECRET, "L2", "invalid~", "L3" };
VisibilityLabelsResponse response = null;
@@ -122,6 +123,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
do {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, "ABC", "XYZ" };
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -170,6 +172,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
public void testListLabels() throws Throwable {
PrivilegedExceptionAction<ListLabelsResponse> action =
new PrivilegedExceptionAction<ListLabelsResponse>() {
+ @Override
public ListLabelsResponse run() throws Exception {
ListLabelsResponse response = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -200,6 +203,7 @@ public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibili
public void testListLabelsWithRegEx() throws Throwable {
PrivilegedExceptionAction<ListLabelsResponse> action =
new PrivilegedExceptionAction<ListLabelsResponse>() {
+ @Override
public ListLabelsResponse run() throws Exception {
ListLabelsResponse response = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
index 0a7d918..9d536fe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
@@ -17,6 +17,17 @@
*/
package org.apache.hadoop.hbase.security.visibility;
+import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -42,14 +53,10 @@ import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.DefaultEnvironmentEdge;
-import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -57,17 +64,6 @@ import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
/**
* Tests visibility labels with deletes
*/
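The import shuffle above just moves the static and java.* imports back to the top of the file; the grouping shown (static imports, then java.*, then third-party) is inferred from the diff rather than quoted from the project's checkstyle configuration. As a skeleton:

import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;

import org.junit.Test;

public class ImportOrderExample {
  @Test
  public void ordered() {
    List<String> xs = new ArrayList<>();
    xs.add("a");
    assertEquals(1, xs.size());
  }
}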
@@ -205,7 +201,7 @@ public class TestVisibilityLabelsWithDeletes {
public void testVisibilityLabelsWithDeleteFamilyVersion() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
- long[] ts = new long[] { 123l, 125l };
+ long[] ts = new long[] { 123L, 125L };
try (Table table = createTableAndWriteDataWithLabels(ts,
CONFIDENTIAL + "|" + TOPSECRET, SECRET)) {
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -215,7 +211,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
- d.addFamilyVersion(fam, 123l);
+ d.addFamilyVersion(fam, 123L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
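From here on the patch rewrites every 123l-style literal with an uppercase suffix. The value is identical; the point is that a lowercase l is nearly indistinguishable from the digit 1 in many fonts, so 123l reads like 1231. A one-line sketch:

public class LongSuffixExample {
  public static void main(String[] args) {
    long ts = 123L;                 // unambiguous
    System.out.println(ts == 123l); // true: same value, just harder to read
  }
}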
@@ -243,7 +239,7 @@ public class TestVisibilityLabelsWithDeletes {
public void testVisibilityLabelsWithDeleteColumnExactVersion() throws Exception {
setAuths();
final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
- long[] ts = new long[] { 123l, 125l };
+ long[] ts = new long[] { 123L, 125L };
try (Table table = createTableAndWriteDataWithLabels(ts,
CONFIDENTIAL + "|" + TOPSECRET, SECRET);) {
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -253,7 +249,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
- d.addColumn(fam, qual, 123l);
+ d.addColumn(fam, qual, 123L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -291,7 +287,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" +
SECRET + "&" + TOPSECRET+")"));
- d.addColumns(fam, qual, 125l);
+ d.addColumns(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -313,17 +309,17 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -508,7 +504,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -529,7 +525,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -566,7 +562,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -586,7 +582,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -622,7 +618,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -643,7 +639,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -680,7 +676,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -700,7 +696,7 @@ public class TestVisibilityLabelsWithDeletes {
Scan s = new Scan();
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -737,7 +733,7 @@ public class TestVisibilityLabelsWithDeletes {
ResultScanner scanner = table.getScanner(s);
// The delete would not be able to apply it because of visibility mismatch
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -759,7 +755,7 @@ public class TestVisibilityLabelsWithDeletes {
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
// this will alone match
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
} catch (Throwable t) {
throw new IOException(t);
}
@@ -810,7 +806,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(SECRET));
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
put = new Put(Bytes.toBytes("row1"));
put.addColumn(fam, qual, value1);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
@@ -836,13 +832,13 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(CONFIDENTIAL));
scanner = table.getScanner(s);
next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
s = new Scan();
s.setMaxVersions(5);
s.setAuthorizations(new Authorizations(SECRET));
scanner = table.getScanner(s);
Result[] next1 = scanner.next(3);
- assertEquals(next1.length, 0);
+ assertEquals(0, next1.length);
}
}
@@ -886,7 +882,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(SECRET));
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
put = new Put(Bytes.toBytes("row1"));
put.addColumn(fam, qual, value1);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
@@ -912,13 +908,13 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(CONFIDENTIAL));
scanner = table.getScanner(s);
next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
s = new Scan();
s.setMaxVersions(5);
s.setAuthorizations(new Authorizations(SECRET));
scanner = table.getScanner(s);
Result[] next1 = scanner.next(3);
- assertEquals(next1.length, 0);
+ assertEquals(0, next1.length);
}
}
@@ -933,11 +929,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 124l, value1);
+ put.addColumn(fam, qual, 124L, value1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -947,7 +943,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET));
- d.addColumns(fam, qual, 126l);
+ d.addColumns(fam, qual, 126L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -957,7 +953,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
- d.addColumn(fam, qual, 123l);
+ d.addColumn(fam, qual, 123L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -971,7 +967,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(CONFIDENTIAL, SECRET));
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 0);
+ assertEquals(0, next.length);
}
}
@Test
@@ -987,11 +983,11 @@ public class TestVisibilityLabelsWithDeletes {
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put1 = new Put(Bytes.toBytes("row1"));
- put1.addColumn(fam, qual, 123l, value);
+ put1.addColumn(fam, qual, 123L, value);
put1.setCellVisibility(new CellVisibility(CONFIDENTIAL));
Put put2 = new Put(Bytes.toBytes("row1"));
- put2.addColumn(fam, qual, 123l, value1);
+ put2.addColumn(fam, qual, 123L, value1);
put2.setCellVisibility(new CellVisibility(SECRET));
table.put(createList(put1, put2));
@@ -1000,7 +996,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(CONFIDENTIAL, SECRET));
ResultScanner scanner = table.getScanner(s);
- assertEquals(scanner.next(3).length, 1);
+ assertEquals(1, scanner.next(3).length);
scanner.close();
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -1010,7 +1006,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
- d.addColumn(fam, qual, 123l);
+ d.addColumn(fam, qual, 123L);
table.delete(d);
}
@@ -1018,7 +1014,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET));
- d.addColumn(fam, qual, 123l);
+ d.addColumn(fam, qual, 123L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1031,7 +1027,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setMaxVersions(5);
s.setAuthorizations(new Authorizations(CONFIDENTIAL));
scanner = table.getScanner(s);
- assertEquals(scanner.next(3).length, 0);
+ assertEquals(0, scanner.next(3).length);
scanner.close();
}
}
@@ -1126,22 +1122,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1162,36 +1158,36 @@ public class TestVisibilityLabelsWithDeletes {
List<Put> puts = new ArrayList<>(5);
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124L, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 125l, value);
+ put.addColumn(fam, qual, 125L, value);
put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 126l, value);
+ put.addColumn(fam, qual, 126L, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127L, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
TEST_UTIL.getAdmin().flush(tableName);
put = new Put(Bytes.toBytes("row2"));
- put.addColumn(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127L, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET
+ "&" + SECRET + ")"));
puts.add(put);
@@ -1212,28 +1208,28 @@ public class TestVisibilityLabelsWithDeletes {
List<Put> puts = new ArrayList<>(5);
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124L, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 125l, value);
+ put.addColumn(fam, qual, 125L, value);
put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual1, 126l, value);
+ put.addColumn(fam, qual1, 126L, value);
put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual2, 127l, value);
+ put.addColumn(fam, qual2, 127L, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
@@ -1253,23 +1249,23 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
List<Put> puts = new ArrayList<>(5);
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123L, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124L, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 125l, value);
+ put.addColumn(fam, qual, 125L, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 126l, value);
+ put.addColumn(fam, qual, 126L, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127L, value);
puts.add(put);
Table table = TEST_UTIL.getConnection().getTable(tableName);
@@ -1278,7 +1274,7 @@ public class TestVisibilityLabelsWithDeletes {
TEST_UTIL.getAdmin().flush(tableName);
put = new Put(Bytes.toBytes("row2"));
- put.addColumn(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127L, value);
table.put(put);
return table;
@@ -1300,7 +1296,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" +
SECRET + "&" + TOPSECRET+")"));
- d.addColumn(fam, qual, 125l);
+ d.addColumn(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1322,27 +1318,27 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1386,22 +1382,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1417,7 +1413,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Table table = doPuts(tableName)) {
TEST_UTIL.getAdmin().flush(tableName);
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 128l, value);
+ put.addColumn(fam, qual, 128L, value);
put.setCellVisibility(new CellVisibility(TOPSECRET));
table.put(put);
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -1449,27 +1445,27 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 128l);
+ assertEquals(128L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1477,7 +1473,7 @@ public class TestVisibilityLabelsWithDeletes {
current.getRowLength(), row2, 0, row2.length));
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 129l, value);
+ put.addColumn(fam, qual, 129L, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
@@ -1493,7 +1489,7 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 129l);
+ assertEquals(129L, current.getTimestamp());
}
}
@Test
@@ -1521,7 +1517,7 @@ public class TestVisibilityLabelsWithDeletes {
SUPERUSER.runAs(actiona);
TEST_UTIL.getAdmin().flush(tableName);
Put put = new Put(Bytes.toBytes("row3"));
- put.addColumn(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "&" + PRIVATE));
table.put(put);
TEST_UTIL.getAdmin().flush(tableName);
@@ -1539,22 +1535,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1598,12 +1594,12 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1623,7 +1619,7 @@ public class TestVisibilityLabelsWithDeletes {
public Void run() throws Exception {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addColumns(fam, qual, 125l);
+ d.addColumns(fam, qual, 125L);
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
table.delete(d);
@@ -1647,24 +1643,24 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
current.getQualifierLength(), qual1, 0, qual1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
current.getQualifierLength(), qual2, 0, qual2.length));
}
@@ -1681,11 +1677,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual1, 125l, value);
+ put.addColumn(fam, qual1, 125L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual1, 126l, value);
+ put.addColumn(fam, qual1, 126L, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
TEST_UTIL.getAdmin().flush(tableName);
@@ -1694,11 +1690,11 @@ public class TestVisibilityLabelsWithDeletes {
public Void run() throws Exception {
Delete d1 = new Delete(row1);
d1.setCellVisibility(new CellVisibility(SECRET));
- d1.addColumns(fam, qual, 126l);
+ d1.addColumns(fam, qual, 126L);
Delete d2 = new Delete(row1);
d2.setCellVisibility(new CellVisibility(CONFIDENTIAL));
- d2.addColumns(fam, qual1, 125l);
+ d2.addColumns(fam, qual1, 125L);
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
@@ -1715,7 +1711,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
}
}
@Test
@@ -1729,11 +1725,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual1, 125l, value);
+ put.addColumn(fam, qual1, 125L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual1, 126l, value);
+ put.addColumn(fam, qual1, 126L, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
TEST_UTIL.getAdmin().flush(tableName);
@@ -1742,11 +1738,11 @@ public class TestVisibilityLabelsWithDeletes {
public Void run() throws Exception {
Delete d1 = new Delete(row1);
d1.setCellVisibility(new CellVisibility(SECRET));
- d1.addColumns(fam, qual, 126l);
+ d1.addColumns(fam, qual, 126L);
Delete d2 = new Delete(row1);
d2.setCellVisibility(new CellVisibility(CONFIDENTIAL));
- d2.addColumns(fam, qual1, 126l);
+ d2.addColumns(fam, qual1, 126L);
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
@@ -1763,7 +1759,7 @@ public class TestVisibilityLabelsWithDeletes {
s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
ResultScanner scanner = table.getScanner(s);
Result[] next = scanner.next(3);
- assertEquals(next.length, 1);
+ assertEquals(1, next.length);
}
}
@Test
@@ -1838,27 +1834,27 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1881,7 +1877,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ SECRET + "&" + TOPSECRET + ")"));
- d.addFamily(fam, 126l);
+ d.addFamily(fam, 126L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1903,17 +1899,17 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1936,7 +1932,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" +
SECRET + "&" + TOPSECRET+")"));
- d.addFamily(fam, 126l);
+ d.addFamily(fam, 126L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1948,7 +1944,7 @@ public class TestVisibilityLabelsWithDeletes {
TEST_UTIL.getAdmin().flush(tableName);
Put put = new Put(Bytes.toBytes("row3"));
- put.addColumn(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "&" + PRIVATE));
table.put(put);
TEST_UTIL.getAdmin().flush(tableName);
@@ -1966,7 +1962,7 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -1989,7 +1985,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addFamily(fam, 125l);
+ d.addFamily(fam, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2010,22 +2006,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2041,7 +2037,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addFamily(fam, 127l);
+ d.addFamily(fam, 127L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2061,18 +2057,18 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row2, 0, row2.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
}
}
@@ -2100,11 +2096,11 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
- d.addFamilyVersion(fam, 123l);
+ d.addFamilyVersion(fam, 123L);
table.delete(d);
d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addFamilyVersion(fam, 125l);
+ d.addFamilyVersion(fam, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2126,17 +2122,17 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
}
}
@@ -2153,11 +2149,11 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET + ")"));
- d.addColumn(fam, qual, 126l);
+ d.addColumn(fam, qual, 126L);
table.delete(d);
d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addFamilyVersion(fam, 125l);
+ d.addFamilyVersion(fam, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2179,17 +2175,17 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
// Issue 2nd delete
actiona = new PrivilegedExceptionAction<Void>() {
@Override
@@ -2218,12 +2214,12 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
}
}
@@ -2258,7 +2254,7 @@ public class TestVisibilityLabelsWithDeletes {
d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addFamilyVersion(fam, 125l);
+ d.addFamilyVersion(fam, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2280,17 +2276,17 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
// Issue 2nd delete
actiona = new PrivilegedExceptionAction<Void>() {
@Override
@@ -2319,12 +2315,12 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
}
}
@@ -2340,7 +2336,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addColumn(fam, qual, 125l);
+ d.addColumn(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2361,22 +2357,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2392,7 +2388,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addColumn(fam, qual, 127l);
+ d.addColumn(fam, qual, 127L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2412,23 +2408,23 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row2, 0, row2.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
}
}
@@ -2446,7 +2442,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")" +
"|(" + TOPSECRET + "&" + SECRET + ")"));
- d.addColumn(fam, qual, 127l);
+ d.addColumn(fam, qual, 127L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2467,22 +2463,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2497,7 +2493,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addColumn(fam, qual, 127l);
+ d.addColumn(fam, qual, 127L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2517,28 +2513,28 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row2, 0, row2.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
}
}
@Test
@@ -2556,7 +2552,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addColumn(fam, qual, 125l);
+ d.addColumn(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2577,27 +2573,27 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2613,7 +2609,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addColumn(fam, qual, 127l);
+ d.addColumn(fam, qual, 127L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2633,28 +2629,28 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row2, 0, row2.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
}
}
@Test
@@ -2671,7 +2667,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.addColumn(fam, qual, 125l);
+ d.addColumn(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2692,22 +2688,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2723,7 +2719,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addFamily(fam, 124l);
+ d.addFamily(fam, 124L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2743,18 +2739,18 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row2, 0, row2.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
}
}
@@ -2789,7 +2785,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addColumns(fam, qual, 125l);
+ d.addColumns(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2810,22 +2806,22 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2841,7 +2837,7 @@ public class TestVisibilityLabelsWithDeletes {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
- d.addColumn(fam, qual, 127l);
+ d.addColumn(fam, qual, 127L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2861,17 +2857,17 @@ public class TestVisibilityLabelsWithDeletes {
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -2892,7 +2888,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
- d.addColumn(fam, qual, 125l);
+ d.addColumn(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2916,7 +2912,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
- d.addColumns(fam, qual, 125l);
+ d.addColumns(fam, qual, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2941,7 +2937,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
- d.addFamily(fam, 125l);
+ d.addFamily(fam, 125L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -3016,7 +3012,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
- d.addFamilyVersion(fam, 126l);
+ d.addFamilyVersion(fam, 126L);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -3043,27 +3039,27 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 127l);
+ assertEquals(127L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 126l);
+ assertEquals(126L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 125l);
+ assertEquals(125L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
cellScanner = next[1].cellScanner();
cellScanner.advance();
current = cellScanner.current();
@@ -3083,11 +3079,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.addColumn(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124L, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "|" + PRIVATE));
table.put(put);
TEST_UTIL.getAdmin().flush(tableName);
@@ -3097,7 +3093,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
- d.addColumn(fam, qual, 124l);
+ d.addColumn(fam, qual, 124L);
d.setCellVisibility(new CellVisibility(PRIVATE ));
table.delete(d);
} catch (Throwable t) {
@@ -3120,12 +3116,12 @@ public class TestVisibilityLabelsWithDeletes {
Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 124l);
+ assertEquals(124L, current.getTimestamp());
cellScanner.advance();
current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), row1, 0, row1.length));
- assertEquals(current.getTimestamp(), 123l);
+ assertEquals(123L, current.getTimestamp());
}
}
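A note on the pattern above: the bulk of this file's changes replace the lowercase long-literal suffix (125l) with the uppercase form (125L). Both literals denote the same value; the uppercase suffix is preferred because a lowercase 'l' is easily misread as the digit '1'. A minimal standalone sketch of the distinction follows; the class name and values are illustrative, not from the patch.

    public class LongSuffixExample {
      public static void main(String[] args) {
        long hardToRead = 125l;   // reads like "1251" at a glance
        long clear = 125L;        // uppercase suffix removes the ambiguity
        System.out.println(hardToRead == clear);  // prints "true"
      }
    }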
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
index b6a1c6d..b82d503 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
@@ -107,6 +107,7 @@ public class TestVisibilityLabelsWithSLGStack {
private static void addLabels() throws Exception {
PrivilegedExceptionAction<VisibilityLabelsResponse> action =
new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ @Override
public VisibilityLabelsResponse run() throws Exception {
String[] labels = { SECRET, CONFIDENTIAL };
try (Connection conn = ConnectionFactory.createConnection(conf)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
index 9f24f6c..906b9d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
@@ -96,6 +96,7 @@ public class TestVisibilityLablesWithGroups {
// Set up for the test
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection conn = ConnectionFactory.createConnection(conf)) {
VisibilityClient.addLabels(conn, new String[] { SECRET, CONFIDENTIAL });
@@ -116,6 +117,7 @@ public class TestVisibilityLablesWithGroups {
TEST_UTIL.createTable(tableName, CF);
// put the data.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
@@ -137,6 +139,7 @@ public class TestVisibilityLablesWithGroups {
// 'admin' user is part of 'supergroup', thus can see all the cells.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
@@ -178,6 +181,7 @@ public class TestVisibilityLablesWithGroups {
// Get testgroup's labels.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
GetAuthsResponse authsResponse = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -197,6 +201,7 @@ public class TestVisibilityLablesWithGroups {
// Test that test user can see what 'testgroup' has been authorized to.
TESTUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
@@ -281,6 +286,7 @@ public class TestVisibilityLablesWithGroups {
// Clear 'testgroup' of CONFIDENTIAL label.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
VisibilityLabelsResponse response = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -295,6 +301,7 @@ public class TestVisibilityLablesWithGroups {
// Get testgroup's labels. No label is returned.
SUPERUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
GetAuthsResponse authsResponse = null;
try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -313,6 +320,7 @@ public class TestVisibilityLablesWithGroups {
// Test that test user cannot see the cells with the labels anymore.
TESTUSER.runAs(new PrivilegedExceptionAction<Void>() {
+ @Override
public Void run() throws Exception {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
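The additions in this file are all @Override annotations on run() methods of anonymous PrivilegedExceptionAction instances. The annotation is more than cosmetic: it asks the compiler to verify that the method really overrides or implements a supertype method, turning a signature typo into a compile error instead of a silently unused overload. An illustrative sketch, not taken from the patch:

    interface Action {
      void run(String name);
    }

    class LoggingAction implements Action {
      @Override
      public void run(String name) {
        // Compiles: this really implements Action.run(String).
      }

      // If @Override were added to the method below, the compiler would
      // reject it, because run(StringBuilder) overrides nothing:
      public void run(StringBuilder name) {
      }
    }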
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
index fa88a53..ebf38a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
@@ -136,7 +136,7 @@ public class TestVisibilityWithCheckAuths {
Table table = connection.getTable(tableName)) {
Put p = new Put(row1);
p.setCellVisibility(new CellVisibility(PUBLIC + "&" + TOPSECRET));
- p.addColumn(fam, qual, 125l, value);
+ p.addColumn(fam, qual, 125L, value);
table.put(p);
Assert.fail("Testcase should fail with AccessDeniedException");
} catch (Throwable t) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
index 43ba304..7a55584 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
@@ -209,25 +209,25 @@ public class TestWithDisabledAuthorization {
s.setAuthorizations(new Authorizations());
try (ResultScanner scanner = t.getScanner(s)) {
Result[] next = scanner.next(10);
- assertEquals(next.length, 4);
+ assertEquals(4, next.length);
}
s = new Scan();
s.setAuthorizations(new Authorizations(SECRET));
try (ResultScanner scanner = t.getScanner(s)) {
Result[] next = scanner.next(10);
- assertEquals(next.length, 4);
+ assertEquals(4, next.length);
}
s = new Scan();
s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
try (ResultScanner scanner = t.getScanner(s)) {
Result[] next = scanner.next(10);
- assertEquals(next.length, 4);
+ assertEquals(4, next.length);
}
s = new Scan();
s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL, PRIVATE));
try (ResultScanner scanner = t.getScanner(s)) {
Result[] next = scanner.next(10);
- assertEquals(next.length, 4);
+ assertEquals(4, next.length);
}
}
}
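This file's rewrites fix the argument order of JUnit's assertEquals, whose signature is assertEquals(expected, actual). A swapped call passes and fails exactly as before, but its failure message reports the values backwards, blaming the constant rather than the code under test. A small sketch, assuming JUnit 4 on the classpath; names and values are illustrative:

    import static org.junit.Assert.assertEquals;

    public class AssertOrderExample {
      public static void main(String[] args) {
        int actual = computeLength();
        // Swapped arguments would fail with "expected:<actual> but was:<4>",
        // pointing at the constant instead of the computed value:
        // assertEquals(actual, 4);
        // Correct order keeps the failure message honest:
        assertEquals(4, actual);
      }

      private static int computeLength() {
        return 4;
      }
    }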
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
index a2c015c..9b02096 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
@@ -313,6 +313,7 @@ public class TestFlushSnapshotFromClient {
// Merge two regions
List<HRegionInfo> regions = admin.getTableRegions(TABLE_NAME);
Collections.sort(regions, new Comparator<HRegionInfo>() {
+ @Override
public int compare(HRegionInfo r1, HRegionInfo r2) {
return Bytes.compareTo(r1.getStartKey(), r2.getStartKey());
}
@@ -354,6 +355,7 @@ public class TestFlushSnapshotFromClient {
// Merge two regions
List<HRegionInfo> regions = admin.getTableRegions(TABLE_NAME);
Collections.sort(regions, new Comparator<HRegionInfo>() {
+ @Override
public int compare(HRegionInfo r1, HRegionInfo r2) {
return Bytes.compareTo(r1.getStartKey(), r2.getStartKey());
}
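Both hunks above add @Override to the compare() method of an anonymous Comparator<HRegionInfo>. As an aside that is not part of this patch, on Java 8 and later the same ascending-by-start-key sort can be written with a lambda; the sketch below uses illustrative stand-ins for HRegionInfo and Bytes.compareTo:

    import java.util.ArrayList;
    import java.util.List;

    public class SortByStartKeyExample {
      // Illustrative stand-in for HRegionInfo.
      static class RegionInfo {
        final byte[] startKey;
        RegionInfo(byte[] startKey) { this.startKey = startKey; }
        byte[] getStartKey() { return startKey; }
      }

      // Illustrative stand-in for org.apache.hadoop.hbase.util.Bytes.compareTo.
      static int compareBytes(byte[] a, byte[] b) {
        for (int i = 0; i < Math.min(a.length, b.length); i++) {
          int d = (a[i] & 0xff) - (b[i] & 0xff);
          if (d != 0) return d;
        }
        return a.length - b.length;
      }

      public static void main(String[] args) {
        List<RegionInfo> regions = new ArrayList<>();
        regions.add(new RegionInfo(new byte[] { 2 }));
        regions.add(new RegionInfo(new byte[] { 1 }));
        // The lambda replaces the anonymous Comparator from the diff:
        regions.sort((r1, r2) -> compareBytes(r1.getStartKey(), r2.getStartKey()));
      }
    }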
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/MapreduceTestingShim.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/MapreduceTestingShim.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/MapreduceTestingShim.java
index 3f7d441..07fdcd2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/MapreduceTestingShim.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/MapreduceTestingShim.java
@@ -77,6 +77,7 @@ abstract public class MapreduceTestingShim {
}
private static class MapreduceV1Shim extends MapreduceTestingShim {
+ @Override
public JobContext newJobContext(Configuration jobConf) throws IOException {
// Implementing:
// return new JobContext(jobConf, new JobID());
@@ -105,6 +106,7 @@ abstract public class MapreduceTestingShim {
}
}
+ @Override
public JobConf obtainJobConf(MiniMRCluster cluster) {
if (cluster == null) return null;
try {
@@ -129,6 +131,7 @@ abstract public class MapreduceTestingShim {
};
private static class MapreduceV2Shim extends MapreduceTestingShim {
+ @Override
public JobContext newJobContext(Configuration jobConf) {
return newJob(jobConf);
}
@@ -147,6 +150,7 @@ abstract public class MapreduceTestingShim {
}
}
+ @Override
public JobConf obtainJobConf(MiniMRCluster cluster) {
try {
Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
index 8b1c96e..a3ca323 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
@@ -681,7 +681,7 @@ public class TestLoadIncrementalHFiles {
compare[1] = "r".getBytes();
compare[2] = "u".getBytes();
- assertEquals(keysArray.length, 3);
+ assertEquals(3, keysArray.length);
for (int row = 0; row < keysArray.length; row++) {
assertArrayEquals(keysArray[row], compare[row]);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
index 56c9eca..2d32a3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java
@@ -420,7 +420,7 @@ public class TestLoadIncrementalHFilesSplitRecovery {
// check that data was loaded
// The three expected attempts are 1) failure because need to split, 2)
// load of split top 3) load of split bottom
- assertEquals(attemptedCalls.get(), 3);
+ assertEquals(3, attemptedCalls.get());
assertExpectedTable(table, ROWCOUNT, 2);
}
}
[8/9] hbase git commit: HBASE-19811 Fix findbugs and error-prone warnings in hbase-server (branch-2)
Posted by st...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
index 65c4d08..ab282d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
@@ -117,6 +117,7 @@ public class RSProcedureDispatcher
}
}
+ @Override
protected void abortPendingOperations(final ServerName serverName,
final Set<RemoteProcedure> operations) {
// TODO: Replace with a ServerNotOnlineException()
@@ -126,10 +127,12 @@ public class RSProcedureDispatcher
}
}
+ @Override
public void serverAdded(final ServerName serverName) {
addNode(serverName);
}
+ @Override
public void serverRemoved(final ServerName serverName) {
removeNode(serverName);
}
@@ -138,6 +141,7 @@ public class RSProcedureDispatcher
* Base remote call
*/
protected abstract class AbstractRSRemoteCall implements Callable<Void> {
+ @Override
public abstract Void call();
private final ServerName serverName;
@@ -269,6 +273,7 @@ public class RSProcedureDispatcher
this.remoteProcedures = remoteProcedures;
}
+ @Override
public Void call() {
request = ExecuteProceduresRequest.newBuilder();
if (LOG.isTraceEnabled()) {
@@ -290,11 +295,13 @@ public class RSProcedureDispatcher
return null;
}
+ @Override
public void dispatchOpenRequests(final MasterProcedureEnv env,
final List<RegionOpenOperation> operations) {
request.addOpenRegion(buildOpenRegionRequest(env, getServerName(), operations));
}
+ @Override
public void dispatchCloseRequests(final MasterProcedureEnv env,
final List<RegionCloseOperation> operations) {
for (RegionCloseOperation op: operations) {
@@ -471,11 +478,13 @@ public class RSProcedureDispatcher
return null;
}
+ @Override
public void dispatchOpenRequests(final MasterProcedureEnv env,
final List<RegionOpenOperation> operations) {
submitTask(new OpenRegionRemoteCall(serverName, operations));
}
+ @Override
public void dispatchCloseRequests(final MasterProcedureEnv env,
final List<RegionCloseOperation> operations) {
for (RegionCloseOperation op: operations) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
index a8475f0..559863e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
@@ -86,6 +86,7 @@ public class SnapshotHFileCleaner extends BaseHFileCleanerDelegate {
return false;
}
+ @Override
public void setConf(final Configuration conf) {
super.setConf(conf);
try {
@@ -95,6 +96,7 @@ public class SnapshotHFileCleaner extends BaseHFileCleanerDelegate {
Path rootDir = FSUtils.getRootDir(conf);
cache = new SnapshotFileCache(fs, rootDir, cacheRefreshPeriod, cacheRefreshPeriod,
"snapshot-hfile-cleaner-cache-refresher", new SnapshotFileCache.SnapshotFileInspector() {
+ @Override
public Collection<String> filesUnderSnapshot(final Path snapshotDir)
throws IOException {
return SnapshotReferenceUtil.getHFileNames(conf, fs, snapshotDir);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/CachedMobFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/CachedMobFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/CachedMobFile.java
index 397570c..7436d9c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/CachedMobFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/CachedMobFile.java
@@ -54,6 +54,7 @@ public class CachedMobFile extends MobFile implements Comparable<CachedMobFile>
this.accessCount = accessCount;
}
+ @Override
public int compareTo(CachedMobFile that) {
if (this.accessCount == that.accessCount) return 0;
return this.accessCount < that.accessCount ? 1 : -1;
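CachedMobFile orders instances by descending access count, so the more heavily accessed file sorts first. Not part of the patch, but the hand-rolled comparison above could equally use the JDK helper; the stand-in below assumes accessCount is a long:

    // Illustrative stand-in for the comparison above.
    class AccessCounted implements Comparable<AccessCounted> {
      long accessCount;

      @Override
      public int compareTo(AccessCounted that) {
        // Swapped operands give the same descending-by-access-count order.
        return Long.compare(that.accessCount, this.accessCount);
      }
    }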
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
index 053cba6..120f11e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java
@@ -86,6 +86,7 @@ public class ExpiredMobFileCleaner extends Configured implements Tool {
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="REC_CATCH_EXCEPTION",
justification="Intentional")
+ @Override
public int run(String[] args) throws Exception {
if (args.length != 2) {
printUsage();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
index aaf545b..ab917a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
@@ -294,6 +294,7 @@ public class PartitionedMobCompactionRequest extends MobCompactionRequest {
this.endKey = endKey;
}
+ @Override
public int compareTo(CompactionDelPartitionId o) {
/*
* 1). Compare the start key, if the k1 < k2, then k1 is less
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
index 6b90e6b..1b6ad91 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
@@ -74,6 +74,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* monitored Handler.
* @return the queue timestamp or -1 if there is no RPC currently running.
*/
+ @Override
public long getRPCQueueTime() {
if (getState() != State.RUNNING) {
return -1;
@@ -86,6 +87,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* monitored Handler.
* @return the start timestamp or -1 if there is no RPC currently running.
*/
+ @Override
public long getRPCStartTime() {
if (getState() != State.RUNNING) {
return -1;
@@ -98,6 +100,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* by this Handler.
* @return a string representing the method call without parameters
*/
+ @Override
public synchronized String getRPC() {
return getRPC(false);
}
@@ -108,6 +111,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* @param withParams toggle inclusion of parameters in the RPC String
* @return A human-readable string representation of the method call.
*/
+ @Override
public synchronized String getRPC(boolean withParams) {
if (getState() != State.RUNNING) {
// no RPC is currently running
@@ -132,6 +136,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* by this Handler.
* @return A human-readable string representation of the method call.
*/
+ @Override
public long getRPCPacketLength() {
if (getState() != State.RUNNING || packet == null) {
// no RPC is currently running, or we don't have an RPC's packet info
@@ -146,6 +151,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* @return A human-readable string representation of the address and port
* of the client.
*/
+ @Override
public String getClient() {
return clientAddress + ":" + remotePort;
}
@@ -155,6 +161,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* RPC call.
* @return true if the monitored handler is currently servicing an RPC call.
*/
+ @Override
public boolean isRPCRunning() {
return getState() == State.RUNNING;
}
@@ -166,6 +173,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* @return true if the monitored handler is currently servicing an RPC call
* to a database command.
*/
+ @Override
public synchronized boolean isOperationRunning() {
if(!isRPCRunning()) {
return false;
@@ -183,6 +191,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* @param methodName The name of the method that will be called by the RPC.
* @param params The parameters that will be passed to the indicated method.
*/
+ @Override
public synchronized void setRPC(String methodName, Object [] params,
long queueTime) {
this.methodName = methodName;
@@ -197,6 +206,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* that it can later compute its size if asked for it.
* @param param The protobuf received by the RPC for this call
*/
+ @Override
public void setRPCPacket(Message param) {
this.packet = param;
}
@@ -206,6 +216,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
* @param clientAddress the address of the current client
* @param remotePort the port from which the client connected
*/
+ @Override
public void setConnection(String clientAddress, int remotePort) {
this.clientAddress = clientAddress;
this.remotePort = remotePort;
@@ -218,6 +229,7 @@ public class MonitoredRPCHandlerImpl extends MonitoredTaskImpl
this.packet = null;
}
+ @Override
public synchronized Map<String, Object> toMap() {
// only include RPC info if the Handler is actively servicing an RPC call
Map<String, Object> map = super.toMap();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
index b3869f4..bedb5e2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
@@ -154,6 +154,7 @@ class MonitoredTaskImpl implements MonitoredTask {
* Force the completion timestamp backwards so that
* it expires now.
*/
+ @Override
public void expireNow() {
stateTime -= 180 * 1000;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
index 6749d2f..4aff779 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
@@ -319,7 +319,7 @@ public class TaskMonitor {
OPERATION("operation"),
ALL("all");
- private String type;
+ private final String type;
private TaskType(String type) {
this.type = type.toLowerCase();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
index 49b344f..fe3edfa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
@@ -103,7 +103,7 @@ public class Procedure implements Callable<Void>, ForeignExceptionListener {
//
/** lock to prevent nodes from acquiring and then releasing before we can track them */
- private Object joinBarrierLock = new Object();
+ private final Object joinBarrierLock = new Object();
private final List<String> acquiringMembers;
private final List<String> inBarrierMembers;
private final HashMap<String, byte[]> dataFromFinishedMembers;
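Here, as with the type field in TaskMonitor above, a field that is assigned exactly once gains the final modifier. Beyond documenting intent, final fields initialized during construction receive safe-publication guarantees from the Java memory model, which matters for a lock object such as joinBarrierLock that is shared across threads. A compressed sketch of the pattern, with illustrative names:

    public class BarrierHolder {
      // final: assigned once, safely published to all threads after construction.
      private final Object barrierLock = new Object();

      public void await() {
        synchronized (barrierLock) {
          // ... coordinate members here ...
        }
      }
    }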
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
index af4d2d7..9ebb1d7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
@@ -88,11 +88,9 @@ public abstract class ProcedureManagerHost<E extends ProcedureManager> {
E impl;
Object o = null;
try {
- o = implClass.newInstance();
+ o = implClass.getDeclaredConstructor().newInstance();
impl = (E)o;
- } catch (InstantiationException e) {
- throw new IOException(e);
- } catch (IllegalAccessException e) {
+ } catch (Exception e) {
throw new IOException(e);
}
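The reflection change swaps Class.newInstance(), which rethrows any exception the constructor raises without declaring it, for getDeclaredConstructor().newInstance(), which wraps constructor failures in InvocationTargetException and is the recommended replacement now that Class.newInstance() is deprecated (since Java 9). A minimal sketch of the new pattern, with illustrative names:

    import java.io.IOException;

    public class ReflectiveFactory {
      static <E> E instantiate(Class<? extends E> implClass) throws IOException {
        try {
          // Preferred over implClass.newInstance(): constructor exceptions
          // arrive wrapped in InvocationTargetException instead of being
          // rethrown undeclared.
          return implClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
          throw new IOException(e);
        }
      }
    }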
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
index 6416e6a..d15f5ac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
@@ -153,6 +153,7 @@ abstract public class Subprocedure implements Callable<Void> {
* Subprocedure, ForeignException)}.
*/
@SuppressWarnings("finally")
+ @Override
final public Void call() {
LOG.debug("Starting subprocedure '" + barrierName + "' with timeout " +
executionTimeoutTimer.getMaxTime() + "ms");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
index c1fb8f5..71ba28e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
@@ -19,19 +19,21 @@ package org.apache.hadoop.hbase.procedure;
import java.io.IOException;
import java.io.InterruptedIOException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
-import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
+import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+
/**
* ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator}
*/
@@ -218,8 +220,8 @@ public class ZKProcedureCoordinator implements ProcedureCoordinatorRpcs {
} else {
dataFromMember = Arrays.copyOfRange(dataFromMember, ProtobufUtil.lengthOfPBMagic(),
dataFromMember.length);
- LOG.debug("Finished data from procedure '" + procName
- + "' member '" + member + "': " + new String(dataFromMember));
+ LOG.debug("Finished data from procedure '{}' member '{}': {}", procName, member,
+ new String(dataFromMember, StandardCharsets.UTF_8));
coordinator.memberFinishedBarrier(procName, member, dataFromMember);
}
} else {
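
Two findbugs/error-prone patterns show up in this hunk: new String(byte[]) uses the platform default charset (the DM_DEFAULT_ENCODING warning), so the charset becomes explicit, and string concatenation inside LOG.debug is replaced by SLF4J '{}' placeholders, which defers the formatting work until the DEBUG level is known to be enabled. An illustrative standalone version (class and method names hypothetical):

    import java.nio.charset.StandardCharsets;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class LoggingExample {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingExample.class);

      static void report(String procName, String member, byte[] dataFromMember) {
        // Decoding with an explicit charset makes the result independent of the
        // JVM's file.encoding; parameterized logging skips the string-building
        // work entirely when DEBUG is disabled.
        LOG.debug("Finished data from procedure '{}' member '{}': {}", procName, member,
            new String(dataFromMember, StandardCharsets.UTF_8));
      }
    }
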
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
index ea41ae8..f29d133 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
@@ -348,6 +348,7 @@ public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs {
}
}
+ @Override
public void start(final String memberName, final ProcedureMember listener) {
LOG.debug("Starting procedure member '" + memberName + "'");
this.member = listener;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
index 976e36b..9eb3fb3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
@@ -157,6 +157,7 @@ public abstract class ZKProcedureUtil
return ZNodePaths.joinZNode(controller.abortZnode, opInstanceName);
}
+ @Override
public ZKWatcher getWatcher() {
return watcher;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
index e68a1ce..6783e7d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
@@ -455,7 +455,7 @@ public class MasterQuotaManager implements RegionStateListener {
}
private static class NamedLock<T> {
- private HashSet<T> locks = new HashSet<>();
+ private final HashSet<T> locks = new HashSet<>();
public void lock(final T name) throws InterruptedException {
synchronized (locks) {
@@ -501,6 +501,7 @@ public class MasterQuotaManager implements RegionStateListener {
return time;
}
+ @Override
public boolean equals(Object o) {
if (o instanceof SizeSnapshotWithTimestamp) {
SizeSnapshotWithTimestamp other = (SizeSnapshotWithTimestamp) o;
@@ -509,6 +510,7 @@ public class MasterQuotaManager implements RegionStateListener {
return false;
}
+ @Override
public int hashCode() {
HashCodeBuilder hcb = new HashCodeBuilder();
return hcb.append(size).append(time).toHashCode();
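
Adding @Override to equals and hashCode lets the compiler reject accidental overloads such as equals(SizeSnapshotWithTimestamp), and the pair must stay consistent: objects that compare equal must hash equal. A sketch of the same pattern, assuming commons-lang3's HashCodeBuilder and using hypothetical names that mirror the hunk:

    import org.apache.commons.lang3.builder.HashCodeBuilder;

    public final class SizeWithTimestamp {
      private final long size;
      private final long time;

      SizeWithTimestamp(long size, long time) {
        this.size = size;
        this.time = time;
      }

      @Override
      public boolean equals(Object o) {
        if (o instanceof SizeWithTimestamp) {
          SizeWithTimestamp other = (SizeWithTimestamp) o;
          return size == other.size && time == other.time;
        }
        return false;
      }

      @Override
      public int hashCode() {
        // Built from exactly the fields compared in equals(), so the
        // equals/hashCode contract holds.
        return new HashCodeBuilder().append(size).append(time).toHashCode();
      }
    }
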
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
index 5e20ce9..869ead3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java
@@ -386,7 +386,8 @@ public class QuotaObserverChore extends ScheduledChore {
for (TableName tableInNS : tablesByNamespace.get(namespace)) {
final SpaceQuotaSnapshot tableQuotaSnapshot =
tableSnapshotStore.getCurrentState(tableInNS);
- final boolean hasTableQuota = QuotaSnapshotStore.NO_QUOTA != tableQuotaSnapshot;
+ final boolean hasTableQuota =
+ !Objects.equals(QuotaSnapshotStore.NO_QUOTA, tableQuotaSnapshot);
if (hasTableQuota && tableQuotaSnapshot.getQuotaStatus().isInViolation()) {
// Table-level quota violation policy is being applied here.
if (LOG.isTraceEnabled()) {
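
The != comparison against the NO_QUOTA sentinel is an identity check, which holds only while every no-quota snapshot is the same singleton instance; !Objects.equals(a, b) is null-safe and also treats a logically equal copy as the sentinel. A toy illustration with a hypothetical String sentinel:

    import java.util.Objects;

    final class SentinelCheck {
      // Hypothetical stand-in for QuotaSnapshotStore.NO_QUOTA.
      static final String NO_QUOTA = "NO_QUOTA";

      static boolean hasQuota(String snapshot) {
        // Null-safe, and a logically equal copy of the sentinel (for example
        // one that came back from serialization) still counts as "no quota".
        return !Objects.equals(NO_QUOTA, snapshot);
      }
    }
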
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java
index 0c856b1..852d8a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java
@@ -100,6 +100,7 @@ public abstract class RateLimiter {
this.avail = limit;
}
+ @Override
public String toString() {
String rateLimiter = this.getClass().getSimpleName();
if (getLimit() == Long.MAX_VALUE) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
index 2d4414c..b0bdede 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java
@@ -53,7 +53,7 @@ public class RegionServerSpaceQuotaManager {
private SpaceQuotaRefresherChore spaceQuotaRefresher;
private AtomicReference<Map<TableName, SpaceQuotaSnapshot>> currentQuotaSnapshots;
private boolean started = false;
- private ConcurrentHashMap<TableName,SpaceViolationPolicyEnforcement> enforcedPolicies;
+ private final ConcurrentHashMap<TableName,SpaceViolationPolicyEnforcement> enforcedPolicies;
private SpaceViolationPolicyEnforcementFactory factory;
public RegionServerSpaceQuotaManager(RegionServerServices rsServices) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierFactory.java
index 3fb7ad3..f19595f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierFactory.java
@@ -54,8 +54,8 @@ public class SpaceQuotaSnapshotNotifierFactory {
.getClass(SNAPSHOT_NOTIFIER_KEY, SNAPSHOT_NOTIFIER_DEFAULT,
SpaceQuotaSnapshotNotifier.class);
try {
- return clz.newInstance();
- } catch (InstantiationException | IllegalAccessException e) {
+ return clz.getDeclaredConstructor().newInstance();
+ } catch (Exception e) {
throw new IllegalArgumentException("Failed to instantiate the implementation", e);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
index 8a19908..6dbe0a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java
@@ -54,7 +54,7 @@ public abstract class AbstractMemStore implements MemStore {
// Used to track when to flush
private volatile long timeOfOldestEdit;
- public final static long FIXED_OVERHEAD = ClassSize.OBJECT
+ public final static long FIXED_OVERHEAD = (long) ClassSize.OBJECT
+ (4 * ClassSize.REFERENCE)
+ (2 * Bytes.SIZEOF_LONG); // snapshotId, timeOfOldestEdit
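
ClassSize.OBJECT, ClassSize.REFERENCE and Bytes.SIZEOF_LONG are ints, so without the cast the whole sum is evaluated in 32-bit arithmetic and only then widened to long; casting the first operand makes the additions themselves happen in long. The sizes involved here are tiny, so this silences the lossy int-arithmetic warning rather than fixing an observed overflow. A self-contained demonstration of the difference:

    public final class WideningDemo {
      public static void main(String[] args) {
        int big = Integer.MAX_VALUE;
        long overflowed = big + 1;        // int addition overflows, then widens: -2147483648
        long correct = (long) big + 1;    // long addition: 2147483648
        System.out.println(overflowed + " vs " + correct);
      }
    }
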
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AdaptiveMemStoreCompactionStrategy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AdaptiveMemStoreCompactionStrategy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AdaptiveMemStoreCompactionStrategy.java
index 232ffe3..9a866a1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AdaptiveMemStoreCompactionStrategy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AdaptiveMemStoreCompactionStrategy.java
@@ -101,10 +101,13 @@ public class AdaptiveMemStoreCompactionStrategy extends MemStoreCompactionStrate
public void resetStats() {
compactionProbability = initialCompactionProbability;
}
+
+ @Override
protected Action getMergingAction() {
return Action.MERGE_COUNT_UNIQUE_KEYS;
}
+ @Override
protected Action getFlattenAction() {
return Action.FLATTEN;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
index 523ccf2..bf9b191 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkImmutableSegment.java
@@ -82,7 +82,7 @@ public class CellChunkImmutableSegment extends ImmutableSegment {
@Override
protected long indexEntrySize() {
- return (ClassSize.CELL_CHUNK_MAP_ENTRY - KeyValue.FIXED_OVERHEAD);
+ return ((long) ClassSize.CELL_CHUNK_MAP_ENTRY - KeyValue.FIXED_OVERHEAD);
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
index 6159385..a4fe883 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java
@@ -73,113 +73,140 @@ public class CellSet implements NavigableSet<Cell> {
return delegatee;
}
+ @Override
public Cell ceiling(Cell e) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public Iterator<Cell> descendingIterator() {
return this.delegatee.descendingMap().values().iterator();
}
+ @Override
public NavigableSet<Cell> descendingSet() {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public Cell floor(Cell e) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public SortedSet<Cell> headSet(final Cell toElement) {
return headSet(toElement, false);
}
+ @Override
public NavigableSet<Cell> headSet(final Cell toElement,
boolean inclusive) {
return new CellSet(this.delegatee.headMap(toElement, inclusive), UNKNOWN_NUM_UNIQUES);
}
+ @Override
public Cell higher(Cell e) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public Iterator<Cell> iterator() {
return this.delegatee.values().iterator();
}
+ @Override
public Cell lower(Cell e) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public Cell pollFirst() {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public Cell pollLast() {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public SortedSet<Cell> subSet(Cell fromElement, Cell toElement) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public NavigableSet<Cell> subSet(Cell fromElement,
boolean fromInclusive, Cell toElement, boolean toInclusive) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public SortedSet<Cell> tailSet(Cell fromElement) {
return tailSet(fromElement, true);
}
+ @Override
public NavigableSet<Cell> tailSet(Cell fromElement, boolean inclusive) {
return new CellSet(this.delegatee.tailMap(fromElement, inclusive), UNKNOWN_NUM_UNIQUES);
}
+ @Override
public Comparator<? super Cell> comparator() {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public Cell first() {
return this.delegatee.firstEntry().getValue();
}
+ @Override
public Cell last() {
return this.delegatee.lastEntry().getValue();
}
+ @Override
public boolean add(Cell e) {
return this.delegatee.put(e, e) == null;
}
+ @Override
public boolean addAll(Collection<? extends Cell> c) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public void clear() {
this.delegatee.clear();
}
+ @Override
public boolean contains(Object o) {
//noinspection SuspiciousMethodCalls
return this.delegatee.containsKey(o);
}
+ @Override
public boolean containsAll(Collection<?> c) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public boolean isEmpty() {
return this.delegatee.isEmpty();
}
+ @Override
public boolean remove(Object o) {
return this.delegatee.remove(o) != null;
}
+ @Override
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
@@ -188,14 +215,17 @@ public class CellSet implements NavigableSet<Cell> {
return this.delegatee.get(kv);
}
+ @Override
public int size() {
return this.delegatee.size();
}
+ @Override
public Object[] toArray() {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
+ @Override
public <T> T[] toArray(T[] a) {
throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
}
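
The blanket @Override additions in this file (and the many similar hunks below) are the error-prone MissingOverride fix: once an implementation of an interface method is annotated, renaming or changing the interface method becomes a compile error instead of silently leaving behind a dead overload. A toy example of what the annotation catches (names hypothetical):

    import java.util.Collections;
    import java.util.Iterator;

    interface Scannable<T> {
      Iterator<T> iterator();
    }

    final class StringScanner implements Scannable<String> {
      // Without @Override, a typo such as "interator()" would compile as a new,
      // never-called method; with it, the compiler rejects the mismatch.
      @Override
      public Iterator<String> iterator() {
        return Collections.emptyIterator();
      }
    }
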
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
index d874b2e..3cb4103 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
@@ -143,7 +143,7 @@ public class CompactingMemStore extends AbstractMemStore {
factor = conf.getDouble(IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY,
IN_MEMORY_FLUSH_THRESHOLD_FACTOR_DEFAULT);
}
- inmemoryFlushSize *= factor;
+ inmemoryFlushSize = (long) (inmemoryFlushSize * factor);
LOG.info("Setting in-memory flush size threshold to " + inmemoryFlushSize
+ " and immutable segments index to be of type " + indexType);
}
@@ -365,7 +365,7 @@ public class CompactingMemStore extends AbstractMemStore {
MutableSegment activeTmp = active;
List<? extends Segment> pipelineList = pipeline.getSegments();
List<? extends Segment> snapshotList = snapshot.getAllSegments();
- long order = 1 + pipelineList.size() + snapshotList.size();
+ long order = 1L + pipelineList.size() + snapshotList.size();
// The list of elements in pipeline + the active element + the snapshot segment
// The order is the Segment ordinal
List<KeyValueScanner> list = createList((int) order);
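
Both changes in this file make implicit numeric conversions explicit. A compound assignment like inmemoryFlushSize *= factor compiles even though it multiplies a long by a double, because the JLS defines it as inmemoryFlushSize = (long) (inmemoryFlushSize * factor), with a hidden narrowing cast; writing the cast out makes the truncation visible. Likewise 1L + ... forces the size additions to happen in long. A small demonstration of the hidden cast:

    public final class CompoundAssignDemo {
      public static void main(String[] args) {
        long size = 100;
        double factor = 0.25;
        // size *= factor;               // legal, but hides: size = (long) (size * factor)
        size = (long) (size * factor);   // the truncation is now explicit
        System.out.println(size);        // 25
      }
    }
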
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index 1aae068..8bd990a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
@@ -279,6 +279,7 @@ public class CompositeImmutableSegment extends ImmutableSegment {
/**
* Dumps all cells of the segment into the given log
*/
+ @Override
void dump(Logger log) {
for (ImmutableSegment s : segments) {
s.dump(log);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
index 4539ed6..daae083 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java
@@ -89,6 +89,7 @@ public class DateTieredStoreEngine extends StoreEngine<DefaultStoreFlusher,
super.forceSelect(request);
}
+ @Override
public List<Path> compact(ThroughputController throughputController, User user)
throws IOException {
if (request instanceof DateTieredCompactionRequest) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index b3f0a44..26bf640 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -61,11 +61,9 @@ public class DefaultStoreFlusher extends StoreFlusher {
synchronized (flushLock) {
status.setStatus("Flushing " + store + ": creating writer");
// Write the map out to the disk
- writer = store.createWriterInTmp(cellsCount, store.getColumnFamilyDescriptor().getCompressionType(),
- /* isCompaction = */ false,
- /* includeMVCCReadpoint = */ true,
- /* includesTags = */ snapshot.isTagsPresent(),
- /* shouldDropBehind = */ false);
+ writer = store.createWriterInTmp(cellsCount,
+ store.getColumnFamilyDescriptor().getCompressionType(), false, true,
+ snapshot.isTagsPresent(), false);
IOException e = null;
try {
performFlush(scanner, writer, smallestReadPoint, throughputController);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
index d56a1c2..740eb08 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java
@@ -26,6 +26,7 @@ import java.util.Map;
import java.util.NavigableSet;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -82,15 +83,15 @@ public class HMobStore extends HStore {
private MobCacheConfig mobCacheConfig;
private Path homePath;
private Path mobFamilyPath;
- private volatile long cellsCountCompactedToMob = 0;
- private volatile long cellsCountCompactedFromMob = 0;
- private volatile long cellsSizeCompactedToMob = 0;
- private volatile long cellsSizeCompactedFromMob = 0;
- private volatile long mobFlushCount = 0;
- private volatile long mobFlushedCellsCount = 0;
- private volatile long mobFlushedCellsSize = 0;
- private volatile long mobScanCellsCount = 0;
- private volatile long mobScanCellsSize = 0;
+ private AtomicLong cellsCountCompactedToMob = new AtomicLong();
+ private AtomicLong cellsCountCompactedFromMob = new AtomicLong();
+ private AtomicLong cellsSizeCompactedToMob = new AtomicLong();
+ private AtomicLong cellsSizeCompactedFromMob = new AtomicLong();
+ private AtomicLong mobFlushCount = new AtomicLong();
+ private AtomicLong mobFlushedCellsCount = new AtomicLong();
+ private AtomicLong mobFlushedCellsSize = new AtomicLong();
+ private AtomicLong mobScanCellsCount = new AtomicLong();
+ private AtomicLong mobScanCellsSize = new AtomicLong();
private ColumnFamilyDescriptor family;
private Map<String, List<Path>> map = new ConcurrentHashMap<>();
private final IdLock keyLock = new IdLock();
@@ -453,76 +454,75 @@ public class HMobStore extends HStore {
}
public void updateCellsCountCompactedToMob(long count) {
- cellsCountCompactedToMob += count;
+ cellsCountCompactedToMob.addAndGet(count);
}
public long getCellsCountCompactedToMob() {
- return cellsCountCompactedToMob;
+ return cellsCountCompactedToMob.get();
}
public void updateCellsCountCompactedFromMob(long count) {
- cellsCountCompactedFromMob += count;
+ cellsCountCompactedFromMob.addAndGet(count);
}
public long getCellsCountCompactedFromMob() {
- return cellsCountCompactedFromMob;
+ return cellsCountCompactedFromMob.get();
}
public void updateCellsSizeCompactedToMob(long size) {
- cellsSizeCompactedToMob += size;
+ cellsSizeCompactedToMob.addAndGet(size);
}
public long getCellsSizeCompactedToMob() {
- return cellsSizeCompactedToMob;
+ return cellsSizeCompactedToMob.get();
}
public void updateCellsSizeCompactedFromMob(long size) {
- cellsSizeCompactedFromMob += size;
+ cellsSizeCompactedFromMob.addAndGet(size);
}
public long getCellsSizeCompactedFromMob() {
- return cellsSizeCompactedFromMob;
+ return cellsSizeCompactedFromMob.get();
}
- @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "VO_VOLATILE_INCREMENT")
public void updateMobFlushCount() {
- mobFlushCount++;
+ mobFlushCount.incrementAndGet();
}
public long getMobFlushCount() {
- return mobFlushCount;
+ return mobFlushCount.get();
}
public void updateMobFlushedCellsCount(long count) {
- mobFlushedCellsCount += count;
+ mobFlushedCellsCount.addAndGet(count);
}
public long getMobFlushedCellsCount() {
- return mobFlushedCellsCount;
+ return mobFlushedCellsCount.get();
}
public void updateMobFlushedCellsSize(long size) {
- mobFlushedCellsSize += size;
+ mobFlushedCellsSize.addAndGet(size);
}
public long getMobFlushedCellsSize() {
- return mobFlushedCellsSize;
+ return mobFlushedCellsSize.get();
}
public void updateMobScanCellsCount(long count) {
- mobScanCellsCount += count;
+ mobScanCellsCount.addAndGet(count);
}
public long getMobScanCellsCount() {
- return mobScanCellsCount;
+ return mobScanCellsCount.get();
}
public void updateMobScanCellsSize(long size) {
- mobScanCellsSize += size;
+ mobScanCellsSize.addAndGet(size);
}
public long getMobScanCellsSize() {
- return mobScanCellsSize;
+ return mobScanCellsSize.get();
}
public byte[] getRefCellTags() {
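
The HMobStore change is the classic fix for findbugs' VO_VOLATILE_INCREMENT (note the suppression annotation deleted above): volatile makes individual reads and writes visible across threads, but counter += n is still a separate read, add, and write, so concurrent updates can be lost. AtomicLong performs the update as a single atomic operation. A minimal before/after:

    import java.util.concurrent.atomic.AtomicLong;

    final class Counters {
      // Broken under concurrency: two threads can both read 5 and both write 6.
      private volatile long unsafeCount;

      // Safe: the increment is one atomic operation.
      private final AtomicLong safeCount = new AtomicLong();

      void recordUnsafe() { unsafeCount++; }             // lost-update hazard
      void recordSafe()   { safeCount.incrementAndGet(); }
      long read()         { return safeCount.get(); }
    }
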
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index e18c80e..c0ccc1d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -26,6 +26,7 @@ import java.io.IOException;
import java.io.InterruptedIOException;
import java.lang.reflect.Constructor;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.AbstractList;
import java.util.ArrayList;
@@ -1015,7 +1016,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
}
long storeMaxSequenceId = store.getMaxSequenceId().orElse(0L);
- maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(),
+ maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()),
storeMaxSequenceId);
if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) {
maxSeqId = storeMaxSequenceId;
@@ -5524,7 +5525,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
HStore store = this.stores.get(column);
if (store == null) {
throw new IllegalArgumentException(
- "No column family : " + new String(column) + " available");
+ "No column family : " + new String(column, StandardCharsets.UTF_8) + " available");
}
Collection<HStoreFile> storeFiles = store.getStorefiles();
if (storeFiles == null) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 37ec595..bd7b4a0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -904,7 +904,7 @@ public class HRegionServer extends HasThread implements
*/
private boolean isClusterUp() {
return this.masterless ||
- this.clusterStatusTracker != null && this.clusterStatusTracker.isClusterUp();
+ (this.clusterStatusTracker != null && this.clusterStatusTracker.isClusterUp());
}
/**
@@ -1745,7 +1745,7 @@ public class HRegionServer extends HasThread implements
if (r.shouldFlush(whyFlush)) {
FlushRequester requester = server.getFlushRequester();
if (requester != null) {
- long randomDelay = RandomUtils.nextInt(0, RANGE_OF_DELAY) + MIN_DELAY_TIME;
+ long randomDelay = (long) RandomUtils.nextInt(0, RANGE_OF_DELAY) + MIN_DELAY_TIME;
LOG.info(getName() + " requesting flush of " +
r.getRegionInfo().getRegionNameAsString() + " because " +
whyFlush.toString() +
@@ -3111,13 +3111,13 @@ public class HRegionServer extends HasThread implements
}
}
- final Boolean previous = this.regionsInTransitionInRS.putIfAbsent(encodedName.getBytes(),
+ final Boolean previous = this.regionsInTransitionInRS.putIfAbsent(Bytes.toBytes(encodedName),
Boolean.FALSE);
if (Boolean.TRUE.equals(previous)) {
LOG.info("Received CLOSE for the region:" + encodedName + " , which we are already " +
"trying to OPEN. Cancelling OPENING.");
- if (!regionsInTransitionInRS.replace(encodedName.getBytes(), previous, Boolean.FALSE)){
+ if (!regionsInTransitionInRS.replace(Bytes.toBytes(encodedName), previous, Boolean.FALSE)) {
// The replace failed. That should be an exceptional case, but theoretically it can happen.
// We're going to try to do a standard close then.
LOG.warn("The opening for region " + encodedName + " was done before we could cancel it." +
@@ -3140,7 +3140,7 @@ public class HRegionServer extends HasThread implements
if (actualRegion == null) {
LOG.debug("Received CLOSE for a region which is not online, and we're not opening.");
- this.regionsInTransitionInRS.remove(encodedName.getBytes());
+ this.regionsInTransitionInRS.remove(Bytes.toBytes(encodedName));
// The master deletes the znode when it receives this exception.
throw new NotServingRegionException("The region " + encodedName +
" is not online, and is not opening.");
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
index d3509c2..afd85f8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
@@ -44,6 +44,7 @@ public class HRegionServerCommandLine extends ServerCommandLine {
this.regionServerClass = clazz;
}
+ @Override
protected String getUsage() {
return USAGE;
}
@@ -73,6 +74,7 @@ public class HRegionServerCommandLine extends ServerCommandLine {
return 0;
}
+ @Override
public int run(String args[]) throws Exception {
if (args.length != 1) {
usage(null);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 01121dd..f228d44 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -42,6 +42,7 @@ import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Predicate;
@@ -149,8 +150,8 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
volatile boolean forceMajor = false;
/* how many bytes to write between status checks */
static int closeCheckInterval = 0;
- private volatile long storeSize = 0L;
- private volatile long totalUncompressedBytes = 0L;
+ private AtomicLong storeSize = new AtomicLong();
+ private AtomicLong totalUncompressedBytes = new AtomicLong();
/**
* RWLock for store operations.
@@ -209,13 +210,13 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
private int compactionCheckMultiplier;
protected Encryption.Context cryptoContext = Encryption.Context.NONE;
- private volatile long flushedCellsCount = 0;
- private volatile long compactedCellsCount = 0;
- private volatile long majorCompactedCellsCount = 0;
- private volatile long flushedCellsSize = 0;
- private volatile long flushedOutputFileSize = 0;
- private volatile long compactedCellsSize = 0;
- private volatile long majorCompactedCellsSize = 0;
+ private AtomicLong flushedCellsCount = new AtomicLong();
+ private AtomicLong compactedCellsCount = new AtomicLong();
+ private AtomicLong majorCompactedCellsCount = new AtomicLong();
+ private AtomicLong flushedCellsSize = new AtomicLong();
+ private AtomicLong flushedOutputFileSize = new AtomicLong();
+ private AtomicLong compactedCellsSize = new AtomicLong();
+ private AtomicLong majorCompactedCellsSize = new AtomicLong();
/**
* Constructor
@@ -544,8 +545,9 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
HStoreFile storeFile = completionService.take().get();
if (storeFile != null) {
long length = storeFile.getReader().length();
- this.storeSize += length;
- this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes();
+ this.storeSize.addAndGet(length);
+ this.totalUncompressedBytes
+ .addAndGet(storeFile.getReader().getTotalUncompressedBytes());
LOG.debug("loaded {}", storeFile);
results.add(storeFile);
}
@@ -844,8 +846,8 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
private void bulkLoadHFile(HStoreFile sf) throws IOException {
StoreFileReader r = sf.getReader();
- this.storeSize += r.length();
- this.totalUncompressedBytes += r.getTotalUncompressedBytes();
+ this.storeSize.addAndGet(r.length());
+ this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());
// Append the new storefile into the list
this.lock.writeLock().lock();
@@ -1021,8 +1023,8 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
HStoreFile sf = createStoreFileAndReader(dstPath);
StoreFileReader r = sf.getReader();
- this.storeSize += r.length();
- this.totalUncompressedBytes += r.getTotalUncompressedBytes();
+ this.storeSize.addAndGet(r.length());
+ this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());
if (LOG.isInfoEnabled()) {
LOG.info("Added " + sf + ", entries=" + r.getEntries() +
@@ -1373,11 +1375,11 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
writeCompactionWalRecord(filesToCompact, sfs);
replaceStoreFiles(filesToCompact, sfs);
if (cr.isMajor()) {
- majorCompactedCellsCount += getCompactionProgress().totalCompactingKVs;
- majorCompactedCellsSize += getCompactionProgress().totalCompactedSize;
+ majorCompactedCellsCount.addAndGet(getCompactionProgress().totalCompactingKVs);
+ majorCompactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);
} else {
- compactedCellsCount += getCompactionProgress().totalCompactingKVs;
- compactedCellsSize += getCompactionProgress().totalCompactedSize;
+ compactedCellsCount.addAndGet(getCompactionProgress().totalCompactingKVs);
+ compactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize);
}
long outputBytes = getTotalSize(sfs);
@@ -1478,7 +1480,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
}
}
message.append("total size for store is ")
- .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize, "", 1))
+ .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1))
.append(". This selection was in queue for ")
.append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime()))
.append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime))
@@ -1772,7 +1774,8 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
completeCompaction(delSfs);
LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in "
+ this + " of " + this.getRegionInfo().getRegionNameAsString()
- + "; total size for store is " + TraditionalBinaryPrefix.long2String(storeSize, "", 1));
+ + "; total size for store is "
+ + TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1));
}
public void cancelRequestedCompaction(CompactionContext compaction) {
@@ -1826,16 +1829,16 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
@VisibleForTesting
protected void completeCompaction(Collection<HStoreFile> compactedFiles)
throws IOException {
- this.storeSize = 0L;
- this.totalUncompressedBytes = 0L;
+ this.storeSize.set(0L);
+ this.totalUncompressedBytes.set(0L);
for (HStoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) {
StoreFileReader r = hsf.getReader();
if (r == null) {
LOG.warn("StoreFile {} has a null Reader", hsf);
continue;
}
- this.storeSize += r.length();
- this.totalUncompressedBytes += r.getTotalUncompressedBytes();
+ this.storeSize.addAndGet(r.length());
+ this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());
}
}
@@ -1896,7 +1899,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
@Override
public long getSize() {
- return storeSize;
+ return storeSize.get();
}
public void triggerMajorCompaction() {
@@ -2043,7 +2046,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
@Override
public long getStoreSizeUncompressed() {
- return this.totalUncompressedBytes;
+ return this.totalUncompressedBytes.get();
}
@Override
@@ -2235,9 +2238,9 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
committedFiles.add(sf.getPath());
}
- HStore.this.flushedCellsCount += cacheFlushCount;
- HStore.this.flushedCellsSize += cacheFlushSize;
- HStore.this.flushedOutputFileSize += outputFileSize;
+ HStore.this.flushedCellsCount.addAndGet(cacheFlushCount);
+ HStore.this.flushedCellsSize.addAndGet(cacheFlushSize);
+ HStore.this.flushedOutputFileSize.addAndGet(outputFileSize);
// Add new file to store files. Clear snapshot too while we have the Store write lock.
return HStore.this.updateStorefiles(storeFiles, snapshot.getId());
@@ -2270,8 +2273,9 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), file);
HStoreFile storeFile = createStoreFileAndReader(storeFileInfo);
storeFiles.add(storeFile);
- HStore.this.storeSize += storeFile.getReader().length();
- HStore.this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes();
+ HStore.this.storeSize.addAndGet(storeFile.getReader().length());
+ HStore.this.totalUncompressedBytes
+ .addAndGet(storeFile.getReader().getTotalUncompressedBytes());
if (LOG.isInfoEnabled()) {
LOG.info("Region: " + HStore.this.getRegionInfo().getEncodedName() +
" added " + storeFile + ", entries=" + storeFile.getReader().getEntries() +
@@ -2315,7 +2319,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
}
public static final long FIXED_OVERHEAD =
- ClassSize.align(ClassSize.OBJECT + (17 * ClassSize.REFERENCE) + (11 * Bytes.SIZEOF_LONG)
+ ClassSize.align(ClassSize.OBJECT + (26 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
+ (5 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD
@@ -2354,37 +2358,37 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
@Override
public long getFlushedCellsCount() {
- return flushedCellsCount;
+ return flushedCellsCount.get();
}
@Override
public long getFlushedCellsSize() {
- return flushedCellsSize;
+ return flushedCellsSize.get();
}
@Override
public long getFlushedOutputFileSize() {
- return flushedOutputFileSize;
+ return flushedOutputFileSize.get();
}
@Override
public long getCompactedCellsCount() {
- return compactedCellsCount;
+ return compactedCellsCount.get();
}
@Override
public long getCompactedCellsSize() {
- return compactedCellsSize;
+ return compactedCellsSize.get();
}
@Override
public long getMajorCompactedCellsCount() {
- return majorCompactedCellsCount;
+ return majorCompactedCellsCount.get();
}
@Override
public long getMajorCompactedCellsSize() {
- return majorCompactedCellsSize;
+ return majorCompactedCellsSize.get();
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
index 19a63b4..21446d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
@@ -95,7 +95,7 @@ public class IncreasingToUpperBoundRegionSplitPolicy extends ConstantSizeRegionS
}
}
- return foundABigStore | force;
+ return foundABigStore || force;
}
/**
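
foundABigStore | force is the non-short-circuiting OR applied to booleans: both operands are always evaluated, which is usually a typo for ||. With two plain locals the result is identical either way, so this change is about intent; the difference only matters when the right-hand side has side effects or cost, as in this sketch:

    public final class OrDemo {
      static boolean sideEffect() {
        System.out.println("evaluated");
        return true;
      }

      public static void main(String[] args) {
        boolean force = true;
        boolean eager = force | sideEffect();   // prints "evaluated"
        boolean lazy  = force || sideEffect();  // prints nothing
        System.out.println(eager && lazy);
      }
    }
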
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
index 1e71bc8..fe52758 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
@@ -67,5 +67,6 @@ public interface InternalScanner extends Closeable {
* Closes the scanner and releases any resources it has allocated
* @throws IOException
*/
+ @Override
void close() throws IOException;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
index 779ed49..053ae99 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
@@ -104,6 +104,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
}
}
+ @Override
public Cell peek() {
if (this.current == null) {
return null;
@@ -111,6 +112,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
return this.current.peek();
}
+ @Override
public Cell next() throws IOException {
if(this.current == null) {
return null;
@@ -182,6 +184,8 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
public KVScannerComparator(CellComparator kvComparator) {
this.kvComparator = kvComparator;
}
+
+ @Override
public int compare(KeyValueScanner left, KeyValueScanner right) {
int comparison = compare(left.peek(), right.peek());
if (comparison != 0) {
@@ -210,6 +214,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner
}
}
+ @Override
public void close() {
for (KeyValueScanner scanner : this.scannersForDelayedClose) {
scanner.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
index 796f7c9..864cc06 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
@@ -84,6 +84,7 @@ public interface KeyValueScanner extends Shipper, Closeable {
/**
* Close the KeyValue scanner.
*/
+ @Override
void close();
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
index d564e40..a8c3362 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
@@ -167,8 +167,8 @@ public class MemStoreCompactor {
// Substitute the pipeline with one segment
if (!isInterrupted.get()) {
- if (resultSwapped = compactingMemStore.swapCompactedSegments(
- versionedList, result, merge)) {
+ resultSwapped = compactingMemStore.swapCompactedSegments(versionedList, result, merge);
+ if (resultSwapped) {
// update compaction strategy
strategy.updateStats(result);
// update the wal so it can be truncated and not get too long
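
The original condition performed an assignment inside the if, which is legal for booleans and therefore an easy place to hide an = / == mix-up; hoisting the assignment onto its own line is the standard remedy. Illustrated with a hypothetical helper:

    final class AssignInCondition {
      // Hypothetical stand-in for compactingMemStore.swapCompactedSegments(...).
      static boolean swapSegments() { return true; }

      public static void main(String[] args) {
        boolean resultSwapped;
        // if (resultSwapped = swapSegments()) { ... }  // compiles; reads like ==
        resultSwapped = swapSegments();                 // assignment is unmistakable
        if (resultSwapped) {
          System.out.println("segments swapped");
        }
      }
    }
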
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
index fdee404..f7493b0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.BlockingQueue;
@@ -714,8 +715,13 @@ class MemStoreFlusher implements FlushRequester {
}
@Override
+ public int hashCode() {
+ return System.identityHashCode(this);
+ }
+
+ @Override
public boolean equals(Object obj) {
- return (this == obj);
+ return Objects.equals(this, obj);
}
}
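
One caution on this hunk: java.util.Objects.equals(a, b) is specified as (a == b) || (a != null && a.equals(b)), so passing this as the first argument from inside equals re-enters the same equals whenever obj is a different non-null object, recursing until StackOverflowError. For pure identity semantics the original this == obj is already the correct body, and pairing it with the identity hashCode added above keeps the contract:

    final class IdentityEquality {
      @Override
      public boolean equals(Object obj) {
        // Identity semantics, no recursion: Objects.equals(this, obj) here
        // would call back into this method for any other non-null obj.
        return this == obj;
      }

      @Override
      public int hashCode() {
        return System.identityHashCode(this);
      }
    }
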
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java
index f43573e..02824ba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java
@@ -76,6 +76,7 @@ public class MemStoreMergerSegmentsIterator extends MemStoreSegmentsIterator {
return null;
}
+ @Override
public void close() {
if (closed) {
return;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
index eaaa4ae..09929e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
@@ -788,7 +788,8 @@ class MetricsRegionServerWrapperImpl
OptionalDouble storeAvgStoreFileAge = store.getAvgStoreFileAge();
if (storeAvgStoreFileAge.isPresent()) {
- avgAgeNumerator += storeAvgStoreFileAge.getAsDouble() * storeHFiles;
+ avgAgeNumerator =
+ (long) (avgAgeNumerator + storeAvgStoreFileAge.getAsDouble() * storeHFiles);
}
tempStorefileIndexSize += store.getStorefilesRootLevelIndexSize();
@@ -931,6 +932,7 @@ class MetricsRegionServerWrapperImpl
return averageRegionSize;
}
+ @Override
public long getDataMissCount() {
if (this.cacheStats == null) {
return 0;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
index 2aa1a82..533a05d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
@@ -249,7 +249,7 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
OptionalDouble storeAvgStoreFileAge = store.getAvgStoreFileAge();
if (storeAvgStoreFileAge.isPresent()) {
- avgAgeNumerator += storeAvgStoreFileAge.getAsDouble() * storeHFiles;
+ avgAgeNumerator += (long) storeAvgStoreFileAge.getAsDouble() * storeHFiles;
}
}
}
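
Worth noting that the two average-file-age hunks resolve the same lossy-conversion warning differently: MetricsRegionServerWrapperImpl computes the sum in double and casts once at the end, while here the cast binds only to getAsDouble() (a cast has higher precedence than *), so the average is truncated to a long before the multiplication. Both compile cleanly; they just round at different points:

    public final class CastPrecedence {
      public static void main(String[] args) {
        double avg = 2.9;
        int files = 10;
        long truncateFirst = (long) avg * files;   // (long) 2.9 = 2, then * 10 = 20
        long truncateLast  = (long) (avg * files); // 29.0 truncated = 29
        System.out.println(truncateFirst + " vs " + truncateLast);
      }
    }
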
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
index 54095e0..0c3551b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
@@ -74,8 +74,12 @@ public class MultiVersionConcurrencyControl {
public void advanceTo(long newStartPoint) {
while (true) {
long seqId = this.getWritePoint();
- if (seqId >= newStartPoint) break;
- if (this.tryAdvanceTo(/* newSeqId = */ newStartPoint, /* expected = */ seqId)) break;
+ if (seqId >= newStartPoint) {
+ break;
+ }
+ if (this.tryAdvanceTo(newStartPoint, seqId)) {
+ break;
+ }
}
}
@@ -239,6 +243,7 @@ public class MultiVersionConcurrencyControl {
}
@VisibleForTesting
+ @Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("readPoint", readPoint)
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index b6c0ebe..5a01581 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -2807,7 +2807,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
private static final long serialVersionUID = -4305297078988180130L;
@Override
- public Throwable fillInStackTrace() {
+ public synchronized Throwable fillInStackTrace() {
return this;
}
};
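
Throwable declares fillInStackTrace() as synchronized, so error-prone's UnsynchronizedOverridesSynchronized check flags overrides that drop the modifier; adding it restores the parent's declared locking behavior. The return-this override itself is the usual preallocated-exception idiom, sketched here with a hypothetical exception type:

    public final class CheapException extends RuntimeException {
      private static final long serialVersionUID = 1L;

      public static final CheapException INSTANCE = new CheapException();

      private CheapException() {
        super("scanner lease expired"); // message is illustrative
      }

      @Override
      public synchronized Throwable fillInStackTrace() {
        // Skipping stack capture makes throwing the preallocated instance
        // cheap; 'synchronized' matches Throwable's declaration.
        return this;
      }
    }
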
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
index dc1708c..1986668 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
@@ -279,6 +279,7 @@ public class RegionServerCoprocessorHost extends
* @return An instance of RegionServerServices, an object NOT for general user-space Coprocessor
* consumption.
*/
+ @Override
public RegionServerServices getRegionServerServices() {
return this.regionServerServices;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
index 017e0fb..6b2267f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java
@@ -435,8 +435,8 @@ public class ScannerContext {
TIME_LIMIT_REACHED_MID_ROW(true, true),
BATCH_LIMIT_REACHED(true, true);
- private boolean moreValues;
- private boolean limitReached;
+ private final boolean moreValues;
+ private final boolean limitReached;
private NextState(boolean moreValues, boolean limitReached) {
this.moreValues = moreValues;
@@ -492,13 +492,13 @@ public class ScannerContext {
* limits, the checker must know their own scope (i.e. are they checking the limits between
* rows, between cells, etc...)
*/
- int depth;
+ final int depth;
LimitScope(int depth) {
this.depth = depth;
}
- int depth() {
+ final int depth() {
return depth;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
index b67b54e..0b1d251 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
@@ -97,7 +97,7 @@ public class ServerNonceManager {
}
public boolean isExpired(long minRelevantTime) {
- return getActivityTime() < (minRelevantTime & (~0l >>> 3));
+ return getActivityTime() < (minRelevantTime & (~0L >>> 3));
}
public void setMvcc(long mvcc) {
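
The only change here is the literal suffix: a lowercase l is easily misread as the digit 1, so style checkers require the uppercase 0L form. The expression ~0L >>> 3 builds a mask with the top three bits cleared:

    public final class LiteralDemo {
      public static void main(String[] args) {
        // long x = 0l;  // legal, but '0l' reads like '01' in many fonts
        long mask = ~0L >>> 3;  // 61 low bits set, top 3 clear
        System.out.println(Long.toBinaryString(mask).length()); // 61
      }
    }
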
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
index 4f3e0f2..9753080 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java
@@ -24,6 +24,7 @@ public class SteppingSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy
* This allows a table to spread quickly across servers, while avoiding creating
* too many regions.
*/
+ @Override
protected long getSizeToCheck(final int tableRegionsCount) {
return tableRegionsCount == 1 ? this.initialSize : getDesiredMaxFileSize();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
index 89b2acd..80d0ad7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
@@ -187,14 +187,17 @@ public class StoreFileScanner implements KeyValueScanner {
return scanners;
}
+ @Override
public String toString() {
return "StoreFileScanner[" + hfs.toString() + ", cur=" + cur + "]";
}
+ @Override
public Cell peek() {
return cur;
}
+ @Override
public Cell next() throws IOException {
Cell retKey = cur;
@@ -215,6 +218,7 @@ public class StoreFileScanner implements KeyValueScanner {
return retKey;
}
+ @Override
public boolean seek(Cell key) throws IOException {
if (seekCount != null) seekCount.increment();
@@ -242,6 +246,7 @@ public class StoreFileScanner implements KeyValueScanner {
}
}
+ @Override
public boolean reseek(Cell key) throws IOException {
if (seekCount != null) seekCount.increment();
@@ -298,6 +303,7 @@ public class StoreFileScanner implements KeyValueScanner {
return true;
}
+ @Override
public void close() {
if (closed) return;
cur = null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index 595231f..59b91d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -104,12 +104,8 @@ public class StripeStoreFlusher extends StoreFlusher {
return new StripeMultiFileWriter.WriterFactory() {
@Override
public StoreFileWriter createWriter() throws IOException {
- StoreFileWriter writer = store.createWriterInTmp(
- kvCount, store.getColumnFamilyDescriptor().getCompressionType(),
- /* isCompaction = */ false,
- /* includeMVCCReadpoint = */ true,
- /* includesTags = */ true,
- /* shouldDropBehind = */ false);
+ StoreFileWriter writer = store.createWriterInTmp(kvCount,
+ store.getColumnFamilyDescriptor().getCompressionType(), false, true, true, false);
return writer;
}
};
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index ed4a025..056f076 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.regionserver.CellSink;
-import org.apache.hadoop.hbase.regionserver.CustomizedScanInfoBuilder;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -261,10 +260,8 @@ public abstract class Compactor<T extends CellSink> {
throws IOException {
// When all MVCC readpoints are 0, don't write them.
// See HBASE-8166, HBASE-12600, and HBASE-13389.
- return store.createWriterInTmp(fd.maxKeyCount, this.compactionCompression,
- /* isCompaction = */true,
- /* includeMVCCReadpoint = */fd.maxMVCCReadpoint > 0,
- /* includesTags = */fd.maxTagsLength > 0, shouldDropBehind);
+ return store.createWriterInTmp(fd.maxKeyCount, this.compactionCompression, true,
+ fd.maxMVCCReadpoint > 0, fd.maxTagsLength > 0, shouldDropBehind);
}
private ScanInfo preCompactScannerOpen(CompactionRequestImpl request, ScanType scanType,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
index 905562c..cf04d00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java
@@ -108,6 +108,7 @@ public class DateTieredCompactionPolicy extends SortedCompactionPolicy {
}
}
+ @Override
public boolean shouldPerformMajorCompaction(Collection<HStoreFile> filesToCompact)
throws IOException {
long mcTime = getNextMajorCompactTime(filesToCompact);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index a6ea9b2..e0be6cf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -209,6 +209,7 @@ public class RatioBasedCompactionPolicy extends SortedCompactionPolicy {
* @param filesCompacting files being scheduled to compact.
* @return true to schedule a request.
*/
+ @Override
public boolean needsCompaction(Collection<HStoreFile> storeFiles,
List<HStoreFile> filesCompacting) {
int numCandidates = storeFiles.size() - filesCompacting.size();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
index 4f6aba9..3eb830a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java
@@ -106,6 +106,7 @@ public abstract class SortedCompactionPolicy extends CompactionPolicy {
* @param filesToCompact Files to compact. Can be null.
* @return True if we should run a major compaction.
*/
+ @Override
public abstract boolean shouldPerformMajorCompaction(Collection<HStoreFile> filesToCompact)
throws IOException;
@@ -154,6 +155,7 @@ public abstract class SortedCompactionPolicy extends CompactionPolicy {
* @param compactionSize Total size of some compaction
* @return whether this should be a large or small compaction
*/
+ @Override
public boolean throttleCompaction(long compactionSize) {
return compactionSize > comConf.getThrottlePoint();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java
index 85394fd..c0f13c0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java
@@ -89,10 +89,12 @@ public class ExplicitColumnTracker implements ColumnTracker {
/**
* Done when there are no more columns to match against.
*/
+ @Override
public boolean done() {
return this.index >= columns.length;
}
+ @Override
public ColumnCount getColumnHint() {
return this.column;
}
@@ -182,6 +184,7 @@ public class ExplicitColumnTracker implements ColumnTracker {
}
// Called between every row.
+ @Override
public void reset() {
this.index = 0;
this.column = this.columns[this.index];
@@ -240,6 +243,7 @@ public class ExplicitColumnTracker implements ColumnTracker {
}
}
+ @Override
public boolean isDone(long timestamp) {
return minVersions <= 0 && isExpired(timestamp);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java
index 419e93b..f2ad1e6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java
@@ -180,6 +180,7 @@ public class ScanWildcardColumnTracker implements ColumnTracker {
* scanner).
* @return The column count.
*/
+ @Override
public ColumnCount getColumnHint() {
return null;
}
@@ -205,6 +206,7 @@ public class ScanWildcardColumnTracker implements ColumnTracker {
}
}
+ @Override
public boolean isDone(long timestamp) {
return minVersions <= 0 && isExpired(timestamp);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
index 0ace782..faf3b77 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java
@@ -346,7 +346,7 @@ public class AsyncFSWAL extends AbstractFSWAL<AsyncWriter> {
long currentHighestProcessedAppendTxid = highestProcessedAppendTxid;
highestProcessedAppendTxidAtLastSync = currentHighestProcessedAppendTxid;
final long startTimeNs = System.nanoTime();
- final long epoch = epochAndState >>> 2;
+ final long epoch = (long) epochAndState >>> 2L;
writer.sync().whenCompleteAsync((result, error) -> {
if (error != null) {
syncFailed(epoch, error);
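Casting epochAndState to long before the unsigned shift pins down the width of the arithmetic: without the cast the shift is performed on the int (shift distance taken mod 32) and the result widens afterwards; with it, the value is sign-extended first and shifted in 64 bits. The two forms agree while the sign bit is clear, so the cast mainly documents which evaluation order is intended. A sketch, assuming an int-typed state word:

    int state = 0x80000004;          // sign bit set, illustrative value
    long a = state >>> 2;            // 32-bit shift, then widen: 0x20000001L
    long b = (long) state >>> 2;     // widen (sign-extend), then 64-bit shift:
                                     // 0x3FFFFFFFE0000001L
    // For state >= 0, a == b.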
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 0c880f5..101e64b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -625,7 +625,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
} finally {
rollWriterLock.unlock();
}
- if (lowReplication || writer != null && writer.getLength() > logrollsize) {
+ if (lowReplication || (writer != null && writer.getLength() > logrollsize)) {
requestLogRoll(lowReplication);
}
}
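The added parentheses in the roll check do not change behavior, since && binds tighter than ||; findbugs just wants the grouping spelled out so readers need not recall the precedence table. The same parenthesization lands in VisibilityNewVersionBehaivorTracker below. Equivalence in miniature:

    boolean lowReplication = false;
    Object writer = new Object();
    long length = 256, logrollsize = 128;
    boolean implicit = lowReplication || writer != null && length > logrollsize;
    boolean explicit = lowReplication || (writer != null && length > logrollsize);
    assert implicit == explicit;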
@@ -866,7 +866,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
private final SyncFuture[] syncFutures;
// Had 'interesting' issues when this was non-volatile. On occasion, we'd not pass all
// syncFutures to the next sync'ing thread.
- private volatile int syncFuturesCount = 0;
+ private AtomicInteger syncFuturesCount = new AtomicInteger();
private volatile SafePointZigZagLatch zigzagLatch;
/**
* Set if we get an exception appending or syncing so that all subsequence appends and syncs on
@@ -894,10 +894,10 @@ public class FSHLog extends AbstractFSWAL<Writer> {
private void cleanupOutstandingSyncsOnException(final long sequence, final Exception e) {
// There could be handler-count syncFutures outstanding.
- for (int i = 0; i < this.syncFuturesCount; i++) {
+ for (int i = 0; i < this.syncFuturesCount.get(); i++) {
this.syncFutures[i].done(sequence, e);
}
- this.syncFuturesCount = 0;
+ this.syncFuturesCount.set(0);
}
/**
@@ -905,7 +905,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
*/
private boolean isOutstandingSyncs() {
// Look at SyncFutures in the EventHandler
- for (int i = 0; i < this.syncFuturesCount; i++) {
+ for (int i = 0; i < this.syncFuturesCount.get(); i++) {
if (!this.syncFutures[i].isDone()) {
return true;
}
@@ -938,9 +938,9 @@ public class FSHLog extends AbstractFSWAL<Writer> {
try {
if (truck.type() == RingBufferTruck.Type.SYNC) {
- this.syncFutures[this.syncFuturesCount++] = truck.unloadSync();
+ this.syncFutures[this.syncFuturesCount.getAndIncrement()] = truck.unloadSync();
// Force flush of syncs if we are carrying a full complement of syncFutures.
- if (this.syncFuturesCount == this.syncFutures.length) {
+ if (this.syncFuturesCount.get() == this.syncFutures.length) {
endOfBatch = true;
}
} else if (truck.type() == RingBufferTruck.Type.APPEND) {
@@ -979,7 +979,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
if (this.exception == null) {
// If not a batch, return to consume more events from the ring buffer before proceeding;
// we want to get up a batch of syncs and appends before we go do a filesystem sync.
- if (!endOfBatch || this.syncFuturesCount <= 0) {
+ if (!endOfBatch || this.syncFuturesCount.get() <= 0) {
return;
}
// syncRunnerIndex is bound to the range [0, Integer.MAX_INT - 1] as follows:
@@ -997,7 +997,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
// Below expects that the offer 'transfers' responsibility for the outstanding syncs to
// the syncRunner. We should never get an exception in here.
this.syncRunners[this.syncRunnerIndex].offer(sequence, this.syncFutures,
- this.syncFuturesCount);
+ this.syncFuturesCount.get());
} catch (Exception e) {
// Should NEVER get here.
requestLogRoll();
@@ -1010,7 +1010,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
? this.exception : new DamagedWALException("On sync", this.exception));
}
attainSafePoint(sequence);
- this.syncFuturesCount = 0;
+ this.syncFuturesCount.set(0);
} catch (Throwable t) {
LOG.error("UNEXPECTED!!! syncFutures.length=" + this.syncFutures.length, t);
}
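The syncFuturesCount rewrite is the one behavioral change in this file: ++ and = 0 on a volatile int are separate read and write steps, so concurrent threads can interleave and lose an update; volatile guarantees visibility, not atomicity. AtomicInteger turns the read-modify-write into a single atomic operation. A reduced sketch of the hazard (class and field names illustrative):

    import java.util.concurrent.atomic.AtomicInteger;

    class SyncCounter {
      private volatile int racy = 0;
      private final AtomicInteger safe = new AtomicInteger();

      void bump() {
        racy++;                    // read, add, write: two callers can both
                                   // read the same value and a count is lost
        safe.getAndIncrement();    // atomic fetch-and-add, no lost updates
      }
    }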
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java
index 57eccbb..4e88df0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java
@@ -85,6 +85,7 @@ class FSWALEntry extends Entry {
}
}
+ @Override
public String toString() {
return "sequence=" + this.txid + ", " + super.toString();
};
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index ebb6079..5d8d8c0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -102,10 +102,12 @@ public class ProtobufLogReader extends ReaderBase {
public long trailerSize() {
if (trailerPresent) {
// sizeof PB_WAL_COMPLETE_MAGIC + sizof trailerSize + trailer
- final long calculatedSize = PB_WAL_COMPLETE_MAGIC.length + Bytes.SIZEOF_INT + trailer.getSerializedSize();
+ final long calculatedSize = (long) PB_WAL_COMPLETE_MAGIC.length + Bytes.SIZEOF_INT
+ + trailer.getSerializedSize();
final long expectedSize = fileLength - walEditsStopOffset;
if (expectedSize != calculatedSize) {
- LOG.warn("After parsing the trailer, we expect the total footer to be "+ expectedSize +" bytes, but we calculate it as being " + calculatedSize);
+ LOG.warn("After parsing the trailer, we expect the total footer to be {} bytes, but we "
+ + "calculate it as being {}", expectedSize, calculatedSize);
}
return expectedSize;
} else {
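Two fixes share this hunk. First, casting PB_WAL_COMPLETE_MAGIC.length to long makes every subsequent + a 64-bit addition, so the sum cannot overflow in int before widening. Second, the warning switches to SLF4J's {} placeholders, which defers string construction until WARN is actually enabled. The widening rule in isolation:

    byte[] magic = new byte[8];    // stands in for PB_WAL_COMPLETE_MAGIC
    int sizeofInt = 4, trailerSize = 32;
    // Cast the first operand and the whole chain of additions is done in long:
    long calculated = (long) magic.length + sizeofInt + trailerSize;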
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
index 2093421..42d0299 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java
@@ -84,7 +84,7 @@ public abstract class BaseReplicationEndpoint extends AbstractService
for (String filterName : filterNames) {
try {
Class<?> clazz = Class.forName(filterName);
- filters.add((WALEntryFilter) clazz.newInstance());
+ filters.add((WALEntryFilter) clazz.getDeclaredConstructor().newInstance());
} catch (Exception e) {
LOG.error("Unable to create WALEntryFilter " + filterName, e);
}
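Class.newInstance() has been deprecated since Java 9: it needs a visible no-arg constructor and rethrows any checked exception the constructor throws without declaring it. getDeclaredConstructor().newInstance() wraps constructor failures in InvocationTargetException, which the surrounding catch (Exception e) already absorbs. The same substitution recurs in ReplicationSink, RegionSplitter, and FSHLogProvider below. A sketch (the target class is arbitrary):

    // Throws ReflectiveOperationException subtypes: NoSuchMethodException,
    // InstantiationException, IllegalAccessException, InvocationTargetException.
    Class<?> clazz = Class.forName("java.util.ArrayList");
    Object instance = clazz.getDeclaredConstructor().newInstance();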
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
index b28c58f..8a4d331 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java
@@ -35,7 +35,7 @@ public class DefaultSourceFSConfigurationProvider implements SourceFSConfigurati
LoggerFactory.getLogger(DefaultSourceFSConfigurationProvider.class);
// Map containing all the source clusters configurations against their replication cluster id
- private Map<String, Configuration> sourceClustersConfs = new HashMap<>();
+ private final Map<String, Configuration> sourceClustersConfs = new HashMap<>();
private static final String XML = ".xml";
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index dcd79a6..28494e6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -105,6 +105,7 @@ public class Replication implements
public Replication() {
}
+ @Override
public void initialize(Server server, FileSystem fs, Path logDir, Path oldLogDir,
WALFileLengthProvider walFileLengthProvider) throws IOException {
this.server = server;
@@ -165,12 +166,14 @@ public class Replication implements
/*
* Returns an object to listen to new wal changes
**/
+ @Override
public WALActionsListener getWALActionsListener() {
return this;
}
/**
* Stops replication service.
*/
+ @Override
public void stopReplicationService() {
join();
}
@@ -199,6 +202,7 @@ public class Replication implements
* @param sourceHFileArchiveDirPath Path that point to the source cluster hfile archive directory
* @throws IOException
*/
+ @Override
public void replicateLogEntries(List<WALEntry> entries, CellScanner cells,
String replicationClusterId, String sourceBaseNamespaceDirPath,
String sourceHFileArchiveDirPath) throws IOException {
@@ -211,6 +215,7 @@ public class Replication implements
* it starts
* @throws IOException
*/
+ @Override
public void startReplicationService() throws IOException {
try {
this.replicationManager.init();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
index 57e185a..902971e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
@@ -110,7 +110,7 @@ public class ReplicationSink {
try {
@SuppressWarnings("rawtypes")
Class c = Class.forName(className);
- this.provider = (SourceFSConfigurationProvider) c.newInstance();
+ this.provider = (SourceFSConfigurationProvider) c.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new IllegalArgumentException("Configured source fs configuration provider class "
+ className + " throws error.", e);
@@ -123,7 +123,7 @@ public class ReplicationSink {
WALEntrySinkFilter filter = null;
try {
filter = walEntryFilterClass == null? null:
- (WALEntrySinkFilter)walEntryFilterClass.newInstance();
+ (WALEntrySinkFilter)walEntryFilterClass.getDeclaredConstructor().newInstance();
} catch (Exception e) {
LOG.warn("Failed to instantiate " + walEntryFilterClass);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index 16fb4a7..9db3cc2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -40,11 +40,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.replication.ChainWALEntryFilter;
import org.apache.hadoop.hbase.replication.ClusterMarkingEntryFilter;
@@ -61,8 +57,11 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
/**
* Class that handles the source of a replication stream.
@@ -225,7 +224,8 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf
// A peerId will not have "-" in its name, see HBASE-11394
peerId = peerClusterZnode.split("-")[0];
}
- Map<TableName, List<String>> tableCFMap = replicationPeers.getConnectedPeer(peerId).getTableCFs();
+ Map<TableName, List<String>> tableCFMap =
+ replicationPeers.getConnectedPeer(peerId).getTableCFs();
if (tableCFMap != null) {
List<String> tableCfs = tableCFMap.get(tableName);
if (tableCFMap.containsKey(tableName)
@@ -470,7 +470,8 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf
}
if (this.replicationEndpoint != null) {
try {
- this.replicationEndpoint.awaitTerminated(sleepForRetries * maxRetriesMultiplier, TimeUnit.MILLISECONDS);
+ this.replicationEndpoint
+ .awaitTerminated(sleepForRetries * maxRetriesMultiplier, TimeUnit.MILLISECONDS);
} catch (TimeoutException te) {
LOG.warn("Got exception while waiting for endpoint to shutdown for replication source :"
+ this.peerClusterZnode,
@@ -494,7 +495,9 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf
public Path getCurrentPath() {
// only for testing
for (ReplicationSourceShipper worker : workerThreads.values()) {
- if (worker.getCurrentPath() != null) return worker.getCurrentPath();
+ if (worker.getCurrentPath() != null) {
+ return worker.getCurrentPath();
+ }
}
return null;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index 23ae704..55ebdc1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -110,7 +110,7 @@ public class ReplicationSourceManager implements ReplicationListener {
private final Configuration conf;
private final FileSystem fs;
// The paths to the latest log of each wal group, for new coming peers
- private Set<Path> latestPaths;
+ private final Set<Path> latestPaths;
// Path to the wals directories
private final Path logDir;
// Path to the wal archive
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
index 4643a22..e56fab2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java
@@ -355,7 +355,8 @@ public class ReplicationSourceWALReader extends Thread {
List<StoreDescriptor> stores = bld.getStoresList();
int totalStores = stores.size();
for (int j = 0; j < totalStores; j++) {
- totalStoreFilesSize += stores.get(j).getStoreFileSizeBytes();
+ totalStoreFilesSize =
+ (int) (totalStoreFilesSize + stores.get(j).getStoreFileSizeBytes());
}
} catch (IOException e) {
LOG.error("Failed to deserialize bulk load entry from wal edit. "
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
index bfc415c..7e444cb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
@@ -154,6 +154,7 @@ class AccessControlFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
// no implementation, server-side use only
throw new UnsupportedOperationException(
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 5a3c883..f191c9d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -2186,8 +2186,8 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
// Also using acl as table name to be inline with the results of global admin and will
// help in avoiding any leakage of information about being superusers.
for (String user: Superusers.getSuperUsers()) {
- perms.add(new UserPermission(user.getBytes(), AccessControlLists.ACL_TABLE_NAME, null,
- Action.values()));
+ perms.add(new UserPermission(Bytes.toBytes(user), AccessControlLists.ACL_TABLE_NAME,
+ null, Action.values()));
}
}
response = AccessControlUtil.buildGetUserPermissionsResponse(perms);
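String.getBytes() with no argument encodes with the platform default charset, so identical code can emit different bytes on differently configured hosts; Bytes.toBytes(String) pins the encoding to UTF-8. The ExpressionParser hunk below applies the same rule on the decoding side with StandardCharsets.UTF_8, and LoadIncrementalHFiles below makes the same swap for family names. The hazard in miniature:

    import java.nio.charset.StandardCharsets;

    String user = "José";
    byte[] platform = user.getBytes();                     // varies with JVM config
    byte[] utf8 = user.getBytes(StandardCharsets.UTF_8);   // fixed encoding
    assert user.equals(new String(utf8, StandardCharsets.UTF_8));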
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java
index 1949b98..cecca41 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java
@@ -207,6 +207,7 @@ public class AuthResult {
return sb.toString();
}
+ @Override
public String toString() {
return "AuthResult" + toContextString();
}
@@ -279,6 +280,7 @@ public class AuthResult {
return this;
}
+ @Override
public String toString() {
String familiesString = toFamiliesString(families, family, qualifier);
String[] params = new String[] {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
index de8ea5d..59b91a0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
@@ -330,6 +330,7 @@ public class AuthenticationTokenSecretManager
interrupt();
}
+ @Override
public void run() {
zkLeader.start();
zkLeader.waitToBecomeLeader();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
index 0bc74b1..c2dd046 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.security.visibility;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
@@ -103,7 +104,8 @@ public class ExpressionParser {
}
index++;
} while (index < endPos && !isEndOfLabel(exp[index]));
- leafExp = new String(exp, labelOffset, index - labelOffset).trim();
+ leafExp =
+ new String(exp, labelOffset, index - labelOffset, StandardCharsets.UTF_8).trim();
if (leafExp.isEmpty()) {
throw new ParseException("Error parsing expression " + expS + " at column : " + index);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index 1ba6029..6e00f40 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -36,6 +36,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
@@ -753,8 +754,9 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
logResult(true, "addLabels", "Adding labels allowed", null, labels, null);
int i = 0;
for (OperationStatus status : opStatus) {
- while (response.getResult(i) != successResult)
+ while (!Objects.equals(response.getResult(i), successResult)) {
i++;
+ }
if (status.getOperationStatusCode() != SUCCESS) {
RegionActionResult.Builder failureResultBuilder = RegionActionResult.newBuilder();
failureResultBuilder.setException(buildException(new DoNotRetryIOException(
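response.getResult(i) != successResult compared object references, which holds only while both sides are literally the same instance; Objects.equals performs a null-safe value comparison instead. AbstractFSWALProvider and FSTableDescriptors below receive the same reference-versus-value fix. A sketch with boxed integers:

    import java.util.Objects;

    Integer a = 1000, b = 1000;              // distinct boxes: 1000 lies outside
                                             // the Integer cache of -128..127
    boolean byReference = (a == b);          // false
    boolean byValue = Objects.equals(a, b);  // true, and safe if either is null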
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
index f6ed72f..f3e4853 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java
@@ -149,7 +149,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack
List<Tag> putVisTags = new ArrayList<>();
Byte putCellVisTagsFormat = VisibilityUtils.extractVisibilityTags(put, putVisTags);
return putVisTags.isEmpty() == delInfo.tags.isEmpty() && (
- putVisTags.isEmpty() && delInfo.tags.isEmpty() || VisibilityLabelServiceManager
+ (putVisTags.isEmpty() && delInfo.tags.isEmpty()) || VisibilityLabelServiceManager
.getInstance().getVisibilityLabelService()
.matchVisibility(putVisTags, putCellVisTagsFormat, delInfo.tags, delInfo.format));
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java
index 78b5037..fd479b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java
@@ -58,6 +58,7 @@ public class LeafExpressionNode implements ExpressionNode {
return true;
}
+ @Override
public LeafExpressionNode deepClone() {
LeafExpressionNode clone = new LeafExpressionNode(this.identifier);
return clone;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
index 77b34e9..83610fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java
@@ -91,6 +91,7 @@ public class NonLeafExpressionNode implements ExpressionNode {
return this.op == Operator.NOT;
}
+ @Override
public NonLeafExpressionNode deepClone() {
NonLeafExpressionNode clone = new NonLeafExpressionNode(this.op);
for (ExpressionNode exp : this.childExps) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java
index fdec5ac..f7ffe46 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java
@@ -23,12 +23,13 @@ import org.apache.yetus.audience.InterfaceAudience;
public enum Operator {
AND('&'), OR('|'), NOT('!');
- private char rep;
+ private final char rep;
private Operator(char rep) {
this.rep = rep;
}
+ @Override
public String toString() {
return String.valueOf(this.rep);
};
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index 7d7e526..97a74ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -300,7 +300,8 @@ public final class SnapshotInfo extends AbstractHBaseTool {
boolean inArchive = false;
long size = -1;
try {
- if ((inArchive = fs.exists(link.getArchivePath()))) {
+ if (fs.exists(link.getArchivePath())) {
+ inArchive = true;
size = fs.getFileStatus(link.getArchivePath()).getLen();
hfilesArchiveSize.addAndGet(size);
hfilesArchiveCount.incrementAndGet();
@@ -311,7 +312,8 @@ public final class SnapshotInfo extends AbstractHBaseTool {
!isArchivedFileStillReferenced(link.getArchivePath(), filesMap)) {
nonSharedHfilesArchiveSize.addAndGet(size);
}
- } else if (inArchive = fs.exists(link.getMobPath())) {
+ } else if (fs.exists(link.getMobPath())) {
+ inArchive = true;
size = fs.getFileStatus(link.getMobPath()).getLen();
hfilesMobSize.addAndGet(size);
hfilesMobCount.incrementAndGet();
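Hoisting inArchive = fs.exists(...) out of the if condition removes a findbugs-flagged idiom: an assignment inside a conditional is easily misread as ==, and its side effect is invisible at a glance. Rewritten, the test itself is side-effect free:

    boolean pathExists = true;     // stands in for fs.exists(link.getArchivePath())
    // before: if ((inArchive = pathExists)) { ... }  -- assignment hides in the test
    boolean inArchive = false;
    if (pathExists) {
      inArchive = true;
      // ... account for the archived file's size ...
    }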
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
index a5468ee..1d7f4f6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java
@@ -75,23 +75,28 @@ public final class SnapshotManifestV1 {
this.fs = fs;
}
+ @Override
public HRegionFileSystem regionOpen(final RegionInfo regionInfo) throws IOException {
HRegionFileSystem snapshotRegionFs = HRegionFileSystem.createRegionOnFileSystem(conf,
fs, snapshotDir, regionInfo);
return snapshotRegionFs;
}
+ @Override
public void regionClose(final HRegionFileSystem region) {
}
+ @Override
public Path familyOpen(final HRegionFileSystem snapshotRegionFs, final byte[] familyName) {
Path familyDir = snapshotRegionFs.getStoreDir(Bytes.toString(familyName));
return familyDir;
}
+ @Override
public void familyClose(final HRegionFileSystem region, final Path family) {
}
+ @Override
public void storeFile(final HRegionFileSystem region, final Path familyDir,
final StoreFileInfo storeFile) throws IOException {
Path referenceFile = new Path(familyDir, storeFile.getPath().getName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
index 4d35f0b..4e60d67 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
@@ -77,12 +77,14 @@ public final class SnapshotManifestV2 {
this.fs = fs;
}
+ @Override
public SnapshotRegionManifest.Builder regionOpen(final RegionInfo regionInfo) {
SnapshotRegionManifest.Builder manifest = SnapshotRegionManifest.newBuilder();
manifest.setRegionInfo(ProtobufUtil.toRegionInfo(regionInfo));
return manifest;
}
+ @Override
public void regionClose(final SnapshotRegionManifest.Builder region) throws IOException {
// we should ensure the snapshot dir exist, maybe it has been deleted by master
// see HBASE-16464
@@ -99,6 +101,7 @@ public final class SnapshotManifestV2 {
}
}
+ @Override
public SnapshotRegionManifest.FamilyFiles.Builder familyOpen(
final SnapshotRegionManifest.Builder region, final byte[] familyName) {
SnapshotRegionManifest.FamilyFiles.Builder family =
@@ -107,11 +110,13 @@ public final class SnapshotManifestV2 {
return family;
}
+ @Override
public void familyClose(final SnapshotRegionManifest.Builder region,
final SnapshotRegionManifest.FamilyFiles.Builder family) {
region.addFamilyFiles(family.build());
}
+ @Override
public void storeFile(final SnapshotRegionManifest.Builder region,
final SnapshotRegionManifest.FamilyFiles.Builder family, final StoreFileInfo storeFile)
throws IOException {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
index c14c944..3cff047 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java
@@ -971,7 +971,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
continue;
}
Path familyDir = familyStat.getPath();
- byte[] familyName = familyDir.getName().getBytes();
+ byte[] familyName = Bytes.toBytes(familyDir.getName());
// Skip invalid family
try {
ColumnFamilyDescriptorBuilder.isLegalColumnFamilyName(familyName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
index 9950570..efad97e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BoundedPriorityBlockingQueue.java
@@ -155,6 +155,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
this.queue = new PriorityQueue<>(capacity, comparator);
}
+ @Override
public boolean offer(E e) {
if (e == null) throw new NullPointerException();
@@ -171,6 +172,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
return false;
}
+ @Override
public void put(E e) throws InterruptedException {
if (e == null) throw new NullPointerException();
@@ -186,6 +188,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
}
}
+ @Override
public boolean offer(E e, long timeout, TimeUnit unit)
throws InterruptedException {
if (e == null) throw new NullPointerException();
@@ -206,6 +209,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
return true;
}
+ @Override
public E take() throws InterruptedException {
E result = null;
lock.lockInterruptibly();
@@ -221,6 +225,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
return result;
}
+ @Override
public E poll() {
E result = null;
lock.lock();
@@ -235,6 +240,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
return result;
}
+ @Override
public E poll(long timeout, TimeUnit unit)
throws InterruptedException {
long nanos = unit.toNanos(timeout);
@@ -254,6 +260,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
return result;
}
+ @Override
public E peek() {
lock.lock();
try {
@@ -263,6 +270,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
}
}
+ @Override
public int size() {
lock.lock();
try {
@@ -272,6 +280,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
}
}
+ @Override
public Iterator<E> iterator() {
throw new UnsupportedOperationException();
}
@@ -280,6 +289,7 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
return queue.comparator();
}
+ @Override
public int remainingCapacity() {
lock.lock();
try {
@@ -289,10 +299,12 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
}
}
+ @Override
public boolean remove(Object o) {
throw new UnsupportedOperationException();
}
+ @Override
public boolean contains(Object o) {
lock.lock();
try {
@@ -302,10 +314,12 @@ public class BoundedPriorityBlockingQueue<E> extends AbstractQueue<E> implements
}
}
+ @Override
public int drainTo(Collection<? super E> c) {
return drainTo(c, Integer.MAX_VALUE);
}
+ @Override
public int drainTo(Collection<? super E> c, int maxElements) {
if (c == null)
throw new NullPointerException();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
index 4207f39..2cf3bb9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
public class FSMapRUtils extends FSUtils {
private static final Logger LOG = LoggerFactory.getLogger(FSMapRUtils.class);
+ @Override
public void recoverFileLease(final FileSystem fs, final Path p,
Configuration conf, CancelableProgressable reporter) throws IOException {
LOG.info("Recovering file " + p.toString() +
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
index f258e6c..04a3384 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
@@ -52,7 +52,7 @@ class FSRegionScanner implements Runnable {
/**
* Maps each region to the RS with highest locality for that region.
*/
- private Map<String,String> regionToBestLocalityRSMapping;
+ private final Map<String,String> regionToBestLocalityRSMapping;
/**
* Maps region encoded names to maps of hostnames to fractional locality of
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index c76cd90..c3f3bd8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -479,7 +479,7 @@ public class FSTableDescriptors implements TableDescriptors {
// Clean away old versions
for (FileStatus file : status) {
Path path = file.getPath();
- if (file != mostCurrent) {
+ if (!file.equals(mostCurrent)) {
if (!fs.delete(file.getPath(), false)) {
LOG.warn("Failed cleanup of " + path);
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index d0276c0..3ee13c4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -3800,7 +3800,7 @@ public class HBaseFsck extends Configured implements Closeable {
@Override
public int hashCode() {
int hash = Arrays.hashCode(getRegionName());
- hash ^= getRegionId();
+ hash = (int) (hash ^ getRegionId());
hash ^= Arrays.hashCode(getStartKey());
hash ^= Arrays.hashCode(getEndKey());
hash ^= Boolean.valueOf(isOffline()).hashCode();
@@ -3808,7 +3808,7 @@ public class HBaseFsck extends Configured implements Closeable {
if (regionServer != null) {
hash ^= regionServer.hashCode();
}
- hash ^= modTime;
+ hash = (int) (hash ^ modTime);
return hash;
}
}
@@ -4054,7 +4054,7 @@ public class HBaseFsck extends Configured implements Closeable {
return -1;
}
// both l.hdfsEntry and r.hdfsEntry must not be null.
- return (int) (l.hdfsEntry.hri.getRegionId()- r.hdfsEntry.hri.getRegionId());
+ return Long.compare(l.hdfsEntry.hri.getRegionId(), r.hdfsEntry.hri.getRegionId());
}
};
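The comparator fix addresses a genuine bug class: computing (int) (l - r) on longs can overflow and report the wrong ordering, and an inconsistent comparator can corrupt a sort. Long.compare never overflows. The hashCode hunks above make their long-to-int narrowing explicit for the related reason that int ^ long promotes the whole expression to long. The comparator failure in miniature:

    long l = Long.MAX_VALUE, r = -1L;
    int bySubtraction = (int) (l - r);    // l - r wraps to Long.MIN_VALUE, and the
                                          // int cast keeps its low 32 bits: 0 ("equal")
    int byCompare = Long.compare(l, r);   // 1: l correctly sorts after r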
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java
index e06805c..eba9acd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java
@@ -52,6 +52,7 @@ public class IdLock {
this.id = id;
}
+ @Override
public String toString() {
return "id=" + id + ", numWaiter=" + numWaiters + ", isLocked="
+ locked;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
index 7b9cbb6..75b8ccd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
@@ -685,7 +685,7 @@ public class RegionSplitter {
}
}
try {
- return splitClass.asSubclass(SplitAlgorithm.class).newInstance();
+ return splitClass.asSubclass(SplitAlgorithm.class).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new IOException("Problem loading split algorithm: ", e);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java
index 2819b82..f1c9ad3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RowBloomContext.java
@@ -38,6 +38,7 @@ public class RowBloomContext extends BloomContext {
super(bloomFilterWriter, comparator);
}
+ @Override
public void addLastBloomKey(Writer writer) throws IOException {
if (this.getLastCell() != null) {
byte[] key = CellUtil.copyRow(this.getLastCell());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
index a897f40..d3da773 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
@@ -52,10 +52,12 @@ abstract public class ShutdownHookManager {
private static class ShutdownHookManagerV1 extends ShutdownHookManager {
// priority is ignored in hadoop versions earlier than 2.0
- public void addShutdownHook(Thread shutdownHookThread, int priority) {
+ @Override
+ public void addShutdownHook(Thread shutdownHookThread, int priority) {
Runtime.getRuntime().addShutdownHook(shutdownHookThread);
}
+ @Override
public boolean removeShutdownHook(Runnable shutdownHook) {
Thread shutdownHookThread = null;
if (!(shutdownHook instanceof Thread)) {
@@ -67,6 +69,7 @@ abstract public class ShutdownHookManager {
};
private static class ShutdownHookManagerV2 extends ShutdownHookManager {
+ @Override
public void addShutdownHook(Thread shutdownHookThread, int priority) {
try {
Methods.call(shutdownHookManagerClass,
@@ -79,6 +82,7 @@ abstract public class ShutdownHookManager {
}
}
+ @Override
public boolean removeShutdownHook(Runnable shutdownHook) {
try {
return (Boolean)
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
index 74d502e..d9badfa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Pattern;
@@ -455,7 +456,7 @@ public abstract class AbstractFSWALProvider<T extends AbstractFSWAL<?>> implemen
} catch (FileNotFoundException fnfe) {
// If the log was archived, continue reading from there
Path archivedLog = AbstractFSWALProvider.getArchivedLogPath(path, conf);
- if (path != archivedLog) {
+ if (!Objects.equals(path, archivedLog)) {
return openReader(archivedLog, conf);
} else {
throw fnfe;
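The original path != archivedLog compared references, which error-prone flags (ReferenceEquality): it only behaves as intended if getArchivedLogPath returns the very same Path object when the log is not archived. Objects.equals gives null-safe value equality instead. An illustrative fragment:

  import java.util.Objects;
  import org.apache.hadoop.fs.Path;

  Path a = new Path("/hbase/oldWALs/wal.1");
  Path b = new Path("/hbase/oldWALs/wal.1");
  assert a != b;                 // distinct references
  assert Objects.equals(a, b);   // equal by value; Path implements equals()
  assert !Objects.equals(null, b); // null-safe, no NPE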
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
index 725f9ff..2105490 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
@@ -185,7 +185,7 @@ class DisabledWALProvider implements WALProvider {
public void sync() {
if (!this.listeners.isEmpty()) {
for (WALActionsListener listener : this.listeners) {
- listener.postSync(0l, 0);
+ listener.postSync(0L, 0);
}
}
}
@@ -195,6 +195,7 @@ class DisabledWALProvider implements WALProvider {
sync();
}
+ @Override
public Long startCacheFlush(final byte[] encodedRegionName, Map<byte[], Long>
flushedFamilyNamesToSeq) {
return startCacheFlush(encodedRegionName, flushedFamilyNamesToSeq.keySet());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
index 14505a8..f1662bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java
@@ -63,7 +63,7 @@ public class FSHLogProvider extends AbstractFSWALProvider<FSHLog> {
ProtobufLogWriter.class, Writer.class);
Writer writer = null;
try {
- writer = logWriterClass.newInstance();
+ writer = logWriterClass.getDeclaredConstructor().newInstance();
writer.init(fs, path, conf, overwritable);
return writer;
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
index a3e54a5..a0ef817 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java
@@ -104,19 +104,14 @@ public class RegionGroupingProvider implements WALProvider {
}
LOG.info("Instantiating RegionGroupingStrategy of type " + clazz);
try {
- final RegionGroupingStrategy result = clazz.newInstance();
+ final RegionGroupingStrategy result = clazz.getDeclaredConstructor().newInstance();
result.init(conf, providerId);
return result;
- } catch (InstantiationException exception) {
+ } catch (Exception e) {
LOG.error("couldn't set up region grouping strategy, check config key " +
REGION_GROUPING_STRATEGY);
- LOG.debug("Exception details for failure to load region grouping strategy.", exception);
- throw new IOException("couldn't set up region grouping strategy", exception);
- } catch (IllegalAccessException exception) {
- LOG.error("couldn't set up region grouping strategy, check config key " +
- REGION_GROUPING_STRATEGY);
- LOG.debug("Exception details for failure to load region grouping strategy.", exception);
- throw new IOException("couldn't set up region grouping strategy", exception);
+ LOG.debug("Exception details for failure to load region grouping strategy.", e);
+ throw new IOException("couldn't set up region grouping strategy", e);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
index 4008bb0..d478e4f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java
@@ -99,6 +99,7 @@ public interface WAL extends Closeable, WALFileLengthProvider {
* underlying resources after this call; i.e. filesystem based WALs can archive or
* delete files.
*/
+ @Override
void close() throws IOException;
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
index d70b8cd..d59c824 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
@@ -75,7 +75,7 @@ public class WALFactory implements WALFileLengthProvider {
multiwal(RegionGroupingProvider.class),
asyncfs(AsyncFSWALProvider.class);
- Class<? extends WALProvider> clazz;
+ final Class<? extends WALProvider> clazz;
Providers(Class<? extends WALProvider> clazz) {
this.clazz = clazz;
}
@@ -139,17 +139,13 @@ public class WALFactory implements WALFileLengthProvider {
List<WALActionsListener> listeners, String providerId) throws IOException {
LOG.info("Instantiating WALProvider of type " + clazz);
try {
- final WALProvider result = clazz.newInstance();
+ final WALProvider result = clazz.getDeclaredConstructor().newInstance();
result.init(this, conf, listeners, providerId);
return result;
- } catch (InstantiationException exception) {
- LOG.error("couldn't set up WALProvider, the configured class is " + clazz);
- LOG.debug("Exception details for failure to load WALProvider.", exception);
- throw new IOException("couldn't set up WALProvider", exception);
- } catch (IllegalAccessException exception) {
+ } catch (Exception e) {
LOG.error("couldn't set up WALProvider, the configured class is " + clazz);
- LOG.debug("Exception details for failure to load WALProvider.", exception);
- throw new IOException("couldn't set up WALProvider", exception);
+ LOG.debug("Exception details for failure to load WALProvider.", e);
+ throw new IOException("couldn't set up WALProvider", e);
}
}
@@ -294,7 +290,7 @@ public class WALFactory implements WALFileLengthProvider {
AbstractFSWALProvider.Reader reader = null;
while (true) {
try {
- reader = lrClass.newInstance();
+ reader = lrClass.getDeclaredConstructor().newInstance();
reader.init(fs, path, conf, null);
return reader;
} catch (IOException e) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKeyImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKeyImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKeyImpl.java
index 983fae9..0a5acda 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKeyImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKeyImpl.java
@@ -470,8 +470,8 @@ public class WALKeyImpl implements WALKey {
@Override
public int hashCode() {
int result = Bytes.hashCode(this.encodedRegionName);
- result ^= getSequenceId();
- result ^= this.writeTime;
+ result = (int) (result ^ getSequenceId());
+ result = (int) (result ^ this.writeTime);
return result;
}
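The compound form result ^= getSequenceId() XORs an int with a long, promotes to long, then narrows back to int only via the hidden cast that compound assignment inserts; error-prone flags this as NarrowingCompoundAssignment. The rewrite makes the (int) cast explicit without changing the computed value. An equivalent sketch:

  long seqId = 0x1_0000_0005L;
  int result = 3;
  // Compound form hides a narrowing cast: result ^= seqId
  // is really result = (int) (result ^ seqId).
  result = (int) (result ^ seqId); // only the low 32 bits survive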
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 22f7e1a..97f80e0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -1084,7 +1084,7 @@ public class WALSplitter {
protected EntryBuffers entryBuffers;
protected ConcurrentHashMap<String, SinkWriter> writers = new ConcurrentHashMap<>();
- protected ConcurrentHashMap<String, Long> regionMaximumEditLogSeqNum =
+ protected final ConcurrentHashMap<String, Long> regionMaximumEditLogSeqNum =
new ConcurrentHashMap<>();
@@ -1645,8 +1645,10 @@ public class WALSplitter {
List<IOException> thrown, List<Path> paths)
throws InterruptedException, ExecutionException {
for (final Map.Entry<byte[], RegionEntryBuffer> buffer : entryBuffers.buffers.entrySet()) {
- LOG.info("Submitting writeThenClose of " + buffer.getValue().encodedRegionName);
+ LOG.info("Submitting writeThenClose of {}",
+ Arrays.toString(buffer.getValue().encodedRegionName));
completionService.submit(new Callable<Void>() {
+ @Override
public Void call() throws Exception {
Path dst = writeThenClose(buffer.getValue());
paths.add(dst);
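encodedRegionName is a byte[], and string-concatenating an array invokes the default Object.toString(), printing an identity hash rather than the contents. Arrays.toString renders the elements, and the {} placeholder form defers formatting until the log level is enabled. A minimal sketch:

  import java.util.Arrays;

  byte[] name = {104, 98, 97, 115, 101};      // "hbase" in ASCII
  System.out.println("" + name);              // prints something like [B@6d06d69c
  System.out.println(Arrays.toString(name));  // prints [104, 98, 97, 115, 101]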
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
index aec4bbd..44d3e87 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java
@@ -151,6 +151,7 @@ public class AcidGuaranteesTestTool extends AbstractHBaseTool {
table = connection.getTable(TABLE_NAME);
}
+ @Override
public void doAnAction() throws Exception {
// Pick a random row to write into
byte[] targetRow = targetRows[rand.nextInt(targetRows.length)];
@@ -197,6 +198,7 @@ public class AcidGuaranteesTestTool extends AbstractHBaseTool {
table = connection.getTable(TABLE_NAME);
}
+ @Override
public void doAnAction() throws Exception {
Get g = new Get(targetRow);
Result res = table.get(g);
@@ -264,6 +266,7 @@ public class AcidGuaranteesTestTool extends AbstractHBaseTool {
table = connection.getTable(TABLE_NAME);
}
+ @Override
public void doAnAction() throws Exception {
Scan s = new Scan();
for (byte[] family : targetFamilies) {
@@ -344,6 +347,7 @@ public class AcidGuaranteesTestTool extends AbstractHBaseTool {
}
// Add a flusher
ctx.addThread(new RepeatingTestThread(ctx) {
+ @Override
public void doAnAction() throws Exception {
try {
admin.flush(TABLE_NAME);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 92581b8..5f2ffb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.NavigableMap;
import org.apache.hadoop.conf.Configuration;
@@ -383,7 +384,7 @@ public abstract class HBaseTestCase extends TestCase {
if (res_value != null) {
assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
" at timestamp " +
- timestamp, value, new String(res_value));
+ timestamp, value, new String(res_value, StandardCharsets.UTF_8));
}
}
}
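new String(byte[]) without a charset uses the platform default, so the decoded text can differ across machines (error-prone's DefaultCharset warning). Passing StandardCharsets.UTF_8 makes the conversion deterministic. A minimal sketch:

  import java.nio.charset.StandardCharsets;

  byte[] raw = {(byte) 0xE2, (byte) 0x82, (byte) 0xAC}; // UTF-8 bytes for the euro sign
  String s1 = new String(raw);                          // depends on file.encoding
  String s2 = new String(raw, StandardCharsets.UTF_8);  // always decodes the euro sign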
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index a686e33..75abd5e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -33,6 +33,7 @@ import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
@@ -1596,7 +1597,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
if (status.getSecond() != 0) {
LOG.debug(status.getSecond() - status.getFirst() + "/" + status.getSecond()
+ " regions updated.");
- Thread.sleep(1 * 1000l);
+ Thread.sleep(1 * 1000L);
} else {
LOG.debug("All regions updated.");
break;
@@ -1983,7 +1984,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
expectedCount = 1;
}
if (count != expectedCount) {
- String row = new String(new byte[] {b1,b2,b3});
+ String row = new String(new byte[] {b1,b2,b3}, StandardCharsets.UTF_8);
throw new RuntimeException("Row:" + row + " has a seen count of " + count + " " +
"instead of " + expectedCount);
}
@@ -2079,7 +2080,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
get.setConsistency(Consistency.TIMELINE);
Result result = table.get(get);
assertTrue(failMsg, result.containsColumn(f, null));
- assertEquals(failMsg, result.getColumnCells(f, null).size(), 1);
+ assertEquals(failMsg, 1, result.getColumnCells(f, null).size());
Cell cell = result.getColumnLatestCell(f, null);
assertTrue(failMsg,
Bytes.equals(data, 0, data.length, cell.getValueArray(), cell.getValueOffset(),
@@ -2114,7 +2115,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
if (!present) continue;
assertTrue(failMsg, result.containsColumn(f, null));
- assertEquals(failMsg, result.getColumnCells(f, null).size(), 1);
+ assertEquals(failMsg, 1, result.getColumnCells(f, null).size());
Cell cell = result.getColumnLatestCell(f, null);
assertTrue(failMsg,
Bytes.equals(data, 0, data.length, cell.getValueArray(), cell.getValueOffset(),
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index 378f6ec..9959e31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -165,7 +165,8 @@ public class MiniHBaseCluster extends HBaseCluster {
@Override
public void run() {
try {
- this.user.runAs(new PrivilegedAction<Object>(){
+ this.user.runAs(new PrivilegedAction<Object>() {
+ @Override
public Object run() {
runRegionServer();
return null;
@@ -195,6 +196,7 @@ public class MiniHBaseCluster extends HBaseCluster {
@Override
public void abort(final String reason, final Throwable cause) {
this.user.runAs(new PrivilegedAction<Object>() {
+ @Override
public Object run() {
abortRegionServer(reason, cause);
return null;
@@ -497,6 +499,7 @@ public class MiniHBaseCluster extends HBaseCluster {
* Returns the current active master, if available.
* @return the active HMaster, null if none is active.
*/
+ @Override
public MasterService.BlockingInterface getMasterAdminService() {
return this.hbaseCluster.getActiveMaster().getMasterRpcServices();
}
@@ -588,6 +591,7 @@ public class MiniHBaseCluster extends HBaseCluster {
* masters left.
* @throws InterruptedException
*/
+ @Override
public boolean waitForActiveAndReadyMaster(long timeout) throws IOException {
List<JVMClusterUtil.MasterThread> mts;
long start = System.currentTimeMillis();
@@ -628,6 +632,7 @@ public class MiniHBaseCluster extends HBaseCluster {
/**
* Shut down the mini HBase cluster
*/
+ @Override
public void shutdown() throws IOException {
if (this.hbaseCluster != null) {
this.hbaseCluster.shutdown();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
index 1d8de45..86ac2f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
@@ -119,6 +119,7 @@ public abstract class MultithreadedTestUtil {
this.ctx = ctx;
}
+ @Override
public void run() {
try {
doWork();
@@ -143,6 +144,7 @@ public abstract class MultithreadedTestUtil {
super(ctx);
}
+ @Override
public final void doWork() throws Exception {
try {
while (ctx.shouldRun() && !stopped) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
index 06cfdcf..f21d79d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
@@ -49,6 +49,7 @@ public class TestHDFSBlocksDistribution {
}
public class MockHDFSBlocksDistribution extends HDFSBlocksDistribution {
+ @Override
public Map<String,HostAndWeight> getHostAndWeights() {
HashMap<String, HostAndWeight> map = new HashMap<>();
map.put("test", new HostAndWeight(null, 100));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
index c5cda27..1210361 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -93,7 +94,7 @@ public class TestIOFencing {
}
public abstract static class CompactionBlockerRegion extends HRegion {
- volatile int compactCount = 0;
+ AtomicInteger compactCount = new AtomicInteger();
volatile CountDownLatch compactionsBlocked = new CountDownLatch(0);
volatile CountDownLatch compactionsWaiting = new CountDownLatch(0);
@@ -129,7 +130,7 @@ public class TestIOFencing {
try {
return super.compact(compaction, store, throughputController);
} finally {
- compactCount++;
+ compactCount.getAndIncrement();
}
}
@@ -139,7 +140,7 @@ public class TestIOFencing {
try {
return super.compact(compaction, store, throughputController, user);
} finally {
- compactCount++;
+ compactCount.getAndIncrement();
}
}
@@ -336,7 +337,7 @@ public class TestIOFencing {
}
LOG.info("Allowing compaction to proceed");
compactingRegion.allowCompactions();
- while (compactingRegion.compactCount == 0) {
+ while (compactingRegion.compactCount.get() == 0) {
Thread.sleep(1000);
}
// The server we killed stays up until the compaction that was started before it was killed
@@ -349,7 +350,7 @@ public class TestIOFencing {
FIRST_BATCH_COUNT + SECOND_BATCH_COUNT);
admin.majorCompact(TABLE_NAME);
startWaitTime = System.currentTimeMillis();
- while (newRegion.compactCount == 0) {
+ while (newRegion.compactCount.get() == 0) {
Thread.sleep(1000);
assertTrue("New region never compacted",
System.currentTimeMillis() - startWaitTime < 180000);
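compactCount++ on a volatile int is a read-modify-write, so two compaction threads can interleave and lose an increment; volatile guarantees visibility, not atomicity. AtomicInteger makes the increment itself atomic, and the busy-wait loops read it with get(). A minimal sketch:

  import java.util.concurrent.atomic.AtomicInteger;

  AtomicInteger compactCount = new AtomicInteger();
  // each compaction thread, on completion:
  compactCount.getAndIncrement();   // atomic; no lost updates
  // waiting thread:
  while (compactCount.get() == 0) {
    Thread.sleep(1000);             // InterruptedException handling elided
  }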
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
index 7b6c5a5..620abef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java
@@ -163,6 +163,7 @@ public class TestMetaTableAccessorNoCluster {
.thenThrow(new ServiceException("Server not running (2 of 3)"))
.thenThrow(new ServiceException("Server not running (3 of 3)"))
.thenAnswer(new Answer<ScanResponse>() {
+ @Override
public ScanResponse answer(InvocationOnMock invocation) throws Throwable {
((HBaseRpcController) invocation.getArgument(0)).setCellScanner(CellUtil
.createCellScanner(cellScannables));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
index 71492b1..63d2cc2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java
@@ -127,14 +127,14 @@ public class TestMetaTableLocator {
assertEquals(state, MetaTableLocator.getMetaRegionState(this.watcher).getState());
}
MetaTableLocator.setMetaLocation(this.watcher, SN, RegionState.State.OPEN);
- assertEquals(mtl.getMetaRegionLocation(this.watcher), SN);
+ assertEquals(SN, mtl.getMetaRegionLocation(this.watcher));
assertEquals(RegionState.State.OPEN,
MetaTableLocator.getMetaRegionState(this.watcher).getState());
mtl.deleteMetaLocation(this.watcher);
assertNull(MetaTableLocator.getMetaRegionState(this.watcher).getServerName());
- assertEquals(MetaTableLocator.getMetaRegionState(this.watcher).getState(),
- RegionState.State.OFFLINE);
+ assertEquals(RegionState.State.OFFLINE,
+ MetaTableLocator.getMetaRegionState(this.watcher).getState());
assertNull(mtl.getMetaRegionLocation(this.watcher));
}
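JUnit's assertEquals(expected, actual) argument order matters only for the failure message, but with the arguments swapped a failure reads backwards, e.g. "expected:<OFFLINE> but was:<OPEN>" when OPEN was actually the expected state. The patch consistently moves the constant to the first position. A minimal sketch (liveServers is hypothetical, standing in for the value under test):

  import static org.junit.Assert.assertEquals;

  int actual = liveServers.size();
  assertEquals(3, actual); // expected first; a failure reads "expected:<3> but was:<2>"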
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
index bdb74a4..24a8830 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java
@@ -48,6 +48,7 @@ import java.io.IOException;
super(conf);
}
+ @Override
protected int movedRegionCleanerPeriod() {
return 500;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 1a0215e..acf7861 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -101,6 +101,7 @@ public class TestMultiVersions {
// TODO: Remove these deprecated classes or pull them in here if this is
// only test using them.
TimestampTestBase.doTestDelete(table, new FlushCache() {
+ @Override
public void flushcache() throws IOException {
UTIL.getHBaseCluster().flushcache();
}
@@ -109,6 +110,7 @@ public class TestMultiVersions {
// Perhaps drop and readd the table between tests so the former does
// not pollute this latter? Or put into separate tests.
TimestampTestBase.doTestTimestampScanning(table, new FlushCache() {
+ @Override
public void flushcache() throws IOException {
UTIL.getMiniHBaseCluster().flushcache();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
index c1b5dac..cfc5c2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerName.java
@@ -94,9 +94,8 @@ public class TestServerName {
ServerName.valueOf("www.example.org", 1234, 5678).toString());
assertEquals(sn.toString(),
ServerName.valueOf("www.example.org:1234", 5678).toString());
- assertEquals(sn.toString(),
- "www.example.org" + ServerName.SERVERNAME_SEPARATOR + "1234" +
- ServerName.SERVERNAME_SEPARATOR + "5678");
+ assertEquals("www.example.org" + ServerName.SERVERNAME_SEPARATOR + "1234"
+ + ServerName.SERVERNAME_SEPARATOR + "5678", sn.toString());
}
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
index 2639821..4aeedb9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerSideScanMetricsFromClientSide.java
@@ -205,7 +205,8 @@ public class TestServerSideScanMetricsFromClientSide {
}
// The filter should filter out all rows, but we still expect to see every row.
- Filter filter = new RowFilter(CompareOperator.EQUAL, new BinaryComparator("xyz".getBytes()));
+ Filter filter =
+ new RowFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("xyz")));
scan = new Scan(baseScan);
scan.setFilter(filter);
testMetric(scan, ServerSideScanMetrics.COUNT_OF_ROWS_SCANNED_KEY_METRIC_NAME, ROWS.length);
@@ -255,7 +256,8 @@ public class TestServerSideScanMetricsFromClientSide {
testRowsFilteredMetric(baseScan, null, 0);
// Row filter doesn't match any row key. All rows should be filtered
- Filter filter = new RowFilter(CompareOperator.EQUAL, new BinaryComparator("xyz".getBytes()));
+ Filter filter =
+ new RowFilter(CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("xyz")));
testRowsFilteredMetric(baseScan, filter, ROWS.length);
// Filter will return results containing only the first key. Number of entire rows filtered
@@ -269,7 +271,7 @@ public class TestServerSideScanMetricsFromClientSide {
testRowsFilteredMetric(baseScan, filter, 0);
// Column prefix will NOT find any matching qualifier on any row. All rows should be filtered
- filter = new ColumnPrefixFilter("xyz".getBytes());
+ filter = new ColumnPrefixFilter(Bytes.toBytes("xyz"));
testRowsFilteredMetric(baseScan, filter, ROWS.length);
// Matching column value should exist in each row. No rows should be filtered.
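"xyz".getBytes() carries the same default-charset concern noted above for new String(byte[]); Bytes.toBytes(String) always encodes UTF-8 and is the idiomatic conversion throughout HBase. A minimal sketch:

  import org.apache.hadoop.hbase.util.Bytes;

  byte[] b1 = "xyz".getBytes();      // platform-default charset, machine-dependent
  byte[] b2 = Bytes.toBytes("xyz");  // always UTF-8
  String s = Bytes.toString(b2);     // round-trips to "xyz"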
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index fba429a..f81a36d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -421,7 +421,7 @@ public class TestAdmin1 {
this.admin.createTable(htd);
Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
TableDescriptor confirmedHtd = table.getDescriptor();
- assertEquals(TableDescriptor.COMPARATOR.compare(htd, confirmedHtd), 0);
+ assertEquals(0, TableDescriptor.COMPARATOR.compare(htd, confirmedHtd));
MetaTableAccessor.fullScanMetaAndPrint(TEST_UTIL.getConnection());
table.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 05b8edc..57bd158 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -303,7 +303,7 @@ public class TestAdmin2 {
TableName tableName = TableName
.valueOf("testTableNotFoundExceptionWithoutAnyTables");
Table ht = TEST_UTIL.getConnection().getTable(tableName);
- ht.get(new Get("e".getBytes()));
+ ht.get(new Get(Bytes.toBytes("e")));
}
@Test (timeout=300000)
@@ -582,8 +582,9 @@ public class TestAdmin2 {
}
// Before the fix for HBASE-6146, the below table creation was failing as the hbase:meta table
// actually getting disabled by the disableTable() call.
- HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName().getBytes()));
- HColumnDescriptor hcd = new HColumnDescriptor("cf1".getBytes());
+ HTableDescriptor htd =
+ new HTableDescriptor(TableName.valueOf(Bytes.toBytes(name.getMethodName())));
+ HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("cf1"));
htd.addFamily(hcd);
TEST_UTIL.getHBaseAdmin().createTable(htd);
}
@@ -695,13 +696,13 @@ public class TestAdmin2 {
assertTrue(decommissionedRegionServers.isEmpty());
final TableName tableName = TableName.valueOf(name.getMethodName());
- TEST_UTIL.createMultiRegionTable(tableName, "f".getBytes(), 6);
+ TEST_UTIL.createMultiRegionTable(tableName, Bytes.toBytes("f"), 6);
ArrayList<ServerName> clusterRegionServers =
new ArrayList<>(admin.getClusterMetrics(EnumSet.of(Option.LIVE_SERVERS))
.getLiveServerMetrics().keySet());
- assertEquals(clusterRegionServers.size(), 3);
+ assertEquals(3, clusterRegionServers.size());
HashMap<ServerName, List<RegionInfo>> serversToDecommssion = new HashMap<>();
// Get a server that has regions. We will decommission two of the servers,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
index ab8ebb5..3344c4b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
@@ -56,6 +56,7 @@ public class TestAsyncClusterAdminApi2 extends TestAsyncAdminBase {
}
@Before
+ @Override
public void setUp() throws Exception {
TEST_UTIL.startMiniCluster(1, 3);
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
@@ -63,6 +64,7 @@ public class TestAsyncClusterAdminApi2 extends TestAsyncAdminBase {
}
@After
+ @Override
public void tearDown() throws Exception {
IOUtils.closeQuietly(ASYNC_CONN);
TEST_UTIL.shutdownMiniCluster();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
index 8c2b060..8968b39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
@@ -49,7 +49,7 @@ public class TestAsyncDecommissionAdminApi extends TestAsyncAdminBase {
new ArrayList<>(admin.getClusterMetrics(EnumSet.of(Option.LIVE_SERVERS)).get()
.getLiveServerMetrics().keySet());
- assertEquals(clusterRegionServers.size(), 2);
+ assertEquals(2, clusterRegionServers.size());
HashMap<ServerName, List<RegionInfo>> serversToDecommssion = new HashMap<>();
// Get a server that has regions. We will decommission one of the servers,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
index 7a2c00f..d50e039 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
@@ -90,7 +90,7 @@ public class TestAsyncProcedureAdminApi extends TestAsyncAdminBase {
byte[] result = admin.execProcedureWithReturn(SimpleMasterProcedureManager.SIMPLE_SIGNATURE,
"myTest2", new HashMap<>()).get();
assertArrayEquals("Incorrect return data from execProcedure",
- SimpleMasterProcedureManager.SIMPLE_DATA.getBytes(), result);
+ Bytes.toBytes(SimpleMasterProcedureManager.SIMPLE_DATA), result);
}
@Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureSchedulerPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureSchedulerPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureSchedulerPerformanceEvaluation.java
index 21d914a..e5d3a79 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureSchedulerPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureSchedulerPerformanceEvaluation.java
@@ -102,6 +102,7 @@ public class MasterProcedureSchedulerPerformanceEvaluation extends AbstractHBase
this.hri = hri;
}
+ @Override
public Procedure newProcedure(long procId) {
return new RegionProcedure(procId, hri);
}
@@ -132,6 +133,7 @@ public class MasterProcedureSchedulerPerformanceEvaluation extends AbstractHBase
this.tableName = tableName;
}
+ @Override
public Procedure newProcedure(long procId) {
return new TableProcedure(procId, tableName);
}
@@ -196,6 +198,7 @@ public class MasterProcedureSchedulerPerformanceEvaluation extends AbstractHBase
private final AtomicLong completed = new AtomicLong(0);
private class AddProcsWorker extends Thread {
+ @Override
public void run() {
final Random rand = new Random(System.currentTimeMillis());
long procId = procIds.incrementAndGet();
@@ -209,6 +212,7 @@ public class MasterProcedureSchedulerPerformanceEvaluation extends AbstractHBase
}
private class PollAndLockWorker extends Thread {
+ @Override
public void run() {
while (completed.get() < numOps) {
// With lock/unlock being ~100ns, and no other workload, 1000ns wait seems reasonable.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
index 8dec59d..4adab53 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
@@ -99,7 +99,7 @@ public class TestModifyNamespaceProcedure {
// Before modify
NamespaceDescriptor currentNsDescriptor =
UTIL.getAdmin().getNamespaceDescriptor(nsd.getName());
- assertEquals(currentNsDescriptor.getConfigurationValue(nsKey1), nsValue1before);
+ assertEquals(nsValue1before, currentNsDescriptor.getConfigurationValue(nsKey1));
assertNull(currentNsDescriptor.getConfigurationValue(nsKey2));
// Update
@@ -115,8 +115,8 @@ public class TestModifyNamespaceProcedure {
// Verify the namespace is updated.
currentNsDescriptor =
UTIL.getAdmin().getNamespaceDescriptor(nsd.getName());
- assertEquals(nsd.getConfigurationValue(nsKey1), nsValue1after);
- assertEquals(currentNsDescriptor.getConfigurationValue(nsKey2), nsValue2);
+ assertEquals(nsValue1after, nsd.getConfigurationValue(nsKey1));
+ assertEquals(nsValue2, currentNsDescriptor.getConfigurationValue(nsKey2));
}
@Test(timeout=60000)
@@ -219,7 +219,7 @@ public class TestModifyNamespaceProcedure {
// Validate
NamespaceDescriptor currentNsDescriptor =
UTIL.getAdmin().getNamespaceDescriptor(nsd.getName());
- assertEquals(currentNsDescriptor.getConfigurationValue(nsKey), nsValue);
+ assertEquals(nsValue, currentNsDescriptor.getConfigurationValue(nsKey));
}
@Test(timeout = 60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
index 8b58646..24a6bc5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -103,7 +104,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
currentHtd = UTIL.getAdmin().getTableDescriptor(tableName);
assertEquals(2, currentHtd.getFamiliesKeys().size());
- assertTrue(currentHtd.hasFamily(cf2.getBytes()));
+ assertTrue(currentHtd.hasFamily(Bytes.toBytes(cf2)));
// Test 2: Modify the table descriptor offline
UTIL.getAdmin().disableTable(tableName);
@@ -119,7 +120,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
ProcedureTestingUtility.assertProcNotFailed(procExec.getResult(procId2));
currentHtd = UTIL.getAdmin().getTableDescriptor(tableName);
- assertTrue(currentHtd.hasFamily(cf3.getBytes()));
+ assertTrue(currentHtd.hasFamily(Bytes.toBytes(cf3)));
assertEquals(3, currentHtd.getFamiliesKeys().size());
}
@@ -137,7 +138,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
// Test 1: Modify the table descriptor
HTableDescriptor htd = new HTableDescriptor(UTIL.getAdmin().getTableDescriptor(tableName));
- htd.removeFamily(cf2.getBytes());
+ htd.removeFamily(Bytes.toBytes(cf2));
long procId = ProcedureTestingUtility.submitAndWait(
procExec, new ModifyTableProcedure(procExec.getEnvironment(), htd));
@@ -145,7 +146,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
currentHtd = UTIL.getAdmin().getTableDescriptor(tableName);
assertEquals(2, currentHtd.getFamiliesKeys().size());
- assertFalse(currentHtd.hasFamily(cf2.getBytes()));
+ assertFalse(currentHtd.hasFamily(Bytes.toBytes(cf2)));
// Test 2: Modify the table descriptor offline
UTIL.getAdmin().disableTable(tableName);
@@ -153,7 +154,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
HTableDescriptor htd2 =
new HTableDescriptor(UTIL.getAdmin().getTableDescriptor(tableName));
- htd2.removeFamily(cf3.getBytes());
+ htd2.removeFamily(Bytes.toBytes(cf3));
// Disable Sanity check
htd2.setConfiguration("hbase.table.sanity.checks", Boolean.FALSE.toString());
@@ -164,12 +165,12 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
currentHtd = UTIL.getAdmin().getTableDescriptor(tableName);
assertEquals(1, currentHtd.getFamiliesKeys().size());
- assertFalse(currentHtd.hasFamily(cf3.getBytes()));
+ assertFalse(currentHtd.hasFamily(Bytes.toBytes(cf3)));
//Removing the last family will fail
HTableDescriptor htd3 =
new HTableDescriptor(UTIL.getAdmin().getTableDescriptor(tableName));
- htd3.removeFamily(cf1.getBytes());
+ htd3.removeFamily(Bytes.toBytes(cf1));
long procId3 =
ProcedureTestingUtility.submitAndWait(procExec,
new ModifyTableProcedure(procExec.getEnvironment(), htd3));
@@ -179,7 +180,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
assertTrue("expected DoNotRetryIOException, got " + cause,
cause instanceof DoNotRetryIOException);
assertEquals(1, currentHtd.getFamiliesKeys().size());
- assertTrue(currentHtd.hasFamily(cf1.getBytes()));
+ assertTrue(currentHtd.hasFamily(Bytes.toBytes(cf1)));
}
@Test(timeout=60000)
@@ -202,7 +203,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
boolean newCompactionEnableOption = htd.isCompactionEnabled() ? false : true;
htd.setCompactionEnabled(newCompactionEnableOption);
htd.addFamily(new HColumnDescriptor(cf2));
- htd.removeFamily(cf3.getBytes());
+ htd.removeFamily(Bytes.toBytes(cf3));
htd.setRegionReplication(3);
// Start the Modify procedure && kill the executor
@@ -240,7 +241,7 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
boolean newCompactionEnableOption = htd.isCompactionEnabled() ? false : true;
htd.setCompactionEnabled(newCompactionEnableOption);
htd.addFamily(new HColumnDescriptor(cf2));
- htd.removeFamily(cf3.getBytes());
+ htd.removeFamily(Bytes.toBytes(cf3));
// Start the Modify procedure && kill the executor
long procId = procExec.submitProcedure(
@@ -253,8 +254,8 @@ public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
HTableDescriptor currentHtd = UTIL.getAdmin().getTableDescriptor(tableName);
assertEquals(newCompactionEnableOption, currentHtd.isCompactionEnabled());
assertEquals(2, currentHtd.getFamiliesKeys().size());
- assertTrue(currentHtd.hasFamily(cf2.getBytes()));
- assertFalse(currentHtd.hasFamily(cf3.getBytes()));
+ assertTrue(currentHtd.hasFamily(Bytes.toBytes(cf2)));
+ assertFalse(currentHtd.hasFamily(Bytes.toBytes(cf3)));
// cf2 should be added cf3 should be removed
MasterProcedureTestingUtility.validateTableCreation(UTIL.getHBaseCluster().getMaster(),
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
index 2834b8f..2140d5e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
@@ -135,7 +135,7 @@ public class TestWALProcedureStoreOnHDFS {
final AtomicInteger reCount = new AtomicInteger(0);
Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
for (int i = 0; i < thread.length; ++i) {
- final long procId = i + 1;
+ final long procId = i + 1L;
thread[i] = new Thread(() -> {
try {
LOG.debug("[S] INSERT " + procId);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
index 60c9c4b..382625c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
@@ -203,6 +203,7 @@ public class TestSnapshotFileCache {
}
class SnapshotFiles implements SnapshotFileCache.SnapshotFileInspector {
+ @Override
public Collection<String> filesUnderSnapshot(final Path snapshotDir) throws IOException {
Collection<String> files = new HashSet<>();
files.addAll(SnapshotReferenceUtil.getHFileNames(UTIL.getConfiguration(), fs, snapshotDir));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
index ad6c58e..8f81946 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
@@ -114,7 +114,8 @@ public class TestSnapshotHFileCleaner {
assertFalse(cleaner.isFileDeletable(fs.getFileStatus(refFile)));
}
- class SnapshotFiles implements SnapshotFileCache.SnapshotFileInspector {
+ static class SnapshotFiles implements SnapshotFileCache.SnapshotFileInspector {
+ @Override
public Collection<String> filesUnderSnapshot(final Path snapshotDir) throws IOException {
Collection<String> files = new HashSet<>();
files.addAll(SnapshotReferenceUtil.getHFileNames(TEST_UTIL.getConfiguration(), fs, snapshotDir));
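A non-static inner class keeps an implicit reference to its enclosing instance (findbugs SIC_INNER_SHOULD_BE_STATIC); when the class never touches outer state, declaring it static drops that hidden field and lets it be constructed without an outer instance. A minimal sketch:

  class Outer {
    class Inner { }          // holds a hidden Outer.this reference
    static class Nested { }  // no implicit reference to Outer
  }
  // new Outer.Nested() works standalone; Inner needs outer.new Inner().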
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
index 32e6522..65f50c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hbase.mob;
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -34,13 +34,15 @@ import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
-public class TestCachedMobFile extends TestCase{
+public class TestCachedMobFile {
static final Logger LOG = LoggerFactory.getLogger(TestCachedMobFile.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Configuration conf = TEST_UTIL.getConfiguration();
@@ -50,10 +52,12 @@ public class TestCachedMobFile extends TestCase{
private static final long EXPECTED_REFERENCE_ZERO = 0;
private static final long EXPECTED_REFERENCE_ONE = 1;
private static final long EXPECTED_REFERENCE_TWO = 2;
+ @Rule
+ public TestName testName = new TestName();
@Test
public void testOpenClose() throws Exception {
- String caseName = getName();
+ String caseName = testName.getMethodName();
Path testDir = TEST_UTIL.getDataTestDir();
FileSystem fs = testDir.getFileSystem(conf);
HFileContext meta = new HFileContextBuilder().withBlockSize(8*1024).build();
@@ -61,21 +65,21 @@ public class TestCachedMobFile extends TestCase{
.withOutputDir(testDir).withFileContext(meta).build();
MobTestUtil.writeStoreFile(writer, caseName);
CachedMobFile cachedMobFile = CachedMobFile.create(fs, writer.getPath(), conf, cacheConf);
- Assert.assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
+ assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
cachedMobFile.open();
- Assert.assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
+ assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
cachedMobFile.open();
- Assert.assertEquals(EXPECTED_REFERENCE_TWO, cachedMobFile.getReferenceCount());
+ assertEquals(EXPECTED_REFERENCE_TWO, cachedMobFile.getReferenceCount());
cachedMobFile.close();
- Assert.assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
+ assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
cachedMobFile.close();
- Assert.assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
+ assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
}
@SuppressWarnings("SelfComparison")
@Test
public void testCompare() throws Exception {
- String caseName = getName();
+ String caseName = testName.getMethodName();
Path testDir = TEST_UTIL.getDataTestDir();
FileSystem fs = testDir.getFileSystem(conf);
Path outputDir1 = new Path(testDir, FAMILY1);
@@ -86,16 +90,16 @@ public class TestCachedMobFile extends TestCase{
CachedMobFile cachedMobFile1 = CachedMobFile.create(fs, writer1.getPath(), conf, cacheConf);
Path outputDir2 = new Path(testDir, FAMILY2);
StoreFileWriter writer2 = new StoreFileWriter.Builder(conf, cacheConf, fs)
- .withOutputDir(outputDir2)
- .withFileContext(meta)
- .build();
+ .withOutputDir(outputDir2)
+ .withFileContext(meta)
+ .build();
MobTestUtil.writeStoreFile(writer2, caseName);
CachedMobFile cachedMobFile2 = CachedMobFile.create(fs, writer2.getPath(), conf, cacheConf);
cachedMobFile1.access(1);
cachedMobFile2.access(2);
- Assert.assertEquals(cachedMobFile1.compareTo(cachedMobFile2), 1);
- Assert.assertEquals(cachedMobFile2.compareTo(cachedMobFile1), -1);
- Assert.assertEquals(cachedMobFile1.compareTo(cachedMobFile1), 0);
+ assertEquals(1, cachedMobFile1.compareTo(cachedMobFile2));
+ assertEquals(-1, cachedMobFile2.compareTo(cachedMobFile1));
+ assertEquals(0, cachedMobFile1.compareTo(cachedMobFile1));
}
@Test
@@ -105,7 +109,7 @@ public class TestCachedMobFile extends TestCase{
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
.withOutputDir(testDir).withFileContext(meta).build();
- String caseName = getName();
+ String caseName = testName.getMethodName();
MobTestUtil.writeStoreFile(writer, caseName);
CachedMobFile cachedMobFile = CachedMobFile.create(fs, writer.getPath(), conf, cacheConf);
byte[] family = Bytes.toBytes(caseName);
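These mob tests extended junit.framework.TestCase (JUnit 3) while also carrying JUnit 4 annotations; getName() came from TestCase. After dropping the superclass, the TestName rule supplies the running method's name. A minimal sketch of the converted shape (ExampleTest is hypothetical):

  import org.junit.Rule;
  import org.junit.Test;
  import org.junit.rules.TestName;

  public class ExampleTest {           // no longer extends TestCase
    @Rule
    public TestName testName = new TestName();

    @Test
    public void testSomething() {
      String caseName = testName.getMethodName(); // "testSomething"
    }
  }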
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
index f894fb2..cc2aa5c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java
@@ -18,7 +18,9 @@
*/
package org.apache.hadoop.hbase.mob;
-import junit.framework.TestCase;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -36,17 +38,21 @@ import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
-public class TestMobFile extends TestCase {
+public class TestMobFile {
static final Logger LOG = LoggerFactory.getLogger(TestMobFile.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Configuration conf = TEST_UTIL.getConfiguration();
private CacheConfig cacheConf = new CacheConfig(conf);
+ @Rule
+ public TestName testName = new TestName();
@Test
public void testReadKeyValue() throws Exception {
@@ -57,7 +63,7 @@ public class TestMobFile extends TestCase {
.withOutputDir(testDir)
.withFileContext(meta)
.build();
- String caseName = getName();
+ String caseName = testName.getMethodName();
MobTestUtil.writeStoreFile(writer, caseName);
MobFile mobFile =
@@ -110,7 +116,7 @@ public class TestMobFile extends TestCase {
.withOutputDir(testDir)
.withFileContext(meta)
.build();
- MobTestUtil.writeStoreFile(writer, getName());
+ MobTestUtil.writeStoreFile(writer, testName.getMethodName());
MobFile mobFile =
new MobFile(new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
index 42e6528..5077728 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java
@@ -18,11 +18,12 @@
*/
package org.apache.hadoop.hbase.mob;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
import java.io.IOException;
import java.util.Date;
-import junit.framework.TestCase;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -38,13 +39,15 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Category(SmallTests.class)
-public class TestMobFileCache extends TestCase {
+public class TestMobFileCache {
static final Logger LOG = LoggerFactory.getLogger(TestMobFileCache.class);
private HBaseTestingUtility UTIL;
private HRegion region;
@@ -73,7 +76,7 @@ public class TestMobFileCache extends TestCase {
private static final byte[] QF2 = Bytes.toBytes("qf2");
private static final byte[] QF3 = Bytes.toBytes("qf3");
- @Override
+ @Before
public void setUp() throws Exception {
UTIL = HBaseTestingUtility.createLocalHTU();
conf = UTIL.getConfiguration();
@@ -93,8 +96,8 @@ public class TestMobFileCache extends TestCase {
region = UTIL.createLocalHRegion(htd, null, null);
}
- @Override
- protected void tearDown() throws Exception {
+ @After
+ public void tearDown() throws Exception {
region.close();
region.getFilesystem().delete(UTIL.getDataTestDir(), true);
}
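With TestCase gone there is no setUp()/tearDown() contract to override, so this hunk annotates the lifecycle methods instead; JUnit 4 also requires them to be public, hence the visibility change on tearDown(). A hedged sketch of the shape (the resource is invented for illustration):

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class LifecycleExample {
  private StringBuilder resource;  // stands in for the region/filesystem state in the patch

  @Before
  public void setUp() throws Exception {
    resource = new StringBuilder("ready");  // runs before every @Test method
  }

  @After
  public void tearDown() throws Exception {
    resource = null;  // runs after every @Test method, pass or fail
  }

  @Test
  public void testSomething() {
    resource.append("!");
  }
}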
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
index 8478e20..5ca73a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
@@ -18,26 +18,31 @@
*/
package org.apache.hadoop.hbase.mob;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
+
import java.util.Date;
import java.util.Random;
import java.util.UUID;
-import junit.framework.TestCase;
-
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.MD5Hash;
+import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
-public class TestMobFileName extends TestCase {
+public class TestMobFileName {
private String uuid;
private Date date;
private String dateStr;
private byte[] startKey;
+ @Before
public void setUp() {
Random random = new Random();
uuid = UUID.randomUUID().toString().replaceAll("-", "");
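Worth noting: once the TestCase superclass is removed, an unannotated setUp() is simply never called, and tests fail (or pass vacuously) against uninitialized fields. Adding @Before, as this hunk does, is what keeps the initialization running. A small sketch of the failure mode (names illustrative):

import static org.junit.Assert.assertNotNull;

import org.junit.Before;
import org.junit.Test;

public class SilentSetUpExample {
  private String uuid;

  // Without this annotation, JUnit 4 never invokes setUp()
  // and the test below fails with uuid == null.
  @Before
  public void setUp() {
    uuid = java.util.UUID.randomUUID().toString().replaceAll("-", "");
  }

  @Test
  public void testUuidInitialized() {
    assertNotNull(uuid);
  }
}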
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
index 94680f2..214fe49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
@@ -146,7 +146,7 @@ public class TestNamespaceAuditor {
.addConfiguration(TableNamespaceManager.KEY_MAX_TABLES, "2").build();
ADMIN.createNamespace(nspDesc);
assertNotNull("Namespace descriptor found null.", ADMIN.getNamespaceDescriptor(nsp));
- assertEquals(ADMIN.listNamespaceDescriptors().length, 3);
+ assertEquals(3, ADMIN.listNamespaceDescriptors().length);
HColumnDescriptor fam1 = new HColumnDescriptor("fam1");
HTableDescriptor tableDescOne =
@@ -592,7 +592,7 @@ public class TestNamespaceAuditor {
.build();
ADMIN.createNamespace(nspDesc);
assertNotNull("Namespace descriptor found null.", ADMIN.getNamespaceDescriptor(nsp));
- assertEquals(ADMIN.listNamespaceDescriptors().length, 3);
+ assertEquals(3, ADMIN.listNamespaceDescriptors().length);
HColumnDescriptor fam1 = new HColumnDescriptor("fam1");
HTableDescriptor tableDescOne =
new HTableDescriptor(TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + "table1"));
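These hunks fix reversed assertEquals arguments. JUnit's signature is assertEquals(expected, actual); swapping them still passes or fails identically, but a failure then reports the roles inverted ("expected:<3> but was:<2>" when 3 was the computed value). A minimal illustration (values invented):

import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class AssertOrderExample {
  @Test
  public void testArgumentOrder() {
    int actualCount = 3;
    // Correct: the literal expectation first, the computed value second,
    // so a failure reads "expected:<3> but was:<...>".
    assertEquals(3, actualCount);
  }
}

The same reasoning applies to the assertEquals(nr, null) style calls later in this commit, which become the more diagnostic assertNull(nr).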
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
index 0603b21..5ceb8e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
@@ -52,7 +52,7 @@ public class TestProcedure {
when(coord.getRpcs()).thenReturn(comms); // make it not null
}
- class LatchedProcedure extends Procedure {
+ static class LatchedProcedure extends Procedure {
CountDownLatch startedAcquireBarrier = new CountDownLatch(1);
CountDownLatch startedDuringBarrier = new CountDownLatch(1);
CountDownLatch completedProcedure = new CountDownLatch(1);
@@ -93,6 +93,7 @@ public class TestProcedure {
final LatchedProcedure procspy = spy(proc);
// coordinator: start the barrier procedure
new Thread() {
+ @Override
public void run() {
procspy.call();
}
@@ -139,6 +140,7 @@ public class TestProcedure {
final LatchedProcedure procspy = spy(proc);
// start the barrier procedure
new Thread() {
+ @Override
public void run() {
procspy.call();
}
@@ -192,6 +194,7 @@ public class TestProcedure {
// start the barrier procedure
Thread t = new Thread() {
+ @Override
public void run() {
procspy.call();
}
@@ -214,6 +217,7 @@ public class TestProcedure {
// start the barrier procedure
Thread t = new Thread() {
+ @Override
public void run() {
procspy.call();
}
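Two recurring fixes appear in this file: helper classes that never touch enclosing-instance state are made static (dropping the hidden outer-this reference findbugs flags as SIC_INNER_SHOULD_BE_STATIC), and methods that override a supertype method, including run() in anonymous Thread subclasses, gain @Override so any signature drift becomes a compile error instead of a silent overload. A compact sketch of both (names illustrative):

import java.util.concurrent.CountDownLatch;

public class StaticNestedExample {
  // static: the latch helper needs no reference to the enclosing instance.
  static class LatchedTask implements Runnable {
    final CountDownLatch started = new CountDownLatch(1);

    @Override  // guards against accidentally overloading instead of overriding
    public void run() {
      started.countDown();
    }
  }

  public static void main(String[] args) throws InterruptedException {
    LatchedTask task = new LatchedTask();
    new Thread(task).start();
    task.started.await();  // blocks until the thread has actually run
  }
}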
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
index 1678a50..e2b1a79 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
@@ -213,6 +213,7 @@ public class TestProcedureCoordinator {
final Procedure spy = spy(task);
AcquireBarrierAnswer prepare = new AcquireBarrierAnswer(procName, cohort) {
+ @Override
public void doWork() {
// then do some fun where we commit before all nodes have prepared
// "one" commits before anyone else is done
@@ -288,7 +289,7 @@ public class TestProcedureCoordinator {
inorder.verify(controller).sendGlobalBarrierReached(eq(task), anyListOf(String.class));
}
- private abstract class OperationAnswer implements Answer<Void> {
+ private static abstract class OperationAnswer implements Answer<Void> {
private boolean ran = false;
public void ensureRan() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
index 1f5cbe6..c139862 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
@@ -244,7 +244,7 @@ public class TestZKProcedure {
Subprocedure r = ((Subprocedure) invocation.getMock());
LOG.error("Remote commit failure, not propagating error:" + remoteCause);
comms.receiveAbortProcedure(r.getName(), remoteCause);
- assertEquals(r.isComplete(), true);
+ assertEquals(true, r.isComplete());
// don't complete the error phase until the coordinator has gotten the error
// notification (which ensures that we never progress past prepare)
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
index a44ad74..ce0d2f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java
@@ -142,7 +142,7 @@ public class TestQuotaStatusRPCs {
assertTrue(
"Observed table usage was " + snapshot.getUsage(),
snapshot.getUsage() >= tableSize);
- assertEquals(snapshot.getLimit(), sizeLimit);
+ assertEquals(sizeLimit, snapshot.getLimit());
SpaceQuotaStatus pbStatus = snapshot.getQuotaStatus();
assertFalse(pbStatus.isInViolation());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
index 300268f..9ecde78 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
@@ -267,6 +267,7 @@ public class TestSuperUserQuotaPermissions {
private <T> T doAsUser(UserGroupInformation ugi, Callable<T> task) throws Exception {
return ugi.doAs(new PrivilegedExceptionAction<T>() {
+ @Override
public T run() throws Exception {
return task.call();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
index 82e1755..57948b6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
@@ -51,7 +51,7 @@ public class EncodedSeekPerformanceTest {
/** Use this benchmark with default options */
public EncodedSeekPerformanceTest() {
configuration.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.5f);
- randomizer = new Random(42l);
+ randomizer = new Random(42L);
numberOfSeeks = DEFAULT_NUMBER_OF_SEEKS;
}
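The 42l to 42L change is cosmetic but deliberate: a lowercase l suffix on a long literal is easily misread as the digit 1, which is why error-prone warns on it. For example:

public class LongLiteralExample {
  public static void main(String[] args) {
    long misreadable = 42l;  // reads like 421 in many fonts
    long clear = 42L;        // same value, unambiguous
    System.out.println(misreadable == clear);  // prints: true
  }
}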
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockHStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockHStoreFile.java
index e159678..8199b07 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MockHStoreFile.java
@@ -111,11 +111,13 @@ public class MockHStoreFile extends HStoreFile {
this.entryCount = entryCount;
}
+ @Override
public OptionalLong getMinimumTimestamp() {
return timeRangeTracker == null ? OptionalLong.empty()
: OptionalLong.of(timeRangeTracker.getMin());
}
+ @Override
public OptionalLong getMaximumTimestamp() {
return timeRangeTracker == null ? OptionalLong.empty()
: OptionalLong.of(timeRangeTracker.getMax());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
index 5af7d96..f89be43 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/StatefulStoreMockMaker.java
@@ -46,6 +46,7 @@ public class StatefulStoreMockMaker {
return 0;
}
private class CancelAnswer implements Answer<Object> {
+ @Override
public CompactionContext answer(InvocationOnMock invocation) throws Throwable {
cancelCompaction(invocation.getArgument(0));
return null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 774888c..c8a75f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -364,9 +364,9 @@ public class TestAtomicOperation {
assertEquals(0, failures.get());
Get g = new Get(row);
Result result = region.get(g);
- assertEquals(result.getValue(fam1, qual1).length, 10000);
- assertEquals(result.getValue(fam1, qual2).length, 10000);
- assertEquals(result.getValue(fam2, qual3).length, 10000);
+ assertEquals(10000, result.getValue(fam1, qual1).length);
+ assertEquals(10000, result.getValue(fam1, qual2).length);
+ assertEquals(10000, result.getValue(fam2, qual3).length);
}
/**
* Test multi-threaded row mutations.
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
index 06cbf7a..a0babe8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
@@ -50,6 +50,7 @@ public class TestBlocksScanned extends HBaseTestCase {
private static HBaseTestingUtility TEST_UTIL = null;
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
index f3c5da6..0121aef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java
@@ -112,6 +112,7 @@ public class TestBulkLoad {
argThat(bulkLogWalEdit(WALEdit.BULK_LOAD, tableName.toBytes(),
familyName, storeFileNames)),
anyBoolean())).thenAnswer(new Answer() {
+ @Override
public Object answer(InvocationOnMock invocation) {
WALKeyImpl walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
@@ -137,6 +138,7 @@ public class TestBulkLoad {
when(log.append(any(),
any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
anyBoolean())).thenAnswer(new Answer() {
+ @Override
public Object answer(InvocationOnMock invocation) {
WALKeyImpl walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
@@ -156,6 +158,7 @@ public class TestBulkLoad {
when(log.append(any(),
any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
anyBoolean())).thenAnswer(new Answer() {
+ @Override
public Object answer(InvocationOnMock invocation) {
WALKeyImpl walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
@@ -176,6 +179,7 @@ public class TestBulkLoad {
when(log.append(any(),
any(), argThat(bulkLogWalEditType(WALEdit.BULK_LOAD)),
anyBoolean())).thenAnswer(new Answer() {
+ @Override
public Object answer(InvocationOnMock invocation) {
WALKeyImpl walKey = invocation.getArgument(1);
MultiVersionConcurrencyControl mvcc = walKey.getMvcc();
@@ -281,7 +285,7 @@ public class TestBulkLoad {
writer.append(new KeyValue(CellUtil.createCell(randomBytes,
family,
randomBytes,
- 0l,
+ 0L,
KeyValue.Type.Put.getCode(),
randomBytes)));
} finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
index 0717b4d..734b930 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
@@ -18,25 +18,25 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import java.lang.management.ManagementFactory;
-
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.SortedSet;
-import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
-
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
-
-
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -48,11 +48,9 @@ import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import static org.junit.Assert.assertTrue;
-
@Category({RegionServerTests.class, SmallTests.class})
@RunWith(Parameterized.class)
-public class TestCellFlatSet extends TestCase {
+public class TestCellFlatSet {
@Parameterized.Parameters
public static Object[] data() {
return new Object[] { "SMALL_CHUNKS", "NORMAL_CHUNKS" }; // test with different chunk sizes
@@ -77,25 +75,22 @@ public class TestCellFlatSet extends TestCase {
public TestCellFlatSet(String chunkType){
long globalMemStoreLimit = (long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage()
.getMax() * MemorySizeUtil.getGlobalMemStoreHeapPercent(CONF, false));
- if (chunkType == "NORMAL_CHUNKS") {
+ if (chunkType.equals("NORMAL_CHUNKS")) {
chunkCreator = ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false,
globalMemStoreLimit, 0.2f, MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT, null);
- assertTrue(chunkCreator != null);
+ assertNotNull(chunkCreator);
smallChunks = false;
} else {
// chunkCreator with smaller chunk size, so only 3 cell-representations can accommodate a chunk
chunkCreator = ChunkCreator.initialize(SMALL_CHUNK_SIZE, false,
globalMemStoreLimit, 0.2f, MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT, null);
- assertTrue(chunkCreator != null);
+ assertNotNull(chunkCreator);
smallChunks = true;
}
}
@Before
- @Override
public void setUp() throws Exception {
- super.setUp();
-
// create array of Cells to bass to the CellFlatMap under CellSet
final byte[] one = Bytes.toBytes(15);
final byte[] two = Bytes.toBytes(25);
@@ -126,7 +121,7 @@ public class TestCellFlatSet extends TestCase {
ascCCM = setUpCellChunkMap(true);
descCCM = setUpCellChunkMap(false);
- if (smallChunks == true) { // check jumbo chunks as well
+ if (smallChunks) { // check jumbo chunks as well
ascCCM = setUpJumboCellChunkMap(true);
}
}
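Three cleanups land in this constructor and setUp(): chunkType == "NORMAL_CHUNKS" compares object references rather than contents (it only "works" when both sides happen to be interned literals), assertTrue(x != null) becomes the more diagnostic assertNotNull(x), and if (smallChunks == true) is simplified to if (smallChunks). A sketch of the equality trap (values illustrative):

public class StringEqualityExample {
  public static void main(String[] args) {
    String literal = "NORMAL_CHUNKS";
    String built = new String("NORMAL_CHUNKS");  // same contents, different object

    System.out.println(literal == built);       // false: compares references
    System.out.println(literal.equals(built));  // true: compares contents
  }
}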
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
index 4fa06b0..a58a22e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java
@@ -36,6 +36,7 @@ public class TestCellSkipListSet extends TestCase {
private final CellSet csls =
new CellSet(CellComparatorImpl.COMPARATOR);
+ @Override
protected void setUp() throws Exception {
super.setUp();
this.csls.clear();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
index c0ba621..87e4aff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java
@@ -56,6 +56,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
@@ -197,7 +198,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
Cell nr = ((CompactingMemStore)this.memstore).getNextRow(new KeyValue(Bytes.toBytes(i),
System.currentTimeMillis()));
if (i + 1 == ROW_COUNT) {
- assertEquals(nr, null);
+ assertNull(nr);
} else {
assertTrue(CellComparator.getInstance().compareRows(nr,
new KeyValue(Bytes.toBytes(i + 1), System.currentTimeMillis())) == 0);
@@ -317,7 +318,7 @@ public class TestCompactingMemStore extends TestDefaultMemStore {
EnvironmentEdgeForMemstoreTest edge = new EnvironmentEdgeForMemstoreTest();
EnvironmentEdgeManager.injectEdge(edge);
long t = memstore.timeOfOldestEdit();
- assertEquals(t, Long.MAX_VALUE);
+ assertEquals(Long.MAX_VALUE, t);
// test the case that the timeOfOldestEdit is updated after a KV add
memstore.add(KeyValueTestUtil.create("r", "f", "q", 100, "v"), null);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
index 1a04c8e..030658e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java
@@ -454,7 +454,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
count++;
}
}
- assertEquals("the count should be ", count, 150);
+ assertEquals("the count should be ", 150, count);
for(int i = 0; i < scanners.size(); i++) {
scanners.get(i).close();
}
@@ -481,7 +481,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
} finally {
itr.close();
}
- assertEquals("the count should be ", cnt, 150);
+ assertEquals("the count should be ", 150, cnt);
}
private void addRowsByKeysWith50Cols(AbstractMemStore hmc, String[] keys) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index afe3228..f017617 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -116,7 +116,8 @@ public class TestCompaction {
// Increment the least significant character so we get to next row.
secondRowBytes[START_KEY_BYTES.length - 1]++;
thirdRowBytes = START_KEY_BYTES.clone();
- thirdRowBytes[START_KEY_BYTES.length - 1] += 2;
+ thirdRowBytes[START_KEY_BYTES.length - 1] =
+ (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
}
@Before
@@ -264,7 +265,7 @@ public class TestCompaction {
FileSystem fs = store.getFileSystem();
// default compaction policy created one and only one new compacted file
Path dstPath = store.getRegionFileSystem().createTempName();
- FSDataOutputStream stream = fs.create(dstPath, null, true, 512, (short)3, (long)1024, null);
+ FSDataOutputStream stream = fs.create(dstPath, null, true, 512, (short)3, 1024L, null);
stream.writeChars("CORRUPT FILE!!!!");
stream.close();
Path origPath = store.getRegionFileSystem().commitStoreFile(
@@ -390,7 +391,7 @@ public class TestCompaction {
class StoreMockMaker extends StatefulStoreMockMaker {
public ArrayList<HStoreFile> compacting = new ArrayList<>();
public ArrayList<HStoreFile> notCompacting = new ArrayList<>();
- private ArrayList<Integer> results;
+ private final ArrayList<Integer> results;
public StoreMockMaker(ArrayList<Integer> results) {
this.results = results;
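The thirdRowBytes rewrite above targets error-prone's narrowing-compound-assignment warning: byte += int compiles only because the compiler inserts an implicit cast back to byte, hiding a possible wrap-around. The patch spells that cast out. The behavior it makes visible:

public class ByteNarrowingExample {
  public static void main(String[] args) {
    byte b = 127;            // Byte.MAX_VALUE
    b += 2;                  // compiles: implicitly (byte) (b + 2)
    System.out.println(b);   // prints: -127, the wrap-around the cast was hiding

    byte c = 127;
    c = (byte) (c + 2);      // the patch's form: the truncation is explicit
    System.out.println(c);   // prints: -127 as well, but now deliberately
  }
}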
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
index d2e1866..e9f381e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveConcurrentClose.java
@@ -125,6 +125,7 @@ public class TestCompactionArchiveConcurrentClose {
// now run the cleaner with a concurrent close
Thread cleanerThread = new Thread() {
+ @Override
public void run() {
cleaner.chore();
}
@@ -138,6 +139,7 @@ public class TestCompactionArchiveConcurrentClose {
}
final AtomicReference<Exception> closeException = new AtomicReference<>();
Thread closeThread = new Thread() {
+ @Override
public void run() {
// wait for the chore to complete and call close
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
index 112fe4d..0dafd80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java
@@ -141,7 +141,7 @@ public class TestCompactionFileNotFound {
}
});
// Split at this point should not result in the RS being aborted
- assertEquals(util.getMiniHBaseCluster().getLiveRegionServerThreads().size(), 3);
+ assertEquals(3, util.getMiniHBaseCluster().getLiveRegionServerThreads().size());
} finally {
if (admin != null) {
admin.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index b89fb0e..53e04e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
@@ -575,7 +575,7 @@ public class TestDefaultMemStore {
Cell nr = ((DefaultMemStore) this.memstore).getNextRow(new KeyValue(Bytes.toBytes(i),
System.currentTimeMillis()));
if (i + 1 == ROW_COUNT) {
- assertEquals(nr, null);
+ assertNull(nr);
} else {
assertTrue(CellComparatorImpl.COMPARATOR.compareRows(nr,
new KeyValue(Bytes.toBytes(i + 1), System.currentTimeMillis())) == 0);
@@ -863,7 +863,7 @@ public class TestDefaultMemStore {
EnvironmentEdgeManager.injectEdge(edge);
DefaultMemStore memstore = new DefaultMemStore();
long t = memstore.timeOfOldestEdit();
- assertEquals(t, Long.MAX_VALUE);
+ assertEquals(Long.MAX_VALUE, t);
// test the case that the timeOfOldestEdit is updated after a KV add
memstore.add(KeyValueTestUtil.create("r", "f", "q", 100, "v"), null);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
index 642b471..aa38b7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
@@ -168,7 +169,7 @@ public class TestGetClosestAtOrBefore {
byte [] metaKey = HRegionInfo.createRegionName(
tableb, tofindBytes,
HConstants.NINES, false);
- LOG.info("find=" + new String(metaKey));
+ LOG.info("find=" + new String(metaKey, StandardCharsets.UTF_8));
Result r = UTIL.getClosestRowBefore(mr, metaKey, HConstants.CATALOG_FAMILY);
if (answer == -1) {
assertNull(r);
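new String(byte[]) decodes with the platform default charset, so the same bytes can render differently across machines and locales; the hunk above pins the decoding to UTF-8. A minimal sketch of the fixed form:

import java.nio.charset.StandardCharsets;

public class CharsetExample {
  public static void main(String[] args) {
    byte[] metaKey = {104, 98, 97, 115, 101};  // "hbase" in UTF-8

    String platformDependent = new String(metaKey);  // varies with the JVM default charset
    String deterministic = new String(metaKey, StandardCharsets.UTF_8);

    System.out.println(platformDependent);
    System.out.println(deterministic);  // always "hbase"
  }
}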
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 3c11b31..ee11075 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -43,6 +43,7 @@ import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
@@ -669,7 +670,7 @@ public class TestHRegion {
MonitoredTask status = TaskMonitor.get().createStatus(method);
Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (HStore store : region.getStores()) {
- maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), minSeqId - 1);
+ maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), minSeqId - 1);
}
long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, status);
assertEquals(maxSeqId, seqId);
@@ -721,7 +722,7 @@ public class TestHRegion {
MonitoredTask status = TaskMonitor.get().createStatus(method);
Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (HStore store : region.getStores()) {
- maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), recoverSeqId - 1);
+ maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), recoverSeqId - 1);
}
long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, status);
assertEquals(maxSeqId, seqId);
@@ -766,7 +767,7 @@ public class TestHRegion {
Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (HStore store : region.getStores()) {
- maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), minSeqId);
+ maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), minSeqId);
}
long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, null);
assertEquals(minSeqId, seqId);
@@ -824,7 +825,7 @@ public class TestHRegion {
Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
MonitoredTask status = TaskMonitor.get().createStatus(method);
for (HStore store : region.getStores()) {
- maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), recoverSeqId - 1);
+ maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), recoverSeqId - 1);
}
long seqId = region.replayRecoveredEditsIfAny(regiondir, maxSeqIdInStores, null, status);
assertEquals(maxSeqId, seqId);
@@ -1070,7 +1071,7 @@ public class TestHRegion {
}
}
- class IsFlushWALMarker implements ArgumentMatcher<WALEdit> {
+ static class IsFlushWALMarker implements ArgumentMatcher<WALEdit> {
volatile FlushAction[] actions;
public IsFlushWALMarker(FlushAction... actions) {
this.actions = actions;
@@ -2192,7 +2193,7 @@ public class TestHRegion {
deleteMap.put(family, kvs);
region.delete(deleteMap, Durability.SYNC_WAL);
} catch (Exception e) {
- assertTrue("Family " + new String(family) + " does not exist", false);
+ fail("Family " + new String(family, StandardCharsets.UTF_8) + " does not exist");
}
// testing non existing family
@@ -2205,7 +2206,8 @@ public class TestHRegion {
} catch (Exception e) {
ok = true;
}
- assertEquals("Family " + new String(family) + " does exist", true, ok);
+ assertEquals("Family " + new String(family, StandardCharsets.UTF_8) + " does exist",
+ true, ok);
} finally {
HBaseTestingUtility.closeRegionAndWAL(this.region);
this.region = null;
@@ -3467,18 +3469,18 @@ public class TestHRegion {
List<Cell> results = new ArrayList<>();
assertTrue(s.next(results));
- assertEquals(results.size(), 1);
+ assertEquals(1, results.size());
results.clear();
assertTrue(s.next(results));
- assertEquals(results.size(), 3);
+ assertEquals(3, results.size());
assertTrue("orderCheck", CellUtil.matchingFamily(results.get(0), cf_alpha));
assertTrue("orderCheck", CellUtil.matchingFamily(results.get(1), cf_essential));
assertTrue("orderCheck", CellUtil.matchingFamily(results.get(2), cf_joined));
results.clear();
assertFalse(s.next(results));
- assertEquals(results.size(), 0);
+ assertEquals(0, results.size());
} finally {
HBaseTestingUtility.closeRegionAndWAL(this.region);
this.region = null;
@@ -3564,16 +3566,19 @@ public class TestHRegion {
while (true) {
boolean more = s.next(results, scannerContext);
if ((index >> 1) < 5) {
- if (index % 2 == 0)
- assertEquals(results.size(), 3);
- else
- assertEquals(results.size(), 1);
- } else
- assertEquals(results.size(), 1);
+ if (index % 2 == 0) {
+ assertEquals(3, results.size());
+ } else {
+ assertEquals(1, results.size());
+ }
+ } else {
+ assertEquals(1, results.size());
+ }
results.clear();
index++;
- if (!more)
+ if (!more) {
break;
+ }
}
} finally {
HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -4448,7 +4453,7 @@ public class TestHRegion {
// after all increment finished, the row will increment to 20*100 = 2000
int threadNum = 20;
int incCounter = 100;
- long expected = threadNum * incCounter;
+ long expected = (long) threadNum * incCounter;
Thread[] incrementers = new Thread[threadNum];
Thread flushThread = new Thread(flusher);
for (int i = 0; i < threadNum; i++) {
@@ -4470,7 +4475,7 @@ public class TestHRegion {
List<Cell> kvs = res.getColumnCells(Incrementer.family, Incrementer.qualifier);
// we just got the latest version
- assertEquals(kvs.size(), 1);
+ assertEquals(1, kvs.size());
Cell kv = kvs.get(0);
assertEquals(expected, Bytes.toLong(kv.getValueArray(), kv.getValueOffset()));
this.region = null;
@@ -4561,7 +4566,7 @@ public class TestHRegion {
List<Cell> kvs = res.getColumnCells(Appender.family, Appender.qualifier);
// we just got the latest version
- assertEquals(kvs.size(), 1);
+ assertEquals(1, kvs.size());
Cell kv = kvs.get(0);
byte[] appendResult = new byte[kv.getValueLength()];
System.arraycopy(kv.getValueArray(), kv.getValueOffset(), appendResult, 0, kv.getValueLength());
@@ -6150,7 +6155,7 @@ public class TestHRegion {
r = region.get(new Get(row));
byte[] val = r.getValue(fam1, q1);
assertNotNull(val);
- assertEquals(Bytes.toLong(val), 1L);
+ assertEquals(1L, Bytes.toLong(val));
// Increment with a TTL of 5 seconds
Increment incr = new Increment(row).addColumn(fam1, q1, 1L);
@@ -6161,7 +6166,7 @@ public class TestHRegion {
r = region.get(new Get(row));
val = r.getValue(fam1, q1);
assertNotNull(val);
- assertEquals(Bytes.toLong(val), 2L);
+ assertEquals(2L, Bytes.toLong(val));
// Increment time to T+25 seconds
edge.incrementTime(5000);
@@ -6170,7 +6175,7 @@ public class TestHRegion {
r = region.get(new Get(row));
val = r.getValue(fam1, q1);
assertNotNull(val);
- assertEquals(Bytes.toLong(val), 1L);
+ assertEquals(1L, Bytes.toLong(val));
// Increment time to T+30 seconds
edge.incrementTime(5000);
@@ -6199,14 +6204,14 @@ public class TestHRegion {
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
- assertEquals(c.getTimestamp(), 10L);
+ assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
region.increment(inc);
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
- assertEquals(c.getTimestamp(), 11L);
- assertEquals(Bytes.toLong(c.getValueArray(), c.getValueOffset(), c.getValueLength()), 2L);
+ assertEquals(11L, c.getTimestamp());
+ assertEquals(2L, Bytes.toLong(c.getValueArray(), c.getValueOffset(), c.getValueLength()));
}
@Test
@@ -6224,13 +6229,13 @@ public class TestHRegion {
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
- assertEquals(c.getTimestamp(), 10L);
+ assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
region.append(a);
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
- assertEquals(c.getTimestamp(), 11L);
+ assertEquals(11L, c.getTimestamp());
byte[] expected = new byte[qual1.length*2];
System.arraycopy(qual1, 0, expected, 0, qual1.length);
@@ -6255,7 +6260,7 @@ public class TestHRegion {
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
- assertEquals(c.getTimestamp(), 10L);
+ assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
p = new Put(row);
@@ -6264,7 +6269,7 @@ public class TestHRegion {
region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL, new BinaryComparator(qual1), p, false);
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
- assertEquals(c.getTimestamp(), 10L);
+ assertEquals(10L, c.getTimestamp());
assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
qual2, 0, qual2.length));
@@ -6304,9 +6309,9 @@ public class TestHRegion {
};
OperationStatus[] status = region.batchMutate(mutations);
- assertEquals(status[0].getOperationStatusCode(), OperationStatusCode.SUCCESS);
- assertEquals(status[1].getOperationStatusCode(), OperationStatusCode.SANITY_CHECK_FAILURE);
- assertEquals(status[2].getOperationStatusCode(), OperationStatusCode.SUCCESS);
+ assertEquals(OperationStatusCode.SUCCESS, status[0].getOperationStatusCode());
+ assertEquals(OperationStatusCode.SANITY_CHECK_FAILURE, status[1].getOperationStatusCode());
+ assertEquals(OperationStatusCode.SUCCESS, status[2].getOperationStatusCode());
// test with a row lock held for a long time
@@ -6347,8 +6352,8 @@ public class TestHRegion {
// this will wait for the row lock, and it will eventually succeed
OperationStatus[] status = region.batchMutate(mutations);
- assertEquals(status[0].getOperationStatusCode(), OperationStatusCode.SUCCESS);
- assertEquals(status[1].getOperationStatusCode(), OperationStatusCode.SUCCESS);
+ assertEquals(OperationStatusCode.SUCCESS, status[0].getOperationStatusCode());
+ assertEquals(OperationStatusCode.SUCCESS, status[1].getOperationStatusCode());
return null;
}
});
@@ -6374,7 +6379,7 @@ public class TestHRegion {
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
- assertEquals(c.getTimestamp(), 10L);
+ assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
p = new Put(row);
@@ -6386,7 +6391,7 @@ public class TestHRegion {
new BinaryComparator(qual1), rm, false));
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
- assertEquals(c.getTimestamp(), 10L);
+ assertEquals(10L, c.getTimestamp());
LOG.info("c value " +
Bytes.toStringBinary(c.getValueArray(), c.getValueOffset(), c.getValueLength()));
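Among the many assertEquals reorderings in this file, one fix is substantive: long expected = threadNum * incCounter multiplies two ints in 32-bit arithmetic and only then widens the wrapped result, so large operands would overflow before the assignment. Casting one operand first, as the patch does, performs the multiplication in 64 bits. Illustrated with operands larger than the test's (invented for the example):

public class IntOverflowExample {
  public static void main(String[] args) {
    int threads = 100_000;
    int increments = 100_000;

    long wrong = threads * increments;         // int math overflows, then widens
    long right = (long) threads * increments;  // widen first, multiply in 64 bits

    System.out.println(wrong);  // prints: 1410065408 (wrapped)
    System.out.println(right);  // prints: 10000000000
  }
}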
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 3c25f6b..dcd7ff7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -232,7 +232,7 @@ public class TestHRegionReplayEvents {
// flush region
FlushResultImpl flush = (FlushResultImpl)secondaryRegion.flush(true);
- assertEquals(flush.result, FlushResultImpl.Result.CANNOT_FLUSH);
+ assertEquals(FlushResultImpl.Result.CANNOT_FLUSH, flush.result);
verifyData(secondaryRegion, 0, 1000, cq, families);
@@ -1207,13 +1207,13 @@ public class TestHRegionReplayEvents {
// primary region is empty at this point. Request a flush with writeFlushRequestWalMarker=false
FlushResultImpl result = primaryRegion.flushcache(true, false, FlushLifeCycleTracker.DUMMY);
assertNotNull(result);
- assertEquals(result.result, FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY);
+ assertEquals(FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY, result.result);
assertFalse(result.wroteFlushWalMarker);
// request flush again, but this time with writeFlushRequestWalMarker = true
result = primaryRegion.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
assertNotNull(result);
- assertEquals(result.result, FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY);
+ assertEquals(FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY, result.result);
assertTrue(result.wroteFlushWalMarker);
List<FlushDescriptor> flushes = Lists.newArrayList();
@@ -1644,7 +1644,7 @@ public class TestHRegionReplayEvents {
hFileFactory.withFileContext(new HFileContext());
HFile.Writer writer = hFileFactory.create();
try {
- writer.append(new KeyValue(CellUtil.createCell(valueBytes, family, valueBytes, 0l,
+ writer.append(new KeyValue(CellUtil.createCell(valueBytes, family, valueBytes, 0L,
KeyValue.Type.Put.getCode(), valueBytes)));
} finally {
writer.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index b6b39bc..034d9d7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -186,6 +186,7 @@ public class TestHRegionServerBulkLoad {
this.tableName = tableName;
}
+ @Override
public void doAnAction() throws Exception {
long iteration = numBulkLoads.getAndIncrement();
Path dir = UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d",
@@ -295,6 +296,7 @@ public class TestHRegionServerBulkLoad {
table = UTIL.getConnection().getTable(TABLE_NAME);
}
+ @Override
public void doAnAction() throws Exception {
Scan s = new Scan();
for (byte[] family : targetFamilies) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
index b8d3ec7..288333b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
@@ -756,7 +756,7 @@ public class TestHStore {
}
@Override
- public void write(byte[] buf, int offset, int length) throws IOException {
+ public synchronized void write(byte[] buf, int offset, int length) throws IOException {
System.err.println("faulty stream write at pos " + getPos());
injectFault();
super.write(buf, offset, length);
@@ -1551,7 +1551,7 @@ public class TestHStore {
ColumnFamilyDescriptorBuilder.newBuilder(family).setMaxVersions(5).build(), hook);
}
- private class MyStore extends HStore {
+ private static class MyStore extends HStore {
private final MyStoreHook hook;
MyStore(final HRegion region, final ColumnFamilyDescriptor family, final Configuration
@@ -1576,7 +1576,7 @@ public class TestHStore {
}
}
- private abstract class MyStoreHook {
+ private abstract static class MyStoreHook {
void getScanners(MyStore store) throws IOException {
}
@@ -1595,7 +1595,7 @@ public class TestHStore {
MyStore store = initMyStore(name.getMethodName(), conf, new MyStoreHook() {});
MemStoreSizing memStoreSizing = new MemStoreSizing();
long ts = System.currentTimeMillis();
- long seqID = 1l;
+ long seqID = 1L;
// Add some data to the region and do some flushes
for (int i = 1; i < 10; i++) {
store.add(createCell(Bytes.toBytes("row" + i), qf1, ts, seqID++, Bytes.toBytes("")),
@@ -1663,6 +1663,7 @@ public class TestHStore {
return this.heap;
}
+ @Override
public void run() {
scanner.trySwitchToStreamRead();
heap = scanner.heap;
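Making the faulty stream's write(byte[], int, int) override synchronized is the usual remedy when the overridden class guards its state with locks and an override silently drops them, the inconsistent-synchronization situation findbugs reports. A hedged sketch of that hazard (classes invented for illustration, not the test's FSDataOutputStream subclass):

public class SynchronizedOverrideExample {
  static class SafeSink {
    protected long pos;

    public synchronized void write(byte[] buf, int off, int len) {
      pos += len;  // guarded by this lock
    }

    public synchronized long getPos() {
      return pos;
    }
  }

  static class FaultySink extends SafeSink {
    @Override
    public synchronized void write(byte[] buf, int off, int len) {
      // keeping the override synchronized preserves the parent's
      // locking discipline before delegating
      super.write(buf, off, len);
    }
  }

  public static void main(String[] args) {
    FaultySink sink = new FaultySink();
    sink.write(new byte[4], 0, 4);
    System.out.println(sink.getPos());  // prints: 4
  }
}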
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 31f16ea..1a38d7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -90,11 +90,13 @@ public class TestHStoreFile extends HBaseTestCase {
private static final int CKBYTES = 512;
private static String TEST_FAMILY = "cf";
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
}
+ @Override
@After
public void tearDown() throws Exception {
super.tearDown();
@@ -505,8 +507,8 @@ public class TestHStoreFile extends HBaseTestCase {
long now = System.currentTimeMillis();
for (int i = 0; i < 2000; i += 2) {
String row = String.format(localFormatter, i);
- KeyValue kv = new KeyValue(row.getBytes(), "family".getBytes(),
- "col".getBytes(), now, "value".getBytes());
+ KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
+ Bytes.toBytes("col"), now, Bytes.toBytes("value"));
writer.append(kv);
}
writer.close();
@@ -523,12 +525,13 @@ public class TestHStoreFile extends HBaseTestCase {
for (int i = 0; i < 2000; i++) {
String row = String.format(localFormatter, i);
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
- columns.add("family:col".getBytes());
+ columns.add(Bytes.toBytes("family:col"));
- Scan scan = new Scan(row.getBytes(),row.getBytes());
- scan.addColumn("family".getBytes(), "family:col".getBytes());
+ Scan scan = new Scan(Bytes.toBytes(row),Bytes.toBytes(row));
+ scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("family:col"));
HStore store = mock(HStore.class);
- when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
+ when(store.getColumnFamilyDescriptor())
+ .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
if (i % 2 == 0) {
if (!exists) falseNeg++;
@@ -592,8 +595,8 @@ public class TestHStoreFile extends HBaseTestCase {
long now = System.currentTimeMillis();
for (int i = 0; i < 2000; i += 2) {
String row = String.format(localFormatter, i);
- KeyValue kv = new KeyValue(row.getBytes(), "family".getBytes(),
- "col".getBytes(), now, KeyValue.Type.DeleteFamily, "value".getBytes());
+ KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
+ Bytes.toBytes("col"), now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value"));
writer.append(kv);
}
writer.close();
@@ -696,9 +699,8 @@ public class TestHStoreFile extends HBaseTestCase {
String row = String.format(localFormatter, i);
String col = String.format(localFormatter, j);
for (int k= 0; k < versions; ++k) { // versions
- KeyValue kv = new KeyValue(row.getBytes(),
- "family".getBytes(), ("col" + col).getBytes(),
- now-k, Bytes.toBytes((long)-1));
+ KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
+ Bytes.toBytes("col" + col), now-k, Bytes.toBytes(-1L));
writer.append(kv);
}
}
@@ -713,7 +715,8 @@ public class TestHStoreFile extends HBaseTestCase {
assertEquals(expKeys[x], reader.generalBloomFilter.getKeyCount());
HStore store = mock(HStore.class);
- when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
+ when(store.getColumnFamilyDescriptor())
+ .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
// check false positives rate
int falsePos = 0;
int falseNeg = 0;
@@ -722,10 +725,10 @@ public class TestHStoreFile extends HBaseTestCase {
String row = String.format(localFormatter, i);
String col = String.format(localFormatter, j);
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
- columns.add(("col" + col).getBytes());
+ columns.add(Bytes.toBytes("col" + col));
- Scan scan = new Scan(row.getBytes(),row.getBytes());
- scan.addColumn("family".getBytes(), ("col"+col).getBytes());
+ Scan scan = new Scan(Bytes.toBytes(row),Bytes.toBytes(row));
+ scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes(("col"+col)));
boolean exists =
scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
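The remaining churn in this file is the same conversion applied throughout the commit: String.getBytes() with no argument encodes in the platform default charset, while HBase's Bytes.toBytes(String) always encodes UTF-8, so row keys and qualifiers built in tests match what was written regardless of locale. The equivalence, sketched with a stand-in so the example runs without HBase on the classpath:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class ToBytesExample {
  // Stand-in for org.apache.hadoop.hbase.util.Bytes.toBytes(String),
  // which encodes UTF-8.
  static byte[] toBytes(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    String col = "family:col";
    byte[] platformDependent = col.getBytes();  // varies with file.encoding
    byte[] utf8 = toBytes(col);                 // deterministic

    // Equal on UTF-8-default platforms; may differ elsewhere.
    System.out.println(Arrays.equals(platformDependent, utf8));
  }
}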
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
index 24ab7a1..d9956de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
@@ -88,6 +88,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
return actual;
}
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
@@ -185,7 +186,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
for (KeyValueScanner scanner : scanners) {
// Verify that close is called and only called once for each scanner
assertTrue(((SeekTestScanner) scanner).isClosed());
- assertEquals(((SeekTestScanner) scanner).getClosedNum(), 1);
+ assertEquals(1, ((SeekTestScanner) scanner).getClosedNum());
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
index 2684bdf..0840a63 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
@@ -108,7 +108,8 @@ public class TestMajorCompaction {
// Increment the least significant character so we get to next row.
secondRowBytes[START_KEY_BYTES.length - 1]++;
thirdRowBytes = START_KEY_BYTES.clone();
- thirdRowBytes[START_KEY_BYTES.length - 1] += 2;
+ thirdRowBytes[START_KEY_BYTES.length - 1] =
+ (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
}
@Before
@@ -277,7 +278,7 @@ public class TestMajorCompaction {
// Force major compaction.
r.compact(true);
- assertEquals(r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size(), 1);
+ assertEquals(1, r.getStore(COLUMN_FAMILY_TEXT).getStorefiles().size());
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertTrue("Second row should still be deleted", result.isEmpty());
@@ -398,8 +399,8 @@ public class TestMajorCompaction {
private void createSmallerStoreFile(final HRegion region) throws IOException {
Table loader = new RegionAsTable(region);
- HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), ("" +
- "bbb").getBytes(), null);
+ HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" +
+ "bbb"), null);
region.flush(true);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
index 8d74d8d..795fa2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
@@ -62,8 +62,8 @@ public class TestMemStoreLAB {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
- ChunkCreator.initialize(1 * 1024, false, 50*1024000l, 0.2f, MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT,
- null);
+ ChunkCreator.initialize(1 * 1024, false, 50 * 1024000L, 0.2f,
+ MemStoreLAB.POOL_INITIAL_SIZE_DEFAULT, null);
}
@AfterClass
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
index 98b0761..4ae92a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
@@ -76,7 +76,8 @@ public class TestMinorCompaction {
// Increment the least significant character so we get to next row.
secondRowBytes[START_KEY_BYTES.length - 1]++;
thirdRowBytes = START_KEY_BYTES.clone();
- thirdRowBytes[START_KEY_BYTES.length - 1] += 2;
+ thirdRowBytes[START_KEY_BYTES.length - 1] =
+ (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
col1 = Bytes.toBytes("column1");
col2 = Bytes.toBytes("column2");
}
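Note that secondRowBytes[...]++ a couple of lines earlier is left untouched while the += 2 is rewritten, here and in TestMajorCompaction alike. The increment performs exactly the same implicit byte narrowing, so the warning being fixed presumably targets compound assignment operators specifically rather than ++. At runtime the two forms are equivalent, as this illustrative snippet shows:

  public class IncrementDemo {
    public static void main(String[] args) {
      byte x = 126, y = 126;
      x++;                 // implicit (byte) narrowing, same as the line below
      y = (byte) (y + 1);  // the spelled-out equivalent
      System.out.println(x == y);  // true
    }
  }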
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
index 084c4c91..a09c8cd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java
@@ -46,6 +46,7 @@ public class TestMultiVersionConcurrencyControl {
private Random rnd = new Random();
public boolean failed = false;
+ @Override
public void run() {
while (!finished.get()) {
MultiVersionConcurrencyControl.WriteEntry e =
@@ -82,6 +83,7 @@ public class TestMultiVersionConcurrencyControl {
final AtomicBoolean readerFailed = new AtomicBoolean(false);
final AtomicLong failedAt = new AtomicLong();
Runnable reader = new Runnable() {
+ @Override
public void run() {
long prev = mvcc.getReadPoint();
while (!finished.get()) {
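Both additions in this file are the same fix: annotating run() with @Override, presumably for error-prone's MissingOverride check. The annotation asks the compiler to verify that the method really implements a supertype method, so a typo in the name or signature becomes a compile error instead of a silently dead method. A minimal sketch:

  public class OverrideDemo {
    public static void main(String[] args) {
      Runnable r = new Runnable() {
        @Override  // compiler verifies this actually implements Runnable.run()
        public void run() {
          System.out.println("running");
        }
        // A misspelling such as "public void runn()" would still compile on
        // its own, but @Override on it would be rejected at compile time.
      };
      r.run();
    }
  }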
http://git-wip-us.apache.org/repos/asf/hbase/blob/b1269ec5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
index a8b0f22..82b0251 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
@@ -24,30 +24,31 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.ipc.PriorityFunction;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.ipc.PriorityFunction;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-
-import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
-import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Tests that verify certain RPCs get a higher QoS.
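The import shuffle above is a checkstyle/import-order fix rather than a behavioural change. Reading the rewritten block, the layout appears to be: static imports first, then java.*, then all other imports alphabetically (org.apache.hadoop..., org.apache.hbase.thirdparty..., org.junit, org.mockito), with the shaded generated protobuf classes split off into a trailing group of their own. A hypothetical new test following the same layout would open roughly like this:

  import static org.junit.Assert.assertEquals;

  import java.io.IOException;

  import org.apache.hadoop.hbase.TableName;
  import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
  import org.junit.Test;
  import org.mockito.Mockito;

  import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;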
@@ -85,7 +86,7 @@ public class TestPriorityRpc {
RegionSpecifier regionSpecifier = regionSpecifierBuilder.build();
getRequestBuilder.setRegion(regionSpecifier);
Get.Builder getBuilder = Get.newBuilder();
- getBuilder.setRow(UnsafeByteOperations.unsafeWrap("somerow".getBytes()));
+ getBuilder.setRow(UnsafeByteOperations.unsafeWrap(Bytes.toBytes("somerow")));
getRequestBuilder.setGet(getBuilder.build());
GetRequest getRequest = getRequestBuilder.build();
RequestHeader header = headerBuilder.build();
@@ -96,7 +97,8 @@ public class TestPriorityRpc {
RegionInfo mockRegionInfo = Mockito.mock(RegionInfo.class);
Mockito.when(mockRpc.getRegion(Mockito.any())).thenReturn(mockRegion);
Mockito.when(mockRegion.getRegionInfo()).thenReturn(mockRegionInfo);
- Mockito.when(mockRegionInfo.getTable()).thenReturn(RegionInfoBuilder.FIRST_META_REGIONINFO.getTable());
+ Mockito.when(mockRegionInfo.getTable())
+ .thenReturn(RegionInfoBuilder.FIRST_META_REGIONINFO.getTable());
// Presume type.
((AnnotationReadingPriorityFunction)priority).setRegionServer(mockRS);
assertEquals(HConstants.SYSTEMTABLE_QOS, priority.getPriority(header, getRequest,