Posted to commits@hbase.apache.org by ap...@apache.org on 2017/11/18 01:22:33 UTC
[05/30] hbase git commit: HBASE-19239 Fix findbugs and error-prone issues
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index dd06995..9bc18a4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -87,10 +87,12 @@ public class SkipFilter extends FilterBase {
return filter.transformCell(v);
}
+ @Override
public boolean filterRow() {
return filterRow;
}
+ @Override
public boolean hasFilterRow() {
return true;
}
@@ -98,6 +100,7 @@ public class SkipFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() throws IOException {
FilterProtos.SkipFilter.Builder builder =
FilterProtos.SkipFilter.newBuilder();
@@ -131,6 +134,7 @@ public class SkipFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof SkipFilter)) return false;
@@ -139,6 +143,7 @@ public class SkipFilter extends FilterBase {
return getFilter().areSerializedFieldsEqual(other.getFilter());
}
+ @Override
public boolean isFamilyEssential(byte[] name) throws IOException {
return filter.isFamilyEssential(name);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
index 1f0043c..6c872f3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
@@ -71,6 +71,7 @@ public class SubstringComparator extends ByteArrayComparable {
/**
* @return The comparator serialized using pb
*/
+ @Override
public byte [] toByteArray() {
ComparatorProtos.SubstringComparator.Builder builder =
ComparatorProtos.SubstringComparator.newBuilder();
@@ -100,6 +101,7 @@ public class SubstringComparator extends ByteArrayComparable {
* @return true if and only if the fields of the comparator that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (!(other instanceof SubstringComparator)) return false;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index be5a0f6..f28560b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -124,6 +124,7 @@ public class TimestampsFilter extends FilterBase {
*
* @throws IOException This will never happen.
*/
+ @Override
public Cell getNextCellHint(Cell currentCell) throws IOException {
if (!canHint) {
return null;
@@ -168,6 +169,7 @@ public class TimestampsFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() {
FilterProtos.TimestampsFilter.Builder builder =
FilterProtos.TimestampsFilter.newBuilder();
@@ -199,6 +201,7 @@ public class TimestampsFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof TimestampsFilter)) return false;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index 5a46d7a..952d64e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -77,6 +77,7 @@ public class ValueFilter extends CompareFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.ValueFilter.Builder builder =
FilterProtos.ValueFilter.newBuilder();
@@ -116,6 +117,7 @@ public class ValueFilter extends CompareFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof ValueFilter)) return false;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index 7263e1b..1cefe46 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -52,6 +52,7 @@ public class WhileMatchFilter extends FilterBase {
return filter;
}
+ @Override
public void reset() throws IOException {
this.filter.reset();
}
@@ -99,6 +100,7 @@ public class WhileMatchFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() throws IOException {
FilterProtos.WhileMatchFilter.Builder builder =
FilterProtos.WhileMatchFilter.newBuilder();
@@ -132,6 +134,7 @@ public class WhileMatchFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof WhileMatchFilter)) return false;
@@ -140,6 +143,7 @@ public class WhileMatchFilter extends FilterBase {
return getFilter().areSerializedFieldsEqual(other.getFilter());
}
+ @Override
public boolean isFamilyEssential(byte[] name) throws IOException {
return filter.isFamilyEssential(name);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index caa19b8..ebbf9e0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -242,7 +242,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
return null;
}
try {
- return (Codec) Class.forName(className).newInstance();
+ return (Codec) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException("Failed getting codec " + className, e);
}
@@ -270,7 +270,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
return null;
}
try {
- return (CompressionCodec) Class.forName(className).newInstance();
+ return (CompressionCodec) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException("Failed getting compressor " + className, e);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
index d27602e..4a83fdd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
@@ -67,6 +67,7 @@ public class BlockingRpcClient extends AbstractRpcClient<BlockingRpcConnection>
* Creates a connection. Can be overridden by a subclass for testing.
* @param remoteId - the ConnectionId to use for the connection creation.
*/
+ @Override
protected BlockingRpcConnection createConnection(ConnectionId remoteId) throws IOException {
return new BlockingRpcConnection(this, remoteId);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
index 33fc880..cf84c5a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
@@ -57,20 +57,49 @@ public class ConnectionId {
}
@Override
- public boolean equals(Object obj) {
- if (obj instanceof ConnectionId) {
- ConnectionId id = (ConnectionId) obj;
- return address.equals(id.address) &&
- ((ticket != null && ticket.equals(id.ticket)) ||
- (ticket == id.ticket)) &&
- this.serviceName == id.serviceName;
- }
- return false;
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((address == null) ? 0 : address.hashCode());
+ result = prime * result + ((serviceName == null) ? 0 : serviceName.hashCode());
+ result = prime * result + ((ticket == null) ? 0 : ticket.hashCode());
+ return result;
}
- @Override // simply use the default Object#hashcode() ?
- public int hashCode() {
- return hashCode(ticket,serviceName,address);
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ ConnectionId other = (ConnectionId) obj;
+ if (address == null) {
+ if (other.address != null) {
+ return false;
+ }
+ } else if (!address.equals(other.address)) {
+ return false;
+ }
+ if (serviceName == null) {
+ if (other.serviceName != null) {
+ return false;
+ }
+ } else if (!serviceName.equals(other.serviceName)) {
+ return false;
+ }
+ if (ticket == null) {
+ if (other.ticket != null) {
+ return false;
+ }
+ } else if (!ticket.equals(other.ticket)) {
+ return false;
+ }
+ return true;
}
public static int hashCode(User ticket, String serviceName, InetSocketAddress address){
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
index 5b8498d..ab7c2a3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
@@ -446,10 +446,9 @@ public final class ResponseConverter {
public static Map<String, Long> getScanMetrics(ScanResponse response) {
Map<String, Long> metricMap = new HashMap<String, Long>();
- if (response == null || !response.hasScanMetrics() || response.getScanMetrics() == null) {
+ if (response == null || !response.hasScanMetrics()) {
return metricMap;
}
-
ScanMetrics metrics = response.getScanMetrics();
int numberOfMetrics = metrics.getMetricsCount();
for (int i = 0; i < numberOfMetrics; i++) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
index 70e4356..7b6b546 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
@@ -67,6 +67,7 @@ public final class QuotaRetriever implements Closeable, Iterable<QuotaSettings>
}
}
+ @Override
public void close() throws IOException {
if (this.table != null) {
this.table.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
index a4a9720..73a8f30 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class RegionServerRunningException extends IOException {
- private static final long serialVersionUID = 1L << 31 - 1L;
+ private static final long serialVersionUID = (1L << 31) - 1L;
/** Default Constructor */
public RegionServerRunningException() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index c2999ec..6fefb36 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -39,14 +39,12 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.KeeperException.NoNodeException;
/**
* This class provides an implementation of the ReplicationPeers interface using Zookeeper. The
@@ -80,14 +78,12 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
// Map of peer clusters keyed by their id
private Map<String, ReplicationPeerZKImpl> peerClusters;
private final ReplicationQueuesClient queuesClient;
- private Abortable abortable;
private static final Log LOG = LogFactory.getLog(ReplicationPeersZKImpl.class);
public ReplicationPeersZKImpl(final ZooKeeperWatcher zk, final Configuration conf,
final ReplicationQueuesClient queuesClient, Abortable abortable) {
super(zk, conf, abortable);
- this.abortable = abortable;
this.peerClusters = new ConcurrentHashMap<String, ReplicationPeerZKImpl>();
this.queuesClient = queuesClient;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
index f9f2d43..8e4871d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
@@ -102,6 +102,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when a new node has been created.
* @param path full path of the new node
*/
+ @Override
public void nodeCreated(String path) {
refreshListIfRightPath(path);
}
@@ -110,6 +111,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when a node has been deleted
* @param path full path of the deleted node
*/
+ @Override
public void nodeDeleted(String path) {
if (stopper.isStopped()) {
return;
@@ -128,6 +130,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when an existing node has a child node added or removed.
* @param path full path of the node whose children have changed
*/
+ @Override
public void nodeChildrenChanged(String path) {
if (stopper.isStopped()) {
return;
@@ -159,6 +162,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when a node has been deleted
* @param path full path of the deleted node
*/
+ @Override
public void nodeDeleted(String path) {
List<String> peers = refreshPeersList(path);
if (peers == null) {
@@ -177,6 +181,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when an existing node has a child node added or removed.
* @param path full path of the node whose children have changed
*/
+ @Override
public void nodeChildrenChanged(String path) {
List<String> peers = refreshPeersList(path);
if (peers == null) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index 54c1701..b26dcac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.security;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.TreeMap;
@@ -67,15 +68,15 @@ public class SaslUtil {
}
static String encodeIdentifier(byte[] identifier) {
- return new String(Base64.encodeBase64(identifier));
+ return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
}
static byte[] decodeIdentifier(String identifier) {
- return Base64.decodeBase64(identifier.getBytes());
+ return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
}
static char[] encodePassword(byte[] password) {
- return new String(Base64.encodeBase64(password)).toCharArray();
+ return new String(Base64.encodeBase64(password), StandardCharsets.UTF_8).toCharArray();
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index 3a01ace..c904eef 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -48,7 +48,7 @@ public class Permission extends VersionedWritable {
public enum Action {
READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A');
- private byte code;
+ private final byte code;
Action(char code) {
this.code = (byte)code;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index fd1a9d5..b06acbd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -132,6 +132,7 @@ public class VisibilityClient {
BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback =
new BlockingRpcCallback<VisibilityLabelsResponse>();
+ @Override
public VisibilityLabelsResponse call(VisibilityLabelsService service)
throws IOException {
VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder();
@@ -217,6 +218,7 @@ public class VisibilityClient {
BlockingRpcCallback<GetAuthsResponse> rpcCallback =
new BlockingRpcCallback<GetAuthsResponse>();
+ @Override
public GetAuthsResponse call(VisibilityLabelsService service) throws IOException {
GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder();
getAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
@@ -268,6 +270,7 @@ public class VisibilityClient {
BlockingRpcCallback<ListLabelsResponse> rpcCallback =
new BlockingRpcCallback<ListLabelsResponse>();
+ @Override
public ListLabelsResponse call(VisibilityLabelsService service) throws IOException {
ListLabelsRequest.Builder listAuthLabelsReqBuilder = ListLabelsRequest.newBuilder();
if (regex != null) {
@@ -332,6 +335,7 @@ public class VisibilityClient {
BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback =
new BlockingRpcCallback<VisibilityLabelsResponse>();
+ @Override
public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException {
SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder();
setAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index 67aaffd..e474b1e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
@@ -298,7 +298,7 @@ public class PoolMap<K, V> implements Map<K, V> {
* the type of the resource
*/
@SuppressWarnings("serial")
- public class ReusablePool<R> extends ConcurrentLinkedQueue<R> implements Pool<R> {
+ public static class ReusablePool<R> extends ConcurrentLinkedQueue<R> implements Pool<R> {
private int maxSize;
public ReusablePool(int maxSize) {
@@ -342,7 +342,7 @@ public class PoolMap<K, V> implements Map<K, V> {
*
*/
@SuppressWarnings("serial")
- class RoundRobinPool<R> extends CopyOnWriteArrayList<R> implements Pool<R> {
+ static class RoundRobinPool<R> extends CopyOnWriteArrayList<R> implements Pool<R> {
private int maxSize;
private int nextResource = 0;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
index 20791de..8075a7a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
@@ -30,5 +30,6 @@ public class EmptyWatcher implements Watcher {
public static final EmptyWatcher instance = new EmptyWatcher();
private EmptyWatcher() {}
+ @Override
public void process(WatchedEvent event) {}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
index 226796a..6d1772d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
@@ -41,6 +41,7 @@ import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
@@ -159,7 +160,7 @@ public class HQuorumPeer {
}
File myIdFile = new File(dataDir, "myid");
- PrintWriter w = new PrintWriter(myIdFile);
+ PrintWriter w = new PrintWriter(myIdFile, StandardCharsets.UTF_8.name());
w.println(myId);
w.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
index cc0f5f2..62dc17d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
@@ -43,6 +43,7 @@ import org.apache.zookeeper.KeeperException;
public class ZKLeaderManager extends ZooKeeperListener {
private static final Log LOG = LogFactory.getLog(ZKLeaderManager.class);
+ private final Object lock = new Object();
private final AtomicBoolean leaderExists = new AtomicBoolean();
private String leaderZNode;
private byte[] nodeId;
@@ -85,14 +86,14 @@ public class ZKLeaderManager extends ZooKeeperListener {
private void handleLeaderChange() {
try {
- synchronized(leaderExists) {
+ synchronized(lock) {
if (ZKUtil.watchAndCheckExists(watcher, leaderZNode)) {
LOG.info("Found new leader for znode: "+leaderZNode);
leaderExists.set(true);
} else {
LOG.info("Leader change, but no new leader found");
leaderExists.set(false);
- leaderExists.notifyAll();
+ lock.notifyAll();
}
}
} catch (KeeperException ke) {
@@ -136,10 +137,10 @@ public class ZKLeaderManager extends ZooKeeperListener {
}
// wait for next chance
- synchronized(leaderExists) {
+ synchronized(lock) {
while (leaderExists.get() && !candidate.isStopped()) {
try {
- leaderExists.wait();
+ lock.wait();
} catch (InterruptedException ie) {
LOG.debug("Interrupted waiting on leader", ie);
}
@@ -153,7 +154,7 @@ public class ZKLeaderManager extends ZooKeeperListener {
*/
public void stepDownAsLeader() {
try {
- synchronized(leaderExists) {
+ synchronized(lock) {
if (!leaderExists.get()) {
return;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index d874768..4f1d87c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -19,11 +19,14 @@
package org.apache.hadoop.hbase.zookeeper;
import java.io.BufferedReader;
+import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.net.Socket;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
@@ -1954,9 +1957,11 @@ public class ZKUtil {
socket.connect(sockAddr, timeout);
socket.setSoTimeout(timeout);
- PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
- BufferedReader in = new BufferedReader(new InputStreamReader(
- socket.getInputStream()));
+ PrintWriter out = new PrintWriter(new BufferedWriter(
+ new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)),
+ true);
+ BufferedReader in = new BufferedReader(
+ new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
out.println("stat");
out.flush();
ArrayList<String> res = new ArrayList<String>();
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index 1966253..a4f9260 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -19,6 +19,9 @@ package org.apache.hadoop.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -77,7 +80,8 @@ public class TestHColumnDescriptor {
public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty()
throws Exception {
try {
- new HColumnDescriptor("".getBytes());
+ new HColumnDescriptor("".getBytes(StandardCharsets.UTF_8));
+ fail("Did not throw");
} catch (IllegalArgumentException e) {
assertEquals("Family name can not be empty", e.getLocalizedMessage());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
index d126994..23d2946 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
@@ -100,7 +100,7 @@ public class TestHTableDescriptor {
assertEquals(v, deserializedHtd.getMaxFileSize());
assertTrue(deserializedHtd.isReadOnly());
assertEquals(Durability.ASYNC_WAL, deserializedHtd.getDurability());
- assertEquals(deserializedHtd.getRegionReplication(), 2);
+ assertEquals(2, deserializedHtd.getRegionReplication());
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
index 0e0fbb0..b19ba36 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -62,7 +62,7 @@ public class TestInterfaceAudienceAnnotations {
private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class);
/** Selects classes with generated in their package name */
- class GeneratedClassFilter implements ClassFinder.ClassFilter {
+ static class GeneratedClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
return c.getPackage().getName().contains("generated");
@@ -181,7 +181,7 @@ public class TestInterfaceAudienceAnnotations {
}
/** Selects classes that are declared public */
- class PublicClassFilter implements ClassFinder.ClassFilter {
+ static class PublicClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
int mod = c.getModifiers();
@@ -190,7 +190,7 @@ public class TestInterfaceAudienceAnnotations {
}
/** Selects paths (jars and class dirs) only from the main code, not test classes */
- class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
+ static class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
@Override
public boolean isCandidatePath(String resourcePath, boolean isJar) {
return !resourcePath.contains("test-classes") &&
@@ -207,7 +207,7 @@ public class TestInterfaceAudienceAnnotations {
* - enclosing class is not an interface
* - name starts with "__CLR"
*/
- class CloverInstrumentationFilter implements ClassFinder.ClassFilter {
+ static class CloverInstrumentationFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> clazz) {
boolean clover = false;
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index 8c0b7df..e0d09a6 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -24,8 +24,10 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InterruptedIOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
@@ -93,10 +95,10 @@ public class TestAsyncProcess {
private final static Log LOG = LogFactory.getLog(TestAsyncProcess.class);
private static final TableName DUMMY_TABLE =
TableName.valueOf("DUMMY_TABLE");
- private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes();
- private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes();
- private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes();
- private static final byte[] FAILS = "FAILS".getBytes();
+ private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] FAILS = "FAILS".getBytes(StandardCharsets.UTF_8);
private static final Configuration conf = new Configuration();
private static ServerName sn = ServerName.valueOf("s1:1,1");
@@ -353,7 +355,8 @@ public class TestAsyncProcess {
return inc.getAndIncrement();
}
}
- class MyAsyncProcessWithReplicas extends MyAsyncProcess {
+
+ static class MyAsyncProcessWithReplicas extends MyAsyncProcess {
private Set<byte[]> failures = new TreeSet<byte[]>(new Bytes.ByteArrayComparator());
private long primarySleepMs = 0, replicaSleepMs = 0;
private Map<ServerName, Long> customPrimarySleepMs = new HashMap<ServerName, Long>();
@@ -625,7 +628,13 @@ public class TestAsyncProcess {
Random rn = new Random();
final long limit = 10 * 1024 * 1024;
final int requestCount = 1 + (int) (rn.nextDouble() * 3);
- long putsHeapSize = Math.abs(rn.nextLong()) % limit;
+ long n = rn.nextLong();
+ if (n < 0) {
+ n = -n;
+ } else if (n == 0) {
+ n = 1;
+ }
+ long putsHeapSize = n % limit;
long maxHeapSizePerRequest = putsHeapSize / requestCount;
LOG.info("[testSubmitRandomSizeRequest] maxHeapSizePerRequest=" + maxHeapSizePerRequest +
", putsHeapSize=" + putsHeapSize);
@@ -747,7 +756,7 @@ public class TestAsyncProcess {
final AsyncRequestFuture ars = ap.submit(DUMMY_TABLE, puts, false, cb, false);
Assert.assertTrue(puts.isEmpty());
ars.waitUntilDone();
- Assert.assertEquals(updateCalled.get(), 1);
+ Assert.assertEquals(1, updateCalled.get());
}
@Test
@@ -759,12 +768,12 @@ public class TestAsyncProcess {
puts.add(createPut(1, true));
for (int i = 0; i != ap.maxConcurrentTasksPerRegion; ++i) {
- ap.incTaskCounters(Arrays.asList(hri1.getRegionName()), sn);
+ ap.incTaskCounters(Collections.singletonList(hri1.getRegionName()), sn);
}
ap.submit(DUMMY_TABLE, puts, false, null, false);
Assert.assertEquals(puts.size(), 1);
- ap.decTaskCounters(Arrays.asList(hri1.getRegionName()), sn);
+ ap.decTaskCounters(Collections.singletonList(hri1.getRegionName()), sn);
ap.submit(DUMMY_TABLE, puts, false, null, false);
Assert.assertEquals(0, puts.size());
}
@@ -945,7 +954,7 @@ public class TestAsyncProcess {
final AsyncProcess ap = new MyAsyncProcess(createHConnection(), conf, false);
for (int i = 0; i < 1000; i++) {
- ap.incTaskCounters(Arrays.asList("dummy".getBytes()), sn);
+ ap.incTaskCounters(Collections.singletonList("dummy".getBytes(StandardCharsets.UTF_8)), sn);
}
final Thread myThread = Thread.currentThread();
@@ -976,7 +985,7 @@ public class TestAsyncProcess {
public void run() {
Threads.sleep(sleepTime);
while (ap.tasksInProgress.get() > 0) {
- ap.decTaskCounters(Arrays.asList("dummy".getBytes()), sn);
+ ap.decTaskCounters(Collections.singletonList("dummy".getBytes(StandardCharsets.UTF_8)), sn);
}
}
};
@@ -1336,13 +1345,13 @@ public class TestAsyncProcess {
} catch (RetriesExhaustedException expected) {
}
- Assert.assertEquals(res[0], success);
- Assert.assertEquals(res[1], success);
- Assert.assertEquals(res[2], success);
- Assert.assertEquals(res[3], success);
- Assert.assertEquals(res[4], failure);
- Assert.assertEquals(res[5], success);
- Assert.assertEquals(res[6], failure);
+ Assert.assertEquals(success, res[0]);
+ Assert.assertEquals(success, res[1]);
+ Assert.assertEquals(success, res[2]);
+ Assert.assertEquals(success, res[3]);
+ Assert.assertEquals(failure, res[4]);
+ Assert.assertEquals(success, res[5]);
+ Assert.assertEquals(failure, res[6]);
}
@Test
public void testErrorsServers() throws IOException {
@@ -1479,7 +1488,7 @@ public class TestAsyncProcess {
ht.batch(gets, new Object[gets.size()]);
- Assert.assertEquals(ap.nbActions.get(), NB_REGS);
+ Assert.assertEquals(NB_REGS, ap.nbActions.get());
Assert.assertEquals("1 multi response per server", 2, ap.nbMultiResponse.get());
Assert.assertEquals("1 thread per server", 2, con.nbThreads.get());
@@ -1487,7 +1496,7 @@ public class TestAsyncProcess {
for (int i =0; i<NB_REGS; i++){
if (con.usedRegions[i]) nbReg++;
}
- Assert.assertEquals("nbReg=" + nbReg, nbReg, NB_REGS);
+ Assert.assertEquals("nbReg=" + nbReg, NB_REGS, nbReg);
}
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
index c594d6e..173bd85 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
@@ -125,8 +125,8 @@ public class TestClientExponentialBackoff {
update(stats, 0, 98, 0);
backoffTime = backoff.getBackoffTime(server, regionname, stats);
- assertEquals("We should be using max backoff when at high watermark", backoffTime,
- ExponentialClientBackoffPolicy.DEFAULT_MAX_BACKOFF);
+ assertEquals("We should be using max backoff when at high watermark",
+ ExponentialClientBackoffPolicy.DEFAULT_MAX_BACKOFF, backoffTime);
}
@Test
@@ -149,8 +149,8 @@ public class TestClientExponentialBackoff {
update(stats, 0, 0, 100);
backoffTime = backoff.getBackoffTime(server, regionname, stats);
- assertEquals("under heavy compaction pressure", backoffTime,
- ExponentialClientBackoffPolicy.DEFAULT_MAX_BACKOFF);
+ assertEquals("under heavy compaction pressure",
+ ExponentialClientBackoffPolicy.DEFAULT_MAX_BACKOFF, backoffTime);
}
private void update(ServerStatistics stats, int load) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
index 1a66fbe..3171422 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientScanner.java
@@ -30,6 +30,7 @@ import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.when;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -127,7 +128,8 @@ public class TestClientScanner {
@SuppressWarnings("unchecked")
public void testNoResultsHint() throws IOException {
final Result[] results = new Result[1];
- KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
+ KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
Type.Maximum);
results[0] = Result.create(new Cell[] {kv1});
@@ -188,7 +190,8 @@ public class TestClientScanner {
@SuppressWarnings("unchecked")
public void testSizeLimit() throws IOException {
final Result[] results = new Result[1];
- KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
+ KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
Type.Maximum);
results[0] = Result.create(new Cell[] {kv1});
@@ -246,9 +249,14 @@ public class TestClientScanner {
@Test
@SuppressWarnings("unchecked")
public void testCacheLimit() throws IOException {
- KeyValue kv1 = new KeyValue("row1".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
- Type.Maximum), kv2 = new KeyValue("row2".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
- Type.Maximum), kv3 = new KeyValue("row3".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
+ KeyValue kv1 = new KeyValue("row1".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+ Type.Maximum),
+ kv2 = new KeyValue("row2".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
+ Type.Maximum),
+ kv3 = new KeyValue("row3".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
Type.Maximum);
final Result[] results = new Result[] {Result.create(new Cell[] {kv1}),
Result.create(new Cell[] {kv2}), Result.create(new Cell[] {kv3})};
@@ -322,7 +330,8 @@ public class TestClientScanner {
@SuppressWarnings("unchecked")
public void testNoMoreResults() throws IOException {
final Result[] results = new Result[1];
- KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
+ KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
Type.Maximum);
results[0] = Result.create(new Cell[] {kv1});
@@ -381,12 +390,14 @@ public class TestClientScanner {
@SuppressWarnings("unchecked")
public void testMoreResults() throws IOException {
final Result[] results1 = new Result[1];
- KeyValue kv1 = new KeyValue("row".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
+ KeyValue kv1 = new KeyValue("row".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
Type.Maximum);
results1[0] = Result.create(new Cell[] {kv1});
final Result[] results2 = new Result[1];
- KeyValue kv2 = new KeyValue("row2".getBytes(), "cf".getBytes(), "cq".getBytes(), 1,
+ KeyValue kv2 = new KeyValue("row2".getBytes(StandardCharsets.UTF_8),
+ "cf".getBytes(StandardCharsets.UTF_8), "cq".getBytes(StandardCharsets.UTF_8), 1,
Type.Maximum);
results2[0] = Result.create(new Cell[] {kv2});
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
index 4348100..4e78555 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDelayingRunner.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.*;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -33,8 +34,8 @@ public class TestDelayingRunner {
private static final TableName DUMMY_TABLE =
TableName.valueOf("DUMMY_TABLE");
- private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes();
- private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes();
+ private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
private static HRegionInfo hri1 =
new HRegionInfo(DUMMY_TABLE, DUMMY_BYTES_1, DUMMY_BYTES_2, false, 1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 1e81f28..3f67247 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -30,6 +30,7 @@ import org.junit.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@@ -92,13 +93,14 @@ public class TestOperation {
private static String COL_NAME_1 = "col1";
private static ColumnPrefixFilter COL_PRE_FILTER =
- new ColumnPrefixFilter(COL_NAME_1.getBytes());
+ new ColumnPrefixFilter(COL_NAME_1.getBytes(StandardCharsets.UTF_8));
private static String STR_COL_PRE_FILTER =
COL_PRE_FILTER.getClass().getSimpleName() + " " + COL_NAME_1;
private static String COL_NAME_2 = "col2";
private static ColumnRangeFilter CR_FILTER = new ColumnRangeFilter(
- COL_NAME_1.getBytes(), true, COL_NAME_2.getBytes(), false);
+ COL_NAME_1.getBytes(StandardCharsets.UTF_8), true,
+ COL_NAME_2.getBytes(StandardCharsets.UTF_8), false);
private static String STR_CR_FILTER = CR_FILTER.getClass().getSimpleName()
+ " [" + COL_NAME_1 + ", " + COL_NAME_2 + ")";
@@ -117,25 +119,28 @@ public class TestOperation {
private static String STOP_ROW_KEY = "stop";
private static InclusiveStopFilter IS_FILTER =
- new InclusiveStopFilter(STOP_ROW_KEY.getBytes());
+ new InclusiveStopFilter(STOP_ROW_KEY.getBytes(StandardCharsets.UTF_8));
private static String STR_IS_FILTER =
IS_FILTER.getClass().getSimpleName() + " " + STOP_ROW_KEY;
private static String PREFIX = "prefix";
private static PrefixFilter PREFIX_FILTER =
- new PrefixFilter(PREFIX.getBytes());
+ new PrefixFilter(PREFIX.getBytes(StandardCharsets.UTF_8));
private static String STR_PREFIX_FILTER = "PrefixFilter " + PREFIX;
private static byte[][] PREFIXES = {
- "0".getBytes(), "1".getBytes(), "2".getBytes()};
+ "0".getBytes(StandardCharsets.UTF_8), "1".getBytes(StandardCharsets.UTF_8),
+ "2".getBytes(StandardCharsets.UTF_8)};
private static MultipleColumnPrefixFilter MCP_FILTER =
new MultipleColumnPrefixFilter(PREFIXES);
private static String STR_MCP_FILTER =
MCP_FILTER.getClass().getSimpleName() + " (3/3): [0, 1, 2]";
private static byte[][] L_PREFIXES = {
- "0".getBytes(), "1".getBytes(), "2".getBytes(), "3".getBytes(),
- "4".getBytes(), "5".getBytes(), "6".getBytes(), "7".getBytes()};
+ "0".getBytes(StandardCharsets.UTF_8), "1".getBytes(StandardCharsets.UTF_8),
+ "2".getBytes(StandardCharsets.UTF_8), "3".getBytes(StandardCharsets.UTF_8),
+ "4".getBytes(StandardCharsets.UTF_8), "5".getBytes(StandardCharsets.UTF_8),
+ "6".getBytes(StandardCharsets.UTF_8), "7".getBytes(StandardCharsets.UTF_8)};
private static MultipleColumnPrefixFilter L_MCP_FILTER =
new MultipleColumnPrefixFilter(L_PREFIXES);
private static String STR_L_MCP_FILTER =
@@ -165,7 +170,7 @@ public class TestOperation {
FIRST_KEY_ONLY_FILTER.getClass().getSimpleName();
private static CompareOp CMP_OP = CompareOp.EQUAL;
- private static byte[] CMP_VALUE = "value".getBytes();
+ private static byte[] CMP_VALUE = "value".getBytes(StandardCharsets.UTF_8);
private static BinaryComparator BC = new BinaryComparator(CMP_VALUE);
private static DependentColumnFilter DC_FILTER =
new DependentColumnFilter(FAMILY, QUALIFIER, true, CMP_OP, BC);
@@ -449,4 +454,3 @@ public class TestOperation {
}
}
-
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
index 6385c27..823e855 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
@@ -68,7 +68,7 @@ public class TestSnapshotFromAdmin {
ignoreExpectedTime += HConstants.RETRY_BACKOFF[i] * pauseTime;
}
// the correct wait time, capping at the maxTime/tries + fudge room
- final long time = pauseTime * 3 + ((maxWaitTime / numRetries) * 3) + 300;
+ final long time = pauseTime * 3L + ((maxWaitTime / numRetries) * 3) + 300;
assertTrue("Capped snapshot wait time isn't less that the uncapped backoff time "
+ "- further testing won't prove anything.", time < ignoreExpectedTime);
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 12b3661..f66e47c 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -32,6 +32,7 @@ import com.google.common.base.Strings;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@@ -100,8 +101,10 @@ public class TestHBaseSaslRpcClient {
@Test
public void testSaslClientCallbackHandler() throws UnsupportedCallbackException {
final Token<? extends TokenIdentifier> token = createTokenMock();
- when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
- when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes());
+ when(token.getIdentifier())
+ .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
+ when(token.getPassword())
+ .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
final NameCallback nameCallback = mock(NameCallback.class);
final PasswordCallback passwordCallback = mock(PasswordCallback.class);
@@ -120,8 +123,10 @@ public class TestHBaseSaslRpcClient {
@Test
public void testSaslClientCallbackHandlerWithException() {
final Token<? extends TokenIdentifier> token = createTokenMock();
- when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
- when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes());
+ when(token.getIdentifier())
+ .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
+ when(token.getPassword())
+ .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
try {
saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });
@@ -291,8 +296,10 @@ public class TestHBaseSaslRpcClient {
throws IOException {
Token<? extends TokenIdentifier> token = createTokenMock();
if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(password)) {
- when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
- when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes());
+ when(token.getIdentifier())
+ .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
+ when(token.getPassword())
+ .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
}
return token;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/3444059a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
index 9990cd1..f5a7a34 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
@@ -23,7 +23,6 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -51,7 +50,7 @@ public class TestZKUtil {
String node = "/hbase/testUnsecure";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List<ACL> aclList = ZKUtil.createACL(watcher, node, false);
- Assert.assertEquals(aclList.size(), 1);
+ Assert.assertEquals(1, aclList.size());
Assert.assertTrue(aclList.contains(Ids.OPEN_ACL_UNSAFE.iterator().next()));
}
@@ -62,7 +61,7 @@ public class TestZKUtil {
String node = "/hbase/testSecuritySingleSuperuser";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List<ACL> aclList = ZKUtil.createACL(watcher, node, true);
- Assert.assertEquals(aclList.size(), 2); // 1+1, since ACL will be set for the creator by default
+ Assert.assertEquals(2, aclList.size()); // 1+1, since ACL will be set for the creator by default
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user1"))));
Assert.assertTrue(aclList.contains(Ids.CREATOR_ALL_ACL.iterator().next()));
}
@@ -74,7 +73,7 @@ public class TestZKUtil {
String node = "/hbase/testCreateACL";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List<ACL> aclList = ZKUtil.createACL(watcher, node, true);
- Assert.assertEquals(aclList.size(), 4); // 3+1, since ACL will be set for the creator by default
+ Assert.assertEquals(4, aclList.size()); // 3+1, since ACL will be set for the creator by default
Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group1"))));
Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group2"))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user1"))));
@@ -90,13 +89,14 @@ public class TestZKUtil {
String node = "/hbase/testCreateACL";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List<ACL> aclList = ZKUtil.createACL(watcher, node, true);
- Assert.assertEquals(aclList.size(), 3); // 3, since service user the same as one of superuser
+ Assert.assertEquals(3, aclList.size()); // 3, since service user the same as one of superuser
Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group1"))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("auth", ""))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user5"))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user6"))));
}
+ @Test
public void testInterruptedDuringAction()
throws ZooKeeperConnectionException, IOException, KeeperException, InterruptedException {
final RecoverableZooKeeper recoverableZk = Mockito.mock(RecoverableZooKeeper.class);