You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2013/07/22 09:43:03 UTC
svn commit: r1505615 - in
/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common:
./ src/main/java/ src/main/java/org/apache/hadoop/io/retry/
src/main/java/org/apache/hadoop/ipc/ src/test/core/
src/test/java/org/apache/hadoop/ipc/
Author: szetszwo
Date: Mon Jul 22 07:43:02 2013
New Revision: 1505615
URL: http://svn.apache.org/r1505615
Log:
svn merge -c 1505610 from trunk for HADOOP-9754. Remove unnecessary "throws IOException/InterruptedException", and fix generic and other javac warnings.
Modified:
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/ (props changed)
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java
Propchange: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1505610
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt Mon Jul 22 07:43:02 2013
@@ -178,6 +178,9 @@ Release 2.1.0-beta - 2013-07-02
HADOOP-9751. Add clientId and retryCount to RpcResponseHeaderProto.
(szetszwo)
+ HADOOP-9754. Remove unnecessary "throws IOException/InterruptedException",
+ and fix generic and other javac warnings. (szetszwo)
+
OPTIMIZATIONS
HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs
Propchange: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1505610
Propchange: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1505610
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java Mon Jul 22 07:43:02 2013
@@ -36,10 +36,10 @@ public class RetryProxy {
* @param retryPolicy the policy for retrying method call failures
* @return the retry proxy
*/
- public static Object create(Class<?> iface, Object implementation,
+ public static <T> Object create(Class<T> iface, T implementation,
RetryPolicy retryPolicy) {
return RetryProxy.create(iface,
- new DefaultFailoverProxyProvider(iface, implementation),
+ new DefaultFailoverProxyProvider<T>(iface, implementation),
retryPolicy);
}
@@ -53,8 +53,8 @@ public class RetryProxy {
* @param retryPolicy the policy for retrying or failing over method call failures
* @return the retry proxy
*/
- public static Object create(Class<?> iface, FailoverProxyProvider proxyProvider,
- RetryPolicy retryPolicy) {
+ public static <T> Object create(Class<T> iface,
+ FailoverProxyProvider<T> proxyProvider, RetryPolicy retryPolicy) {
return Proxy.newProxyInstance(
proxyProvider.getInterface().getClassLoader(),
new Class<?>[] { iface },
@@ -73,10 +73,10 @@ public class RetryProxy {
* @param methodNameToPolicyMap a map of method names to retry policies
* @return the retry proxy
*/
- public static Object create(Class<?> iface, Object implementation,
+ public static <T> Object create(Class<T> iface, T implementation,
Map<String,RetryPolicy> methodNameToPolicyMap) {
return create(iface,
- new DefaultFailoverProxyProvider(iface, implementation),
+ new DefaultFailoverProxyProvider<T>(iface, implementation),
methodNameToPolicyMap,
RetryPolicies.TRY_ONCE_THEN_FAIL);
}
@@ -92,7 +92,8 @@ public class RetryProxy {
* @param methodNameToPolicyMap a map of method names to retry policies
* @return the retry proxy
*/
- public static Object create(Class<?> iface, FailoverProxyProvider proxyProvider,
+ public static <T> Object create(Class<T> iface,
+ FailoverProxyProvider<T> proxyProvider,
Map<String,RetryPolicy> methodNameToPolicyMap,
RetryPolicy defaultPolicy) {
return Proxy.newProxyInstance(
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Mon Jul 22 07:43:02 2013
@@ -401,7 +401,7 @@ public class Client {
if (LOG.isDebugEnabled())
LOG.debug("Use " + authMethod + " authentication for protocol "
- + protocol.getSimpleName());
+ + (protocol == null? null: protocol.getSimpleName()));
this.setName("IPC Client (" + socketFactory.hashCode() +") connection to " +
server.toString() +
@@ -652,7 +652,7 @@ public class Client {
* a header to the server and starts
* the connection thread that waits for responses.
*/
- private synchronized void setupIOstreams() throws InterruptedException {
+ private synchronized void setupIOstreams() {
if (socket != null || shouldCloseConnection.get()) {
return;
}
@@ -1203,7 +1203,7 @@ public class Client {
* for RPC_BUILTIN
*/
public Writable call(Writable param, InetSocketAddress address)
- throws InterruptedException, IOException {
+ throws IOException {
return call(RPC.RpcKind.RPC_BUILTIN, param, address);
}
@@ -1215,7 +1215,7 @@ public class Client {
*/
@Deprecated
public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress address)
- throws InterruptedException, IOException {
+ throws IOException {
return call(rpcKind, param, address, null);
}
@@ -1229,8 +1229,7 @@ public class Client {
*/
@Deprecated
public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress addr,
- UserGroupInformation ticket)
- throws InterruptedException, IOException {
+ UserGroupInformation ticket) throws IOException {
ConnectionId remoteId = ConnectionId.getConnectionId(addr, null, ticket, 0,
conf);
return call(rpcKind, param, remoteId);
@@ -1248,8 +1247,7 @@ public class Client {
@Deprecated
public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress addr,
Class<?> protocol, UserGroupInformation ticket,
- int rpcTimeout)
- throws InterruptedException, IOException {
+ int rpcTimeout) throws IOException {
ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
ticket, rpcTimeout, conf);
return call(rpcKind, param, remoteId);
@@ -1263,8 +1261,7 @@ public class Client {
*/
public Writable call(Writable param, InetSocketAddress addr,
Class<?> protocol, UserGroupInformation ticket,
- int rpcTimeout, Configuration conf)
- throws InterruptedException, IOException {
+ int rpcTimeout, Configuration conf) throws IOException {
ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
ticket, rpcTimeout, conf);
return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId);
@@ -1278,7 +1275,7 @@ public class Client {
public Writable call(Writable param, InetSocketAddress addr,
Class<?> protocol, UserGroupInformation ticket,
int rpcTimeout, int serviceClass, Configuration conf)
- throws InterruptedException, IOException {
+ throws IOException {
ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
ticket, rpcTimeout, conf);
return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId, serviceClass);
@@ -1294,8 +1291,7 @@ public class Client {
*/
public Writable call(RPC.RpcKind rpcKind, Writable param, InetSocketAddress addr,
Class<?> protocol, UserGroupInformation ticket,
- int rpcTimeout, Configuration conf)
- throws InterruptedException, IOException {
+ int rpcTimeout, Configuration conf) throws IOException {
ConnectionId remoteId = ConnectionId.getConnectionId(addr, protocol,
ticket, rpcTimeout, conf);
return call(rpcKind, param, remoteId);
@@ -1305,8 +1301,8 @@ public class Client {
* Same as {@link #call(RPC.RpcKind, Writable, ConnectionId)}
* except the rpcKind is RPC_BUILTIN
*/
- public Writable call(Writable param, ConnectionId remoteId)
- throws InterruptedException, IOException {
+ public Writable call(Writable param, ConnectionId remoteId)
+ throws IOException {
return call(RPC.RpcKind.RPC_BUILTIN, param, remoteId);
}
@@ -1322,7 +1318,7 @@ public class Client {
* threw an exception.
*/
public Writable call(RPC.RpcKind rpcKind, Writable rpcRequest,
- ConnectionId remoteId) throws InterruptedException, IOException {
+ ConnectionId remoteId) throws IOException {
return call(rpcKind, rpcRequest, remoteId, RPC.RPC_SERVICE_CLASS_DEFAULT);
}
@@ -1339,8 +1335,7 @@ public class Client {
* threw an exception.
*/
public Writable call(RPC.RpcKind rpcKind, Writable rpcRequest,
- ConnectionId remoteId, int serviceClass)
- throws InterruptedException, IOException {
+ ConnectionId remoteId, int serviceClass) throws IOException {
final Call call = createCall(rpcKind, rpcRequest);
Connection connection = getConnection(remoteId, call, serviceClass);
try {
@@ -1399,8 +1394,7 @@ public class Client {
/** Get a connection from the pool, or create a new one and add it to the
* pool. Connections to a given ConnectionId are reused. */
private Connection getConnection(ConnectionId remoteId,
- Call call, int serviceClass)
- throws IOException, InterruptedException {
+ Call call, int serviceClass) throws IOException {
if (!running.get()) {
// the client is stopped
throw new IOException("The client is stopped");
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Mon Jul 22 07:43:02 2013
@@ -192,7 +192,6 @@ public class ProtobufRpcEngine implement
}
RequestHeaderProto rpcRequestHeader = constructRpcRequestHeader(method);
- RpcResponseWrapper val = null;
if (LOG.isTraceEnabled()) {
LOG.trace(Thread.currentThread().getId() + ": Call -> " +
@@ -202,6 +201,7 @@ public class ProtobufRpcEngine implement
Message theRequest = (Message) args[1];
+ final RpcResponseWrapper val;
try {
val = (RpcResponseWrapper) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER,
new RpcRequestWrapper(rpcRequestHeader, theRequest), remoteId);
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Mon Jul 22 07:43:02 2013
@@ -913,7 +913,7 @@ public class RPC {
// Register protocol and its impl for rpc calls
void registerProtocolAndImpl(RpcKind rpcKind, Class<?> protocolClass,
- Object protocolImpl) throws IOException {
+ Object protocolImpl) {
String protocolName = RPC.getProtocolName(protocolClass);
long version;
@@ -943,8 +943,6 @@ public class RPC {
}
}
-
- @SuppressWarnings("unused") // will be useful later.
VerProtocolImpl[] getSupportedProtocolVersions(RPC.RpcKind rpcKind,
String protocolName) {
VerProtocolImpl[] resultk =
@@ -999,8 +997,7 @@ public class RPC {
initProtocolMetaInfo(conf);
}
- private void initProtocolMetaInfo(Configuration conf)
- throws IOException {
+ private void initProtocolMetaInfo(Configuration conf) {
RPC.setProtocolEngine(conf, ProtocolMetaInfoPB.class,
ProtobufRpcEngine.class);
ProtocolMetaInfoServerSideTranslatorPB xlator =
@@ -1018,7 +1015,7 @@ public class RPC {
* @return the server (for convenience)
*/
public Server addProtocol(RpcKind rpcKind, Class<?> protocolClass,
- Object protocolImpl) throws IOException {
+ Object protocolImpl) {
registerProtocolAndImpl(rpcKind, protocolClass, protocolImpl);
return this;
}
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Mon Jul 22 07:43:02 2013
@@ -909,11 +909,7 @@ public abstract class Server {
}
for(Call call : calls) {
- try {
- doPurge(call, now);
- } catch (IOException e) {
- LOG.warn("Error in purging old calls " + e);
- }
+ doPurge(call, now);
}
} catch (OutOfMemoryError e) {
//
@@ -958,7 +954,7 @@ public abstract class Server {
// Remove calls that have been pending in the responseQueue
// for a long time.
//
- private void doPurge(Call call, long now) throws IOException {
+ private void doPurge(Call call, long now) {
LinkedList<Call> responseQueue = call.connection.responseQueue;
synchronized (responseQueue) {
Iterator<Call> iter = responseQueue.listIterator(0);
@@ -1514,7 +1510,7 @@ public abstract class Server {
}
private AuthProtocol initializeAuthContext(int authType)
- throws IOException, InterruptedException {
+ throws IOException {
AuthProtocol authProtocol = AuthProtocol.valueOf(authType);
if (authProtocol == null) {
IOException ioe = new IpcException("Unknown auth protocol:" + authType);
@@ -1986,7 +1982,7 @@ public abstract class Server {
this.serviceClass = serviceClass;
}
- private synchronized void close() throws IOException {
+ private synchronized void close() {
disposeSasl();
data = null;
dataLengthBuffer = null;
@@ -2262,10 +2258,7 @@ public abstract class Server {
if (connectionList.remove(connection))
numConnections--;
}
- try {
- connection.close();
- } catch (IOException e) {
- }
+ connection.close();
}
/**
Propchange: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1505610
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java Mon Jul 22 07:43:02 2013
@@ -189,7 +189,7 @@ public class MiniRPCBenchmark {
MiniProtocol client = null;
try {
long start = Time.now();
- client = (MiniProtocol) RPC.getProxy(MiniProtocol.class,
+ client = RPC.getProxy(MiniProtocol.class,
MiniProtocol.versionID, addr, conf);
long end = Time.now();
return end - start;
@@ -211,7 +211,7 @@ public class MiniRPCBenchmark {
client = proxyUserUgi.doAs(new PrivilegedExceptionAction<MiniProtocol>() {
@Override
public MiniProtocol run() throws IOException {
- MiniProtocol p = (MiniProtocol) RPC.getProxy(MiniProtocol.class,
+ MiniProtocol p = RPC.getProxy(MiniProtocol.class,
MiniProtocol.versionID, addr, conf);
Token<TestDelegationTokenIdentifier> token;
token = p.getDelegationToken(new Text(RENEWER));
@@ -239,7 +239,7 @@ public class MiniRPCBenchmark {
client = currentUgi.doAs(new PrivilegedExceptionAction<MiniProtocol>() {
@Override
public MiniProtocol run() throws IOException {
- return (MiniProtocol) RPC.getProxy(MiniProtocol.class,
+ return RPC.getProxy(MiniProtocol.class,
MiniProtocol.versionID, addr, conf);
}
});
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java Mon Jul 22 07:43:02 2013
@@ -31,7 +31,6 @@ import org.apache.commons.cli.HelpFormat
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ipc.RPC.Server;
@@ -55,7 +54,7 @@ import com.google.protobuf.BlockingServi
* Benchmark for protobuf RPC.
* Run with --help option for usage.
*/
-public class RPCCallBenchmark implements Tool, Configurable {
+public class RPCCallBenchmark implements Tool {
private Configuration conf;
private AtomicLong callCount = new AtomicLong(0);
private static ThreadMXBean threadBean =
@@ -403,7 +402,7 @@ public class RPCCallBenchmark implements
}
};
} else if (opts.rpcEngine == WritableRpcEngine.class) {
- final TestProtocol proxy = (TestProtocol)RPC.getProxy(
+ final TestProtocol proxy = RPC.getProxy(
TestProtocol.class, TestProtocol.versionID, addr, conf);
return new RpcServiceWrapper() {
@Override
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java Mon Jul 22 07:43:02 2013
@@ -902,7 +902,7 @@ public class TestIPC {
}
private void assertRetriesOnSocketTimeouts(Configuration conf,
- int maxTimeoutRetries) throws IOException, InterruptedException {
+ int maxTimeoutRetries) throws IOException {
SocketFactory mockFactory = Mockito.mock(SocketFactory.class);
doThrow(new ConnectTimeoutException("fake")).when(mockFactory).createSocket();
Client client = new Client(IntWritable.class, conf, mockFactory);
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java Mon Jul 22 07:43:02 2013
@@ -64,7 +64,7 @@ public class TestMultipleProtocolServer
public static final long versionID = 0L;
void hello() throws IOException;
}
- interface Bar extends Mixin, VersionedProtocol {
+ interface Bar extends Mixin {
public static final long versionID = 0L;
int echo(int i) throws IOException;
}
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java Mon Jul 22 07:43:02 2013
@@ -57,7 +57,7 @@ public class TestRPCCompatibility {
void ping() throws IOException;
}
- public interface TestProtocol1 extends VersionedProtocol, TestProtocol0 {
+ public interface TestProtocol1 extends TestProtocol0 {
String echo(String value) throws IOException;
}
@@ -123,7 +123,7 @@ public class TestRPCCompatibility {
}
@After
- public void tearDown() throws IOException {
+ public void tearDown() {
if (proxy != null) {
RPC.stopProxy(proxy.getProxy());
proxy = null;
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Mon Jul 22 07:43:02 2013
@@ -334,7 +334,7 @@ public class TestSaslRPC {
TestSaslProtocol proxy = null;
try {
- proxy = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
+ proxy = RPC.getProxy(TestSaslProtocol.class,
TestSaslProtocol.versionID, addr, conf);
//QOP must be auth
Assert.assertEquals(SaslRpcServer.SASL_PROPS.get(Sasl.QOP), "auth");
@@ -415,20 +415,20 @@ public class TestSaslRPC {
TestSaslProtocol proxy2 = null;
TestSaslProtocol proxy3 = null;
try {
- proxy1 = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
+ proxy1 = RPC.getProxy(TestSaslProtocol.class,
TestSaslProtocol.versionID, addr, newConf);
proxy1.getAuthMethod();
Client client = WritableRpcEngine.getClient(conf);
Set<ConnectionId> conns = client.getConnectionIds();
assertEquals("number of connections in cache is wrong", 1, conns.size());
// same conf, connection should be re-used
- proxy2 = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
+ proxy2 = RPC.getProxy(TestSaslProtocol.class,
TestSaslProtocol.versionID, addr, newConf);
proxy2.getAuthMethod();
assertEquals("number of connections in cache is wrong", 1, conns.size());
// different conf, new connection should be set up
newConf.set(SERVER_PRINCIPAL_KEY, SERVER_PRINCIPAL_2);
- proxy3 = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
+ proxy3 = RPC.getProxy(TestSaslProtocol.class,
TestSaslProtocol.versionID, addr, newConf);
proxy3.getAuthMethod();
ConnectionId[] connsArray = conns.toArray(new ConnectionId[0]);
@@ -468,7 +468,7 @@ public class TestSaslRPC {
InetSocketAddress addr = NetUtils.getConnectAddress(server);
try {
- proxy = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
+ proxy = RPC.getProxy(TestSaslProtocol.class,
TestSaslProtocol.versionID, addr, newConf);
proxy.ping();
} finally {
@@ -488,7 +488,7 @@ public class TestSaslRPC {
}
@Test
- public void testSaslPlainServerBadPassword() throws IOException {
+ public void testSaslPlainServerBadPassword() {
SaslException e = null;
try {
runNegotiation(
@@ -824,7 +824,7 @@ public class TestSaslRPC {
public String run() throws IOException {
TestSaslProtocol proxy = null;
try {
- proxy = (TestSaslProtocol) RPC.getProxy(TestSaslProtocol.class,
+ proxy = RPC.getProxy(TestSaslProtocol.class,
TestSaslProtocol.versionID, addr, clientConf);
proxy.ping();
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java?rev=1505615&r1=1505614&r2=1505615&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java Mon Jul 22 07:43:02 2013
@@ -118,7 +118,7 @@ public class TestServer {
}
@Test
- public void testExceptionsHandler() throws IOException {
+ public void testExceptionsHandler() {
Server.ExceptionsHandler handler = new Server.ExceptionsHandler();
handler.addTerseExceptions(IOException.class);
handler.addTerseExceptions(RpcServerException.class, IpcException.class);