You are viewing a plain text version of this content. The canonical HTML version is available in the mailing-list archive.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2014/03/20 02:12:18 UTC
svn commit: r1579494 - in
/hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common: ./
src/main/java/org/apache/hadoop/ipc/
src/main/java/org/apache/hadoop/security/
Author: arp
Date: Thu Mar 20 01:12:17 2014
New Revision: 1579494
URL: http://svn.apache.org/r1579494
Log:
HADOOP-10070. Merging r1570777 from branch-2 to branch-2.4
Modified:
hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/CHANGES.txt
hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java
hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
Modified: hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1579494&r1=1579493&r2=1579494&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/CHANGES.txt Thu Mar 20 01:12:17 2014
@@ -106,6 +106,9 @@ Release 2.4.0 - UNRELEASED
HADOOP-10407. Fix the javac warnings in org.apache.hadoop.ipc package.
(szetszwo)
+ HADOOP-10070. RPC client doesn't use per-connection conf to determine
+ server's expected Kerberos principal name. (atm)
+
BREAKDOWN OF HADOOP-10184 SUBTASKS AND RELATED JIRAS
HADOOP-10185. FileSystem API for ACLs. (cnauroth)
Modified: hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1579494&r1=1579493&r2=1579494&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Thu Mar 20 01:12:17 2014
@@ -542,8 +542,11 @@ public class Client {
private synchronized AuthMethod setupSaslConnection(final InputStream in2,
final OutputStream out2) throws IOException {
+ // Do not use Client.conf here! We must use ConnectionId.conf, since the
+ // Client object is cached and shared between all RPC clients, even those
+ // for separate services.
saslRpcClient = new SaslRpcClient(remoteId.getTicket(),
- remoteId.getProtocol(), remoteId.getAddress(), conf);
+ remoteId.getProtocol(), remoteId.getAddress(), remoteId.conf);
return saslRpcClient.saslConnect(in2, out2);
}
@@ -1481,21 +1484,31 @@ public class Client {
private final boolean doPing; //do we need to send ping message
private final int pingInterval; // how often sends ping to the server in msecs
private String saslQop; // here for testing
+ private final Configuration conf; // used to get the expected kerberos principal name
ConnectionId(InetSocketAddress address, Class<?> protocol,
- UserGroupInformation ticket, int rpcTimeout, int maxIdleTime,
- RetryPolicy connectionRetryPolicy, int maxRetriesOnSocketTimeouts,
- boolean tcpNoDelay, boolean doPing, int pingInterval) {
+ UserGroupInformation ticket, int rpcTimeout,
+ RetryPolicy connectionRetryPolicy, Configuration conf) {
this.protocol = protocol;
this.address = address;
this.ticket = ticket;
this.rpcTimeout = rpcTimeout;
- this.maxIdleTime = maxIdleTime;
this.connectionRetryPolicy = connectionRetryPolicy;
- this.maxRetriesOnSocketTimeouts = maxRetriesOnSocketTimeouts;
- this.tcpNoDelay = tcpNoDelay;
- this.doPing = doPing;
- this.pingInterval = pingInterval;
+
+ this.maxIdleTime = conf.getInt(
+ CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
+ CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT);
+ this.maxRetriesOnSocketTimeouts = conf.getInt(
+ CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY,
+ CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_DEFAULT);
+ this.tcpNoDelay = conf.getBoolean(
+ CommonConfigurationKeysPublic.IPC_CLIENT_TCPNODELAY_KEY,
+ CommonConfigurationKeysPublic.IPC_CLIENT_TCPNODELAY_DEFAULT);
+ this.doPing = conf.getBoolean(
+ CommonConfigurationKeys.IPC_CLIENT_PING_KEY,
+ CommonConfigurationKeys.IPC_CLIENT_PING_DEFAULT);
+ this.pingInterval = (doPing ? Client.getPingInterval(conf) : 0);
+ this.conf = conf;
}
InetSocketAddress getAddress() {
@@ -1573,19 +1586,8 @@ public class Client {
max, retryInterval, TimeUnit.MILLISECONDS);
}
- boolean doPing =
- conf.getBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true);
return new ConnectionId(addr, protocol, ticket, rpcTimeout,
- conf.getInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
- CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT),
- connectionRetryPolicy,
- conf.getInt(
- CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY,
- CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_DEFAULT),
- conf.getBoolean(CommonConfigurationKeysPublic.IPC_CLIENT_TCPNODELAY_KEY,
- CommonConfigurationKeysPublic.IPC_CLIENT_TCPNODELAY_DEFAULT),
- doPing,
- (doPing ? Client.getPingInterval(conf) : 0));
+ connectionRetryPolicy, conf);
}
static boolean isEqual(Object a, Object b) {
Modified: hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java?rev=1579494&r1=1579493&r2=1579494&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java Thu Mar 20 01:12:17 2014
@@ -59,6 +59,9 @@ public class ClientCache {
} else {
client.incCount();
}
+ if (Client.LOG.isDebugEnabled()) {
+ Client.LOG.debug("getting client out of cache: " + client);
+ }
return client;
}
@@ -90,13 +93,23 @@ public class ClientCache {
* A RPC client is closed only when its reference count becomes zero.
*/
public void stopClient(Client client) {
+ if (Client.LOG.isDebugEnabled()) {
+ Client.LOG.debug("stopping client from cache: " + client);
+ }
synchronized (this) {
client.decCount();
if (client.isZeroReference()) {
+ if (Client.LOG.isDebugEnabled()) {
+ Client.LOG.debug("removing client from cache: " + client);
+ }
clients.remove(client.getSocketFactory());
}
}
if (client.isZeroReference()) {
+ if (Client.LOG.isDebugEnabled()) {
+ Client.LOG.debug("stopping actual client because no more references remain: "
+ + client);
+ }
client.stop();
}
}
Modified: hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1579494&r1=1579493&r2=1579494&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java Thu Mar 20 01:12:17 2014
@@ -312,6 +312,10 @@ public class SaslRpcClient {
// check that the server advertised principal matches our conf
String confPrincipal = SecurityUtil.getServerPrincipal(
conf.get(serverKey), serverAddr.getAddress());
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("getting serverKey: " + serverKey + " conf value: " + conf.get(serverKey)
+ + " principal: " + confPrincipal);
+ }
if (confPrincipal == null || confPrincipal.isEmpty()) {
throw new IllegalArgumentException(
"Failed to specify server's Kerberos principal name");