You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2013/12/20 02:01:22 UTC
svn commit: r1552467 - in
/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common: ./
src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/crypto/
src/main/java/org/apache/hadoop/crypto/key/
src/main/java/org/apache/hadoop/io/ret...
Author: cnauroth
Date: Fri Dec 20 01:01:18 2013
New Revision: 1552467
URL: http://svn.apache.org/r1552467
Log:
Merge trunk to HDFS-4685.
Added:
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/
- copied from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/
- copied from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.crypto.key.KeyProviderFactory
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/
- copied from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/
- copied from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
- copied unchanged from r1552465, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
Modified:
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/pom.xml
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt Fri Dec 20 01:01:18 2013
@@ -105,6 +105,9 @@ Trunk (Unreleased)
HADOOP-9833 move slf4j to version 1.7.5 (Kousuke Saruta via stevel)
+ HADOOP-10141. Create KeyProvider API to separate encryption key storage
+ from the applications. (omalley)
+
BUG FIXES
HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -282,9 +285,6 @@ Trunk (Unreleased)
HADOOP-10044 Improve the javadoc of rpc code (sanjay Radia)
- HADOOP-8753. LocalDirAllocator throws "ArithmeticException: / by zero" when
- there is no available space on configured local dir. (Benoy Antony via hitesh)
-
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -400,12 +400,18 @@ Release 2.4.0 - UNRELEASED
HADOOP-10102. Update commons IO from 2.1 to 2.4 (Akira Ajisaka via stevel)
+ HADOOP-10168. fix javadoc of ReflectionUtils#copy. (Thejas Nair via suresh)
+
+ HADOOP-10164. Allow UGI to login with a known Subject (bobby)
+
OPTIMIZATIONS
HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
- HADOOP-10047. Add a direct-buffer based apis for compression. (Gopal V
- via acmurthy)
+ HADOOP-10047. Add a direct-buffer based apis for compression. (Gopal V
+ via acmurthy)
+
+ HADOOP-10172. Cache SASL server factories (daryn)
BUG FIXES
@@ -470,6 +476,16 @@ Release 2.4.0 - UNRELEASED
HADOOP-10058. TestMetricsSystemImpl#testInitFirstVerifyStopInvokedImmediately
fails on trunk (Chen He via jeagles)
+ HADOOP-8753. LocalDirAllocator throws "ArithmeticException: / by zero" when
+ there is no available space on configured local dir. (Benoy Antony via hitesh)
+
+ HADOOP-10106. Incorrect thread name in RPC log messages. (Ming Ma via jing9)
+
+ HADOOP-9611 mvn-rpmbuild against google-guice > 3.0 yields missing cglib
+ dependency (Timothy St. Clair via stevel)
+
+ HADOOP-10171. TestRPC fails intermittently on jdk7 (Mit Desai via jeagles)
+
Release 2.3.0 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt:r1551915
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1551332-1552465
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/pom.xml?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/pom.xml Fri Dec 20 01:01:18 2013
@@ -210,6 +210,10 @@
<scope>compile</scope>
</dependency>
<dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ </dependency>
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
<scope>compile</scope>
Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1551332-1552465
Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1551332-1552465
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java Fri Dec 20 01:01:18 2013
@@ -68,7 +68,14 @@ public class RetryPolicies {
* </p>
*/
public static final RetryPolicy RETRY_FOREVER = new RetryForever();
-
+
+ /**
+ * <p>
+ * Keep failing over forever
+ * </p>
+ */
+ public static final RetryPolicy FAILOVER_FOREVER = new FailoverForever();
+
/**
* <p>
* Keep trying a limited number of times, waiting a fixed time between attempts,
@@ -166,6 +173,14 @@ public class RetryPolicies {
return RetryAction.RETRY;
}
}
+
+ static class FailoverForever implements RetryPolicy {
+ @Override
+ public RetryAction shouldRetry(Exception e, int retries, int failovers,
+ boolean isIdempotentOrAtMostOnce) throws Exception {
+ return RetryAction.FAILOVER_AND_RETRY;
+ }
+ }
/**
* Retry up to maxRetries.
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Dec 20 01:01:18 2013
@@ -551,14 +551,14 @@ public abstract class Server {
@Override
public void run() {
- LOG.info("Starting " + getName());
+ LOG.info("Starting " + Thread.currentThread().getName());
try {
doRunLoop();
} finally {
try {
readSelector.close();
} catch (IOException ioe) {
- LOG.error("Error closing read selector in " + this.getName(), ioe);
+ LOG.error("Error closing read selector in " + Thread.currentThread().getName(), ioe);
}
}
}
@@ -589,7 +589,7 @@ public abstract class Server {
}
} catch (InterruptedException e) {
if (running) { // unexpected -- log it
- LOG.info(getName() + " unexpectedly interrupted", e);
+ LOG.info(Thread.currentThread().getName() + " unexpectedly interrupted", e);
}
} catch (IOException ex) {
LOG.error("Error in Reader", ex);
@@ -620,7 +620,7 @@ public abstract class Server {
@Override
public void run() {
- LOG.info(getName() + ": starting");
+ LOG.info(Thread.currentThread().getName() + ": starting");
SERVER.set(Server.this);
connectionManager.startIdleScan();
while (running) {
@@ -652,7 +652,7 @@ public abstract class Server {
closeCurrentConnection(key, e);
}
}
- LOG.info("Stopping " + this.getName());
+ LOG.info("Stopping " + Thread.currentThread().getName());
synchronized (this) {
try {
@@ -710,14 +710,14 @@ public abstract class Server {
try {
count = c.readAndProcess();
} catch (InterruptedException ieo) {
- LOG.info(getName() + ": readAndProcess caught InterruptedException", ieo);
+ LOG.info(Thread.currentThread().getName() + ": readAndProcess caught InterruptedException", ieo);
throw ieo;
} catch (Exception e) {
// a WrappedRpcServerException is an exception that has been sent
// to the client, so the stacktrace is unnecessary; any other
// exceptions are unexpected internal server errors and thus the
// stacktrace should be logged
- LOG.info(getName() + ": readAndProcess from client " +
+ LOG.info(Thread.currentThread().getName() + ": readAndProcess from client " +
c.getHostAddress() + " threw exception [" + e + "]",
(e instanceof WrappedRpcServerException) ? null : e);
count = -1; //so that the (count < 0) block is executed
@@ -740,7 +740,7 @@ public abstract class Server {
try {
acceptChannel.socket().close();
} catch (IOException e) {
- LOG.info(getName() + ":Exception in closing listener socket. " + e);
+ LOG.info(Thread.currentThread().getName() + ":Exception in closing listener socket. " + e);
}
}
for (Reader r : readers) {
@@ -773,16 +773,16 @@ public abstract class Server {
@Override
public void run() {
- LOG.info(getName() + ": starting");
+ LOG.info(Thread.currentThread().getName() + ": starting");
SERVER.set(Server.this);
try {
doRunLoop();
} finally {
- LOG.info("Stopping " + this.getName());
+ LOG.info("Stopping " + Thread.currentThread().getName());
try {
writeSelector.close();
} catch (IOException ioe) {
- LOG.error("Couldn't close write selector in " + this.getName(), ioe);
+ LOG.error("Couldn't close write selector in " + Thread.currentThread().getName(), ioe);
}
}
}
@@ -803,7 +803,7 @@ public abstract class Server {
doAsyncWrite(key);
}
} catch (IOException e) {
- LOG.info(getName() + ": doAsyncWrite threw exception " + e);
+ LOG.info(Thread.currentThread().getName() + ": doAsyncWrite threw exception " + e);
}
}
long now = Time.now();
@@ -918,7 +918,7 @@ public abstract class Server {
call = responseQueue.removeFirst();
SocketChannel channel = call.connection.channel;
if (LOG.isDebugEnabled()) {
- LOG.debug(getName() + ": responding to " + call);
+ LOG.debug(Thread.currentThread().getName() + ": responding to " + call);
}
//
// Send as much data as we can in the non-blocking fashion
@@ -937,7 +937,7 @@ public abstract class Server {
done = false; // more calls pending to be sent.
}
if (LOG.isDebugEnabled()) {
- LOG.debug(getName() + ": responding to " + call
+ LOG.debug(Thread.currentThread().getName() + ": responding to " + call
+ " Wrote " + numBytes + " bytes.");
}
} else {
@@ -965,7 +965,7 @@ public abstract class Server {
}
}
if (LOG.isDebugEnabled()) {
- LOG.debug(getName() + ": responding to " + call
+ LOG.debug(Thread.currentThread().getName() + ": responding to " + call
+ " Wrote partial " + numBytes + " bytes.");
}
}
@@ -973,7 +973,7 @@ public abstract class Server {
}
} finally {
if (error && call != null) {
- LOG.warn(getName()+", call " + call + ": output error");
+ LOG.warn(Thread.currentThread().getName()+", call " + call + ": output error");
done = true; // error. no more data for this channel.
closeConnection(call.connection);
}
@@ -2011,7 +2011,7 @@ public abstract class Server {
@Override
public void run() {
- LOG.debug(getName() + ": starting");
+ LOG.debug(Thread.currentThread().getName() + ": starting");
SERVER.set(Server.this);
ByteArrayOutputStream buf =
new ByteArrayOutputStream(INITIAL_RESP_BUF_SIZE);
@@ -2019,7 +2019,7 @@ public abstract class Server {
try {
final Call call = callQueue.take(); // pop the queue; maybe blocked here
if (LOG.isDebugEnabled()) {
- LOG.debug(getName() + ": " + call + " for RpcKind " + call.rpcKind);
+ LOG.debug(Thread.currentThread().getName() + ": " + call + " for RpcKind " + call.rpcKind);
}
String errorClass = null;
String error = null;
@@ -2052,7 +2052,7 @@ public abstract class Server {
if (e instanceof UndeclaredThrowableException) {
e = e.getCause();
}
- String logMsg = getName() + ", call " + call + ": error: " + e;
+ String logMsg = Thread.currentThread().getName() + ", call " + call + ": error: " + e;
if (e instanceof RuntimeException || e instanceof Error) {
// These exception types indicate something is probably wrong
// on the server side, as opposed to just a normal exceptional
@@ -2101,13 +2101,13 @@ public abstract class Server {
}
} catch (InterruptedException e) {
if (running) { // unexpected -- log it
- LOG.info(getName() + " unexpectedly interrupted", e);
+ LOG.info(Thread.currentThread().getName() + " unexpectedly interrupted", e);
}
} catch (Exception e) {
- LOG.info(getName() + " caught an exception", e);
+ LOG.info(Thread.currentThread().getName() + " caught an exception", e);
}
}
- LOG.debug(getName() + ": exiting");
+ LOG.debug(Thread.currentThread().getName() + ": exiting");
}
}
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java Fri Dec 20 01:01:18 2013
@@ -133,7 +133,15 @@ public class Credentials implements Writ
public void addSecretKey(Text alias, byte[] key) {
secretKeysMap.put(alias, key);
}
-
+
+ /**
+ * Remove the key for a given alias.
+ * @param alias the alias for the key
+ */
+ public void removeSecretKey(Text alias) {
+ secretKeysMap.remove(alias);
+ }
+
/**
* Convenience method for reading a token storage file, and loading the Tokens
* therein in the passed UGI
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Fri Dec 20 01:01:18 2013
@@ -25,6 +25,10 @@ import java.io.DataOutput;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.security.Security;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -38,6 +42,7 @@ import javax.security.sasl.RealmCallback
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslServerFactory;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
@@ -63,6 +68,7 @@ public class SaslRpcServer {
public static final String SASL_DEFAULT_REALM = "default";
public static final Map<String, String> SASL_PROPS =
new TreeMap<String, String>();
+ private static SaslServerFactory saslFactory;
public static enum QualityOfProtection {
AUTHENTICATION("auth"),
@@ -151,7 +157,7 @@ public class SaslRpcServer {
new PrivilegedExceptionAction<SaslServer>() {
@Override
public SaslServer run() throws SaslException {
- return Sasl.createSaslServer(mechanism, protocol, serverId,
+ return saslFactory.createSaslServer(mechanism, protocol, serverId,
SaslRpcServer.SASL_PROPS, callback);
}
});
@@ -180,6 +186,7 @@ public class SaslRpcServer {
SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
Security.addProvider(new SaslPlainServer.SecurityProvider());
+ saslFactory = new FastSaslServerFactory(SASL_PROPS);
}
static String encodeIdentifier(byte[] identifier) {
@@ -363,4 +370,47 @@ public class SaslRpcServer {
}
}
}
+
+ // Sasl.createSaslServer is 100-200X slower than caching the factories!
+ private static class FastSaslServerFactory implements SaslServerFactory {
+ private final Map<String,List<SaslServerFactory>> factoryCache =
+ new HashMap<String,List<SaslServerFactory>>();
+
+ FastSaslServerFactory(Map<String,?> props) {
+ final Enumeration<SaslServerFactory> factories =
+ Sasl.getSaslServerFactories();
+ while (factories.hasMoreElements()) {
+ SaslServerFactory factory = factories.nextElement();
+ for (String mech : factory.getMechanismNames(props)) {
+ if (!factoryCache.containsKey(mech)) {
+ factoryCache.put(mech, new ArrayList<SaslServerFactory>());
+ }
+ factoryCache.get(mech).add(factory);
+ }
+ }
+ }
+
+ @Override
+ public SaslServer createSaslServer(String mechanism, String protocol,
+ String serverName, Map<String,?> props, CallbackHandler cbh)
+ throws SaslException {
+ SaslServer saslServer = null;
+ List<SaslServerFactory> factories = factoryCache.get(mechanism);
+ if (factories != null) {
+ for (SaslServerFactory factory : factories) {
+ saslServer = factory.createSaslServer(
+ mechanism, protocol, serverName, props, cbh);
+ if (saslServer != null) {
+ break;
+ }
+ }
+ }
+ return saslServer;
+ }
+
+ @Override
+ public String[] getMechanismNames(Map<String, ?> props) {
+ return factoryCache.keySet().toArray(new String[0]);
+ }
+ }
}
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Fri Dec 20 01:01:18 2013
@@ -477,7 +477,7 @@ public class UserGroupInformation {
private static final AppConfigurationEntry[] SIMPLE_CONF =
new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HADOOP_LOGIN};
-
+
private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN,
HADOOP_LOGIN};
@@ -682,44 +682,59 @@ public class UserGroupInformation {
public synchronized
static UserGroupInformation getLoginUser() throws IOException {
if (loginUser == null) {
- ensureInitialized();
- try {
- Subject subject = new Subject();
- LoginContext login =
- newLoginContext(authenticationMethod.getLoginAppName(),
- subject, new HadoopConfiguration());
- login.login();
- UserGroupInformation realUser = new UserGroupInformation(subject);
- realUser.setLogin(login);
- realUser.setAuthenticationMethod(authenticationMethod);
- realUser = new UserGroupInformation(login.getSubject());
- // If the HADOOP_PROXY_USER environment variable or property
- // is specified, create a proxy user as the logged in user.
- String proxyUser = System.getenv(HADOOP_PROXY_USER);
- if (proxyUser == null) {
- proxyUser = System.getProperty(HADOOP_PROXY_USER);
- }
- loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, realUser);
-
- String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
- if (fileLocation != null) {
- // Load the token storage file and put all of the tokens into the
- // user. Don't use the FileSystem API for reading since it has a lock
- // cycle (HADOOP-9212).
- Credentials cred = Credentials.readTokenStorageFile(
- new File(fileLocation), conf);
- loginUser.addCredentials(cred);
- }
- loginUser.spawnAutoRenewalThreadForUserCreds();
- } catch (LoginException le) {
- LOG.debug("failure to login", le);
- throw new IOException("failure to login", le);
+ loginUserFromSubject(null);
+ }
+ return loginUser;
+ }
+
+ /**
+ * Log in a user using the given subject
+ * @param subject the subject to use when logging in a user, or null to
+ * create a new subject.
+ * @throws IOException if login fails
+ */
+ @InterfaceAudience.Public
+ @InterfaceStability.Evolving
+ public synchronized
+ static void loginUserFromSubject(Subject subject) throws IOException {
+ ensureInitialized();
+ try {
+ if (subject == null) {
+ subject = new Subject();
+ }
+ LoginContext login =
+ newLoginContext(authenticationMethod.getLoginAppName(),
+ subject, new HadoopConfiguration());
+ login.login();
+ UserGroupInformation realUser = new UserGroupInformation(subject);
+ realUser.setLogin(login);
+ realUser.setAuthenticationMethod(authenticationMethod);
+ realUser = new UserGroupInformation(login.getSubject());
+ // If the HADOOP_PROXY_USER environment variable or property
+ // is specified, create a proxy user as the logged in user.
+ String proxyUser = System.getenv(HADOOP_PROXY_USER);
+ if (proxyUser == null) {
+ proxyUser = System.getProperty(HADOOP_PROXY_USER);
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("UGI loginUser:"+loginUser);
+ loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, realUser);
+
+ String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
+ if (fileLocation != null) {
+ // Load the token storage file and put all of the tokens into the
+ // user. Don't use the FileSystem API for reading since it has a lock
+ // cycle (HADOOP-9212).
+ Credentials cred = Credentials.readTokenStorageFile(
+ new File(fileLocation), conf);
+ loginUser.addCredentials(cred);
}
+ loginUser.spawnAutoRenewalThreadForUserCreds();
+ } catch (LoginException le) {
+ LOG.debug("failure to login", le);
+ throw new IOException("failure to login", le);
}
- return loginUser;
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("UGI loginUser:"+loginUser);
+ }
}
@InterfaceAudience.Private
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java Fri Dec 20 01:01:18 2013
@@ -275,8 +275,9 @@ public class ReflectionUtils {
/**
* Make a copy of the writable object using serialization to a buffer
- * @param dst the object to copy from
- * @param src the object to copy into, which is destroyed
+ * @param src the object to copy from
+ * @param dst the object to copy into, which is destroyed
+ * @return dst param (the copy)
* @throws IOException
*/
@SuppressWarnings("unchecked")
Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1551332-1552465
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java Fri Dec 20 01:01:18 2013
@@ -957,6 +957,7 @@ public class TestRPC {
proxy.sleep(pingInterval*4);
} finally {
if (proxy != null) RPC.stopProxy(proxy);
+ server.stop();
}
}
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1552467&r1=1552466&r2=1552467&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java Fri Dec 20 01:01:18 2013
@@ -137,7 +137,9 @@ public class TestSaslRPC {
LOG.info("Testing QOP:"+expectedQop);
LOG.info("---------------------------------");
conf = new Configuration();
- conf.set(HADOOP_SECURITY_AUTHENTICATION, KERBEROS.toString());
+ // the specific tests for kerberos will enable kerberos. forcing it
+ // for all tests will cause tests to fail if the user has a TGT
+ conf.set(HADOOP_SECURITY_AUTHENTICATION, SIMPLE.toString());
conf.set("hadoop.rpc.protection", expectedQop.name().toLowerCase());
UserGroupInformation.setConfiguration(conf);
enableSecretManager = null;