You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/05/04 22:23:14 UTC
svn commit: r1334158 - in
/hadoop/common/branches/HDFS-3092/hadoop-common-project: dev-support/
hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/
hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/
hado...
Author: szetszwo
Date: Fri May 4 20:22:57 2012
New Revision: 1334158
URL: http://svn.apache.org/viewvc?rev=1334158&view=rev
Log:
Merge r1332460 through r1334157 from trunk.
Added:
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputWrapper.java
- copied unchanged from r1334157, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputWrapper.java
Removed:
hadoop/common/branches/HDFS-3092/hadoop-common-project/dev-support/test-patch.properties
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/resources/test-patch.properties
Modified:
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/bin/hadoop
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FenceMethod.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java Fri May 4 20:22:57 2012
@@ -26,7 +26,6 @@ import javax.security.auth.login.Configu
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.IOException;
-import java.lang.reflect.Field;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.AccessControlContext;
@@ -196,11 +195,10 @@ public class KerberosAuthenticator imple
try {
GSSManager gssManager = GSSManager.getInstance();
String servicePrincipal = "HTTP/" + KerberosAuthenticator.this.url.getHost();
-
+ Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
GSSName serviceName = gssManager.createName(servicePrincipal,
- GSSName.NT_HOSTBASED_SERVICE);
- Oid oid = KerberosUtil.getOidClassInstance(servicePrincipal,
- gssManager);
+ oid);
+ oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
gssContext = gssManager.createContext(serviceName, oid, null,
GSSContext.DEFAULT_LIFETIME);
gssContext.requestCredDeleg(true);
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java Fri May 4 20:22:57 2012
@@ -327,6 +327,8 @@ public class AuthenticationFilter implem
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
+ boolean unauthorizedResponse = true;
+ String unauthorizedMsg = "";
HttpServletRequest httpRequest = (HttpServletRequest) request;
HttpServletResponse httpResponse = (HttpServletResponse) response;
try {
@@ -350,6 +352,7 @@ public class AuthenticationFilter implem
newToken = true;
}
if (token != null) {
+ unauthorizedResponse = false;
if (LOG.isDebugEnabled()) {
LOG.debug("Request [{}] user [{}] authenticated", getRequestURL(httpRequest), token.getUserName());
}
@@ -378,17 +381,17 @@ public class AuthenticationFilter implem
}
filterChain.doFilter(httpRequest, httpResponse);
}
- else {
- throw new AuthenticationException("Missing AuthenticationToken");
- }
} catch (AuthenticationException ex) {
+ unauthorizedMsg = ex.toString();
+ LOG.warn("Authentication exception: " + ex.getMessage(), ex);
+ }
+ if (unauthorizedResponse) {
if (!httpResponse.isCommitted()) {
Cookie cookie = createCookie("");
cookie.setMaxAge(0);
httpResponse.addCookie(cookie);
- httpResponse.sendError(HttpServletResponse.SC_UNAUTHORIZED, ex.getMessage());
+ httpResponse.sendError(HttpServletResponse.SC_UNAUTHORIZED, unauthorizedMsg);
}
- LOG.warn("Authentication exception: " + ex.getMessage(), ex);
}
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java Fri May 4 20:22:57 2012
@@ -22,7 +22,6 @@ import java.lang.reflect.InvocationTarge
import java.lang.reflect.Method;
import org.ietf.jgss.GSSException;
-import org.ietf.jgss.GSSManager;
import org.ietf.jgss.Oid;
public class KerberosUtil {
@@ -34,8 +33,7 @@ public class KerberosUtil {
: "com.sun.security.auth.module.Krb5LoginModule";
}
- public static Oid getOidClassInstance(String servicePrincipal,
- GSSManager gssManager)
+ public static Oid getOidInstance(String oidName)
throws ClassNotFoundException, GSSException, NoSuchFieldException,
IllegalAccessException {
Class<?> oidClass;
@@ -44,7 +42,7 @@ public class KerberosUtil {
} else {
oidClass = Class.forName("sun.security.jgss.GSSUtil");
}
- Field oidField = oidClass.getDeclaredField("GSS_KRB5_MECH_OID");
+ Field oidField = oidClass.getDeclaredField(oidName);
return (Oid)oidField.get(oidClass);
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java Fri May 4 20:22:57 2012
@@ -145,10 +145,10 @@ public class TestKerberosAuthenticationH
GSSContext gssContext = null;
try {
String servicePrincipal = KerberosTestUtils.getServerPrincipal();
+ Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
GSSName serviceName = gssManager.createName(servicePrincipal,
- GSSName.NT_HOSTBASED_SERVICE);
- Oid oid = KerberosUtil.getOidClassInstance(servicePrincipal,
- gssManager);
+ oid);
+ oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
gssContext = gssManager.createContext(serviceName, oid, null,
GSSContext.DEFAULT_LIFETIME);
gssContext.requestCredDeleg(true);
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt Fri May 4 20:22:57 2012
@@ -65,6 +65,8 @@ Trunk (unreleased changes)
HADOOP-8285 Use ProtoBuf for RpcPayLoadHeader (sanjay radia)
+ HADOOP-8308. Support cross-project Jenkins builds. (tomwhite)
+
BUG FIXES
HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -124,6 +126,9 @@ Trunk (unreleased changes)
HADOOP-8312. testpatch.sh should provide a simpler way to see which
warnings changed (bobby)
+ HADOOP-8339. jenkins complaining about 16 javadoc warnings
+ (Tom White and Robert Evans via tgraves)
+
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -158,6 +163,9 @@ Release 2.0.0 - UNRELEASED
HADOOP-8210. Common side of HDFS-3148: The client should be able
to use multiple local interfaces for data transfer. (eli)
+ HADOOP-8343. Allow configuration of authorization for JmxJsonServlet and
+ MetricsServlet (tucu)
+
IMPROVEMENTS
HADOOP-7524. Change RPC to allow multiple protocols including multuple
@@ -274,6 +282,20 @@ Release 2.0.0 - UNRELEASED
HADOOP-7549. Use JDK ServiceLoader mechanism to find FileSystem implementations. (tucu)
+ HADOOP-8185. Update namenode -format documentation and add -nonInteractive
+ and -force. (Arpit Gupta via atm)
+
+ HADOOP-8214. make hadoop script recognize a full set of deprecated commands (rvs via tucu)
+
+ HADOOP-8347. Hadoop Common logs misspell 'successful'.
+ (Philip Zeyliger via eli)
+
+ HADOOP-8350. Improve NetUtils.getInputStream to return a stream which has
+ a tunable timeout. (todd)
+
+ HADOOP-8356. FileSystem service loading mechanism should print the FileSystem
+ impl it is failing to load (tucu)
+
OPTIMIZATIONS
BUG FIXES
@@ -390,6 +412,17 @@ Release 2.0.0 - UNRELEASED
HADOOP-8325. Add a ShutdownHookManager to be used by different
components instead of the JVM shutdownhook (tucu)
+ HADOOP-8275. Range check DelegationKey length.
+ (Colin Patrick McCabe via eli)
+
+ HADOOP-8342. HDFS command fails with exception following merge of
+ HADOOP-8325 (tucu)
+
+ HADOOP-8346. Makes oid changes to make SPNEGO work. Was broken due
+ to fixes introduced by the IBM JDK compatibility patch. (ddas)
+
+ HADOOP-8355. SPNEGO filter throws/logs exception when authentication fails (tucu)
+
BREAKDOWN OF HADOOP-7454 SUBTASKS
HADOOP-7455. HA: Introduce HA Service Protocol Interface. (suresh)
@@ -442,6 +475,12 @@ Release 2.0.0 - UNRELEASED
HADOOP-8116. RetriableCommand is using RetryPolicy incorrectly after
HADOOP-7896. (atm)
+ HADOOP-8317. Update maven-assembly-plugin to 2.3 - fix build on FreeBSD
+ (Radim Kolar via bobby)
+
+ HADOOP-8172. Configuration no longer sets all keys in a deprecated key
+ list. (Anupam Seth via bobby)
+
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1332460-1334157
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/bin/hadoop (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/bin/hadoop Fri May 4 20:22:57 2012
@@ -50,15 +50,16 @@ fi
COMMAND=$1
case $COMMAND in
#hdfs commands
- namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt)
+ namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer|fetchdt|oiv|dfsgroups)
echo "DEPRECATED: Use of this script to execute hdfs command is deprecated." 1>&2
echo "Instead use the hdfs command for it." 1>&2
echo "" 1>&2
#try to locate hdfs and if present, delegate to it.
+ shift
if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
- exec "${HADOOP_HDFS_HOME}"/bin/hdfs $*
+ exec "${HADOOP_HDFS_HOME}"/bin/hdfs ${COMMAND/dfsgroups/groups} $*
elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
- exec "${HADOOP_PREFIX}"/bin/hdfs $*
+ exec "${HADOOP_PREFIX}"/bin/hdfs ${COMMAND/dfsgroups/groups} $*
else
echo "HADOOP_HDFS_HOME not found!"
exit 1
@@ -66,15 +67,16 @@ case $COMMAND in
;;
#mapred commands for backwards compatibility
- pipes|job|queue)
+ pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
echo "DEPRECATED: Use of this script to execute mapred command is deprecated." 1>&2
echo "Instead use the mapred command for it." 1>&2
echo "" 1>&2
#try to locate mapred and if present, delegate to it.
+ shift
if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
- exec "${HADOOP_MAPRED_HOME}"/bin/mapred $*
+ exec "${HADOOP_MAPRED_HOME}"/bin/mapred ${COMMAND/mrgroups/groups} $*
elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
- exec "${HADOOP_PREFIX}"/bin/mapred $*
+ exec "${HADOOP_PREFIX}"/bin/mapred ${COMMAND/mrgroups/groups} $*
else
echo "HADOOP_MAPRED_HOME not found!"
exit 1
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1332460-1334157
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml Fri May 4 20:22:57 2012
@@ -696,7 +696,7 @@
<a href="http://hadoop.apache.org/hdfs/docs/current/hdfs_user_guide.html#Upgrade+and+Rollback">Upgrade and Rollback</a>.
</p>
<p>
- <code>Usage: hadoop namenode [-format] | [-upgrade] | [-rollback] | [-finalize] | [-importCheckpoint] | [-checkpoint] | [-backup]</code>
+ <code>Usage: hadoop namenode [-format [-force] [-nonInteractive] [-clusterid someid]] | [-upgrade] | [-rollback] | [-finalize] | [-importCheckpoint] | [-checkpoint] | [-backup]</code>
</p>
<table>
<tr><th> COMMAND_OPTION </th><th> Description </th></tr>
@@ -714,8 +714,11 @@
<td>Start namenode in backup role, maintaining an up-to-date in-memory copy of the namespace and creating periodic checkpoints.</td>
</tr>
<tr>
- <td><code>-format</code></td>
- <td>Formats the namenode. It starts the namenode, formats it and then shut it down.</td>
+ <td><code>-format [-force] [-nonInteractive] [-clusterid someid]</code></td>
+ <td>Formats the namenode. It starts the namenode, formats it and then shuts it down. User will be prompted before formatting any non empty name directories in the local filesystem.<br/>
+ -nonInteractive: User will not be prompted for input if non empty name directories exist in the local filesystem and the format will fail.<br/>
+ -force: Formats the namenode and the user will NOT be prompted to confirm formatting of the name directories in the local filesystem. If -nonInteractive option is specified it will be ignored.<br/>
+ -clusterid: Associates the namenode with the id specified. When formatting federated namenodes use this option to make sure all namenodes are associated with the same id.</td>
</tr>
<tr>
<td><code>-upgrade</code></td>
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1332460-1334157
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfServlet.java Fri May 4 20:22:57 2012
@@ -18,7 +18,6 @@
package org.apache.hadoop.conf;
import java.io.IOException;
-import java.io.OutputStreamWriter;
import java.io.Writer;
import javax.servlet.ServletException;
@@ -57,9 +56,8 @@ public class ConfServlet extends HttpSer
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
- // Do the authorization
- if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
- response)) {
+ if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+ request, response)) {
return;
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Fri May 4 20:22:57 2012
@@ -33,6 +33,7 @@ import java.io.Writer;
import java.net.InetSocketAddress;
import java.net.URL;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
@@ -269,10 +270,18 @@ public class Configuration implements It
* This is to be used only by the developers in order to add deprecation of
* keys, and attempts to call this method after loading resources once,
* would lead to <tt>UnsupportedOperationException</tt>
+ *
+ * If a key is deprecated in favor of multiple keys, they are all treated as
+ * aliases of each other, and setting any one of them resets all the others
+ * to the new value.
+ *
* @param key
* @param newKeys
* @param customMessage
+ * @deprecated use {@link addDeprecation(String key, String newKey,
+ String customMessage)} instead
*/
+ @Deprecated
public synchronized static void addDeprecation(String key, String[] newKeys,
String customMessage) {
if (key == null || key.length() == 0 ||
@@ -288,6 +297,22 @@ public class Configuration implements It
}
}
}
+
+ /**
+ * Adds the deprecated key to the deprecation map.
+ * It does not override any existing entries in the deprecation map.
+ * This is to be used only by the developers in order to add deprecation of
+ * keys, and attempts to call this method after loading resources once,
+ * would lead to <tt>UnsupportedOperationException</tt>
+ *
+ * @param key
+ * @param newKey
+ * @param customMessage
+ */
+ public synchronized static void addDeprecation(String key, String newKey,
+ String customMessage) {
+ addDeprecation(key, new String[] {newKey}, customMessage);
+ }
/**
* Adds the deprecated key to the deprecation map when no custom message
@@ -297,14 +322,35 @@ public class Configuration implements It
* keys, and attempts to call this method after loading resources once,
* would lead to <tt>UnsupportedOperationException</tt>
*
+ * If a key is deprecated in favor of multiple keys, they are all treated as
+ * aliases of each other, and setting any one of them resets all the others
+ * to the new value.
+ *
* @param key Key that is to be deprecated
* @param newKeys list of keys that take up the values of deprecated key
+ * @deprecated use {@link addDeprecation(String key, String newKey)} instead
*/
+ @Deprecated
public synchronized static void addDeprecation(String key, String[] newKeys) {
addDeprecation(key, newKeys, null);
}
/**
+ * Adds the deprecated key to the deprecation map when no custom message
+ * is provided.
+ * It does not override any existing entries in the deprecation map.
+ * This is to be used only by the developers in order to add deprecation of
+ * keys, and attempts to call this method after loading resources once,
+ * would lead to <tt>UnsupportedOperationException</tt>
+ *
+ * @param key Key that is to be deprecated
+ * @param newKey key that takes up the value of deprecated key
+ */
+ public synchronized static void addDeprecation(String key, String newKey) {
+ addDeprecation(key, new String[] {newKey}, null);
+ }
+
+ /**
* checks whether the given <code>key</code> is deprecated.
*
* @param key the parameter which is to be checked for deprecation
@@ -322,16 +368,26 @@ public class Configuration implements It
* @param name property name.
* @return alternate name.
*/
- private String getAlternateName(String name) {
- String altName;
+ private String[] getAlternateNames(String name) {
+ String oldName, altNames[] = null;
DeprecatedKeyInfo keyInfo = deprecatedKeyMap.get(name);
- if (keyInfo != null) {
- altName = (keyInfo.newKeys.length > 0) ? keyInfo.newKeys[0] : null;
- }
- else {
- altName = reverseDeprecatedKeyMap.get(name);
+ if (keyInfo == null) {
+ altNames = (reverseDeprecatedKeyMap.get(name) != null ) ?
+ new String [] {reverseDeprecatedKeyMap.get(name)} : null;
+ if(altNames != null && altNames.length > 0) {
+ //To help look for other new configs for this deprecated config
+ keyInfo = deprecatedKeyMap.get(altNames[0]);
+ }
+ }
+ if(keyInfo != null && keyInfo.newKeys.length > 0) {
+ List<String> list = new ArrayList<String>();
+ if(altNames != null) {
+ list.addAll(Arrays.asList(altNames));
+ }
+ list.addAll(Arrays.asList(keyInfo.newKeys));
+ altNames = list.toArray(new String[list.size()]);
}
- return altName;
+ return altNames;
}
/**
@@ -346,24 +402,29 @@ public class Configuration implements It
* @return the first property in the list of properties mapping
* the <code>name</code> or the <code>name</code> itself.
*/
- private String handleDeprecation(String name) {
- if (isDeprecated(name)) {
+ private String[] handleDeprecation(String name) {
+ ArrayList<String > names = new ArrayList<String>();
+ if (isDeprecated(name)) {
DeprecatedKeyInfo keyInfo = deprecatedKeyMap.get(name);
warnOnceIfDeprecated(name);
for (String newKey : keyInfo.newKeys) {
if(newKey != null) {
- name = newKey;
- break;
+ names.add(newKey);
}
}
}
- String deprecatedKey = reverseDeprecatedKeyMap.get(name);
- if (deprecatedKey != null && !getOverlay().containsKey(name) &&
- getOverlay().containsKey(deprecatedKey)) {
- getProps().setProperty(name, getOverlay().getProperty(deprecatedKey));
- getOverlay().setProperty(name, getOverlay().getProperty(deprecatedKey));
+ if(names.size() == 0) {
+ names.add(name);
}
- return name;
+ for(String n : names) {
+ String deprecatedKey = reverseDeprecatedKeyMap.get(n);
+ if (deprecatedKey != null && !getOverlay().containsKey(n) &&
+ getOverlay().containsKey(deprecatedKey)) {
+ getProps().setProperty(n, getOverlay().getProperty(deprecatedKey));
+ getOverlay().setProperty(n, getOverlay().getProperty(deprecatedKey));
+ }
+ }
+ return names.toArray(new String[names.size()]);
}
private void handleDeprecation() {
@@ -595,8 +656,12 @@ public class Configuration implements It
* or null if no such property exists.
*/
public String get(String name) {
- name = handleDeprecation(name);
- return substituteVars(getProps().getProperty(name));
+ String[] names = handleDeprecation(name);
+ String result = null;
+ for(String n : names) {
+ result = substituteVars(getProps().getProperty(n));
+ }
+ return result;
}
/**
@@ -633,8 +698,12 @@ public class Configuration implements It
* its replacing property and null if no such property exists.
*/
public String getRaw(String name) {
- name = handleDeprecation(name);
- return getProps().getProperty(name);
+ String[] names = handleDeprecation(name);
+ String result = null;
+ for(String n : names) {
+ result = getProps().getProperty(n);
+ }
+ return result;
}
/**
@@ -652,10 +721,12 @@ public class Configuration implements It
getOverlay().setProperty(name, value);
getProps().setProperty(name, value);
updatingResource.put(name, UNKNOWN_RESOURCE);
- String altName = getAlternateName(name);
- if (altName != null) {
- getOverlay().setProperty(altName, value);
- getProps().setProperty(altName, value);
+ String[] altNames = getAlternateNames(name);
+ if (altNames != null && altNames.length > 0) {
+ for(String altName : altNames) {
+ getOverlay().setProperty(altName, value);
+ getProps().setProperty(altName, value);
+ }
}
warnOnceIfDeprecated(name);
}
@@ -671,12 +742,14 @@ public class Configuration implements It
* Unset a previously set property.
*/
public synchronized void unset(String name) {
- String altName = getAlternateName(name);
+ String[] altNames = getAlternateNames(name);
getOverlay().remove(name);
getProps().remove(name);
- if (altName !=null) {
- getOverlay().remove(altName);
- getProps().remove(altName);
+ if (altNames !=null && altNames.length > 0) {
+ for(String altName : altNames) {
+ getOverlay().remove(altName);
+ getProps().remove(altName);
+ }
}
}
@@ -711,8 +784,12 @@ public class Configuration implements It
* doesn't exist.
*/
public String get(String name, String defaultValue) {
- name = handleDeprecation(name);
- return substituteVars(getProps().getProperty(name, defaultValue));
+ String[] names = handleDeprecation(name);
+ String result = null;
+ for(String n : names) {
+ result = substituteVars(getProps().getProperty(n, defaultValue));
+ }
+ return result;
}
/**
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java Fri May 4 20:22:57 2012
@@ -228,6 +228,9 @@ public class CommonConfigurationKeysPubl
public static final String HADOOP_SECURITY_AUTHORIZATION =
"hadoop.security.authorization";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+ public static final String HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN =
+ "hadoop.security.instrumentation.requires.admin";
+ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_SERVICE_USER_NAME_KEY =
"hadoop.security.service.user.name.key";
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri May 4 20:22:57 2012
@@ -199,7 +199,7 @@ public abstract class FileSystem extends
* @return the protocol scheme for the FileSystem.
*/
public String getScheme() {
- throw new UnsupportedOperationException("Not implemented by the FileSystem implementation");
+ throw new UnsupportedOperationException("Not implemented by the " + getClass().getSimpleName() + " FileSystem implementation");
}
/** Returns a URI whose scheme and authority identify this FileSystem.*/
@@ -2198,10 +2198,7 @@ public abstract class FileSystem extends
if (map.containsKey(key) && fs == map.get(key)) {
map.remove(key);
toAutoClose.remove(key);
- if (map.isEmpty()) {
- ShutdownHookManager.get().removeShutdownHook(clientFinalizer);
}
- }
}
synchronized void closeAll() throws IOException {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FenceMethod.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FenceMethod.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FenceMethod.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FenceMethod.java Fri May 4 20:22:57 2012
@@ -52,7 +52,7 @@ public interface FenceMethod {
/**
* Attempt to fence the target node.
- * @param serviceAddr the address (host:ipcport) of the service to fence
+ * @param target the target of the service to fence
* @param args the configured arguments, which were checked at startup by
* {@link #checkArgs(String)}
* @return true if fencing was successful, false if unsuccessful or
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java Fri May 4 20:22:57 2012
@@ -117,12 +117,13 @@ public interface HAServiceProtocol {
/**
* Return the current status of the service. The status indicates
* the current <em>state</em> (e.g ACTIVE/STANDBY) as well as
- * some additional information. {@see HAServiceStatus}
+ * some additional information.
*
* @throws AccessControlException
* if access is denied.
* @throws IOException
* if other errors happen
+ * @see HAServiceStatus
*/
public HAServiceStatus getServiceStatus() throws AccessControlException,
IOException;
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Fri May 4 20:22:57 2012
@@ -780,6 +780,37 @@ public class HttpServer implements Filte
}
/**
+ * Checks the user has privileges to access to instrumentation servlets.
+ * <p/>
+ * If <code>hadoop.security.instrumentation.requires.admin</code> is set to FALSE
+ * (default value) it always returns TRUE.
+ * <p/>
+ * If <code>hadoop.security.instrumentation.requires.admin</code> is set to TRUE
+ * it will check if the current user is in the admin ACLs. If the user is
+ * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
+ *
+ * @param servletContext the servlet context.
+ * @param request the servlet request.
+ * @param response the servlet response.
+ * @return TRUE/FALSE based on the logic described above.
+ */
+ public static boolean isInstrumentationAccessAllowed(
+ ServletContext servletContext, HttpServletRequest request,
+ HttpServletResponse response) throws IOException {
+ Configuration conf =
+ (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
+
+ boolean access = true;
+ boolean adminAccess = conf.getBoolean(
+ CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+ false);
+ if (adminAccess) {
+ access = hasAdministratorAccess(servletContext, request, response);
+ }
+ return access;
+ }
+
+ /**
* Does the user sending the HttpServletRequest has the administrator ACLs? If
* it isn't the case, response will be modified to send an error to the user.
*
@@ -794,7 +825,6 @@ public class HttpServer implements Filte
HttpServletResponse response) throws IOException {
Configuration conf =
(Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE);
-
// If there is no authorization, anybody has administrator access.
if (!conf.getBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
@@ -834,12 +864,11 @@ public class HttpServer implements Filte
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
- response.setContentType("text/plain; charset=UTF-8");
- // Do the authorization
- if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
- response)) {
+ if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+ request, response)) {
return;
}
+ response.setContentType("text/plain; charset=UTF-8");
PrintWriter out = response.getWriter();
ReflectionUtils.printThreadInfo(out, "");
out.close();
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java Fri May 4 20:22:57 2012
@@ -326,9 +326,41 @@ public final class WritableUtils {
* @return deserialized integer from stream.
*/
public static int readVInt(DataInput stream) throws IOException {
- return (int) readVLong(stream);
+ long n = readVLong(stream);
+ if ((n > Integer.MAX_VALUE) || (n < Integer.MIN_VALUE)) {
+ throw new IOException("value too long to fit in integer");
+ }
+ return (int)n;
}
-
+
+ /**
+ * Reads an integer from the input stream and returns it.
+ *
+ * This function validates that the integer is between [lower, upper],
+ * inclusive.
+ *
+ * @param stream Binary input stream
+ * @throws java.io.IOException
+ * @return deserialized integer from stream
+ */
+ public static int readVIntInRange(DataInput stream, int lower, int upper)
+ throws IOException {
+ long n = readVLong(stream);
+ if (n < lower) {
+ if (lower == 0) {
+ throw new IOException("expected non-negative integer, got " + n);
+ } else {
+ throw new IOException("expected integer greater than or equal to " +
+ lower + ", got " + n);
+ }
+ }
+ if (n > upper) {
+ throw new IOException("expected integer less or equal to " + upper +
+ ", got " + n);
+ }
+ return (int)n;
+ }
+
/**
* Given the first byte of a vint/vlong, determine the sign
* @param value the first byte
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri May 4 20:22:57 2012
@@ -217,7 +217,7 @@ public abstract class Server {
public static final Log AUDITLOG =
LogFactory.getLog("SecurityLogger."+Server.class.getName());
private static final String AUTH_FAILED_FOR = "Auth failed for ";
- private static final String AUTH_SUCCESSFULL_FOR = "Auth successfull for ";
+ private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for ";
private static final ThreadLocal<Server> SERVER = new ThreadLocal<Server>();
@@ -1234,7 +1234,7 @@ public abstract class Server {
LOG.debug("SASL server successfully authenticated client: " + user);
}
rpcMetrics.incrAuthenticationSuccesses();
- AUDITLOG.info(AUTH_SUCCESSFULL_FOR + user);
+ AUDITLOG.info(AUTH_SUCCESSFUL_FOR + user);
saslContextEstablished = true;
}
} else {
@@ -1776,7 +1776,7 @@ public abstract class Server {
* from configuration. Otherwise the configuration will be picked up.
*
* If rpcRequestClass is null then the rpcRequestClass must have been
- * registered via {@link #registerProtocolEngine(RpcPayloadHeader.RpcKind,
+ * registered via {@link #registerProtocolEngine(RPC.RpcKind,
* Class, RPC.RpcInvoker)}
* This parameter has been retained for compatibility with existing tests
* and usage.
@@ -1990,7 +1990,7 @@ public abstract class Server {
/**
* Called for each call.
- * @deprecated Use {@link #call(RpcPayloadHeader.RpcKind, String,
+ * @deprecated Use {@link #call(RPC.RpcKind, String,
* Writable, long)} instead
*/
@Deprecated
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java Fri May 4 20:22:57 2012
@@ -148,9 +148,8 @@ public class JMXJsonServlet extends Http
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) {
try {
- // Do the authorization
- if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
- response)) {
+ if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+ request, response)) {
return;
}
JsonGenerator jg = null;
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java Fri May 4 20:22:57 2012
@@ -106,9 +106,8 @@ public class MetricsServlet extends Http
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
- // Do the authorization
- if (!HttpServer.hasAdministratorAccess(getServletContext(), request,
- response)) {
+ if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(),
+ request, response)) {
return;
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Fri May 4 20:22:57 2012
@@ -375,53 +375,44 @@ public class NetUtils {
}
/**
- * Same as getInputStream(socket, socket.getSoTimeout()).<br><br>
+ * Same as <code>getInputStream(socket, socket.getSoTimeout()).</code>
+ * <br><br>
*
- * From documentation for {@link #getInputStream(Socket, long)}:<br>
- * Returns InputStream for the socket. If the socket has an associated
- * SocketChannel then it returns a
- * {@link SocketInputStream} with the given timeout. If the socket does not
- * have a channel, {@link Socket#getInputStream()} is returned. In the later
- * case, the timeout argument is ignored and the timeout set with
- * {@link Socket#setSoTimeout(int)} applies for reads.<br><br>
- *
- * Any socket created using socket factories returned by {@link NetUtils},
- * must use this interface instead of {@link Socket#getInputStream()}.
- *
* @see #getInputStream(Socket, long)
- *
- * @param socket
- * @return InputStream for reading from the socket.
- * @throws IOException
*/
- public static InputStream getInputStream(Socket socket)
+ public static SocketInputWrapper getInputStream(Socket socket)
throws IOException {
return getInputStream(socket, socket.getSoTimeout());
}
-
+
/**
- * Returns InputStream for the socket. If the socket has an associated
- * SocketChannel then it returns a
- * {@link SocketInputStream} with the given timeout. If the socket does not
- * have a channel, {@link Socket#getInputStream()} is returned. In the later
- * case, the timeout argument is ignored and the timeout set with
- * {@link Socket#setSoTimeout(int)} applies for reads.<br><br>
+ * Return a {@link SocketInputWrapper} for the socket and set the given
+ * timeout. If the socket does not have an associated channel, then its socket
+ * timeout will be set to the specified value. Otherwise, a
+ * {@link SocketInputStream} will be created which reads with the configured
+ * timeout.
*
- * Any socket created using socket factories returned by {@link NetUtils},
+ * Any socket created using socket factories returned by {@link NetUtils},
* must use this interface instead of {@link Socket#getInputStream()}.
- *
+ *
+ * In general, this should be called only once on each socket: see the note
+ * in {@link SocketInputWrapper#setTimeout(long)} for more information.
+ *
* @see Socket#getChannel()
*
* @param socket
- * @param timeout timeout in milliseconds. This may not always apply. zero
- * for waiting as long as necessary.
- * @return InputStream for reading from the socket.
+ * @param timeout timeout in milliseconds. zero for waiting as
+ * long as necessary.
+ * @return SocketInputWrapper for reading from the socket.
* @throws IOException
*/
- public static InputStream getInputStream(Socket socket, long timeout)
+ public static SocketInputWrapper getInputStream(Socket socket, long timeout)
throws IOException {
- return (socket.getChannel() == null) ?
- socket.getInputStream() : new SocketInputStream(socket, timeout);
+ InputStream stm = (socket.getChannel() == null) ?
+ socket.getInputStream() : new SocketInputStream(socket);
+ SocketInputWrapper w = new SocketInputWrapper(socket, stm);
+ w.setTimeout(timeout);
+ return w;
}
/**
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java Fri May 4 20:22:57 2012
@@ -247,6 +247,10 @@ abstract class SocketIOWithTimeout {
ops));
}
}
+
+ public void setTimeout(long timeoutMs) {
+ this.timeout = timeoutMs;
+ }
private static String timeoutExceptionString(SelectableChannel channel,
long timeout, int ops) {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java Fri May 4 20:22:57 2012
@@ -28,9 +28,6 @@ import java.nio.channels.ReadableByteCha
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
/**
* This implements an input stream that can have a timeout while reading.
* This sets non-blocking flag on the socket channel.
@@ -40,9 +37,7 @@ import org.apache.hadoop.classification.
* IllegalBlockingModeException.
* Please use {@link SocketOutputStream} for writing.
*/
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Unstable
-public class SocketInputStream extends InputStream
+class SocketInputStream extends InputStream
implements ReadableByteChannel {
private Reader reader;
@@ -171,4 +166,8 @@ public class SocketInputStream extends I
public void waitForReadable() throws IOException {
reader.waitForIO(SelectionKey.OP_READ);
}
+
+ public void setTimeout(long timeoutMs) {
+ reader.setTimeout(timeoutMs);
+ }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Fri May 4 20:22:57 2012
@@ -59,7 +59,7 @@ public class ServiceAuthorizationManager
public static final Log AUDITLOG =
LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName());
- private static final String AUTHZ_SUCCESSFULL_FOR = "Authorization successfull for ";
+ private static final String AUTHZ_SUCCESSFUL_FOR = "Authorization successful for ";
private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";
@@ -108,7 +108,7 @@ public class ServiceAuthorizationManager
" is not authorized for protocol " + protocol +
", expected client Kerberos principal is " + clientPrincipal);
}
- AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
+ AUDITLOG.info(AUTHZ_SUCCESSFUL_FOR + user + " for protocol="+protocol);
}
public synchronized void refresh(Configuration conf,
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java Fri May 4 20:22:57 2012
@@ -41,6 +41,7 @@ public class DelegationKey implements Wr
private long expiryDate;
@Nullable
private byte[] keyBytes = null;
+ private static final int MAX_KEY_LEN = 1024 * 1024;
/** Default constructore required for Writable */
public DelegationKey() {
@@ -55,6 +56,10 @@ public class DelegationKey implements Wr
this.keyId = keyId;
this.expiryDate = expiryDate;
if (encodedKey != null) {
+ if (encodedKey.length > MAX_KEY_LEN) {
+ throw new RuntimeException("can't create " + encodedKey.length +
+ " byte long DelegationKey.");
+ }
this.keyBytes = encodedKey;
}
}
@@ -102,7 +107,7 @@ public class DelegationKey implements Wr
public void readFields(DataInput in) throws IOException {
keyId = WritableUtils.readVInt(in);
expiryDate = WritableUtils.readVLong(in);
- int len = WritableUtils.readVInt(in);
+ int len = WritableUtils.readVIntInRange(in, -1, MAX_KEY_LEN);
if (len == -1) {
keyBytes = null;
} else {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c Fri May 4 20:22:57 2012
@@ -70,7 +70,7 @@ Java_org_apache_hadoop_security_JniBased
// set the name of the group for subsequent calls to getnetgrent
// note that we want to end group lokup regardless whether setnetgrent
- // was successfull or not (as long as it was called we need to call
+ // was successful or not (as long as it was called we need to call
// endnetgrent)
setnetgrentCalledFlag = 1;
if(setnetgrent(cgroup) == 1) {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri May 4 20:22:57 2012
@@ -63,6 +63,15 @@
</property>
<property>
+ <name>hadoop.security.instrumentation.requires.admin</name>
+ <value>false</value>
+ <description>
+ Indicates if administrator ACLs are required to access
+ instrumentation servlets (JMX, METRICS, CONF, STACKS).
+ </description>
+</property>
+
+<property>
<name>hadoop.security.authentication</name>
<value>simple</value>
<description>Possible values are simple (no authentication), and kerberos
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1332460-1334157
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java Fri May 4 20:22:57 2012
@@ -164,7 +164,7 @@ public class TestConfigurationDeprecatio
conf.set("Y", "y");
conf.set("Z", "z");
// get old key
- assertEquals("y", conf.get("X"));
+ assertEquals("z", conf.get("X"));
}
/**
@@ -305,7 +305,7 @@ public class TestConfigurationDeprecatio
assertTrue("deprecated Key not found", dKFound);
assertTrue("new Key not found", nKFound);
}
-
+
@Test
public void testUnsetWithDeprecatedKeys() {
Configuration conf = new Configuration();
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java Fri May 4 20:22:57 2012
@@ -18,10 +18,15 @@
package org.apache.hadoop.conf;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
import java.io.ByteArrayOutputStream;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.junit.Test;
import junit.framework.TestCase;
@@ -53,4 +58,49 @@ public class TestDeprecatedKeys extends
assertTrue(fileContents.contains("old.config.yet.to.be.deprecated"));
assertTrue(fileContents.contains("new.conf.to.replace.deprecated.conf"));
}
+
+ @Test
+ public void testIteratorWithDeprecatedKeysMappedToMultipleNewKeys() {
+ Configuration conf = new Configuration();
+ Configuration.addDeprecation("dK", new String[]{"nK1", "nK2"});
+ conf.set("k", "v");
+ conf.set("dK", "V");
+ assertEquals("V", conf.get("dK"));
+ assertEquals("V", conf.get("nK1"));
+ assertEquals("V", conf.get("nK2"));
+ conf.set("nK1", "VV");
+ assertEquals("VV", conf.get("dK"));
+ assertEquals("VV", conf.get("nK1"));
+ assertEquals("VV", conf.get("nK2"));
+ conf.set("nK2", "VVV");
+ assertEquals("VVV", conf.get("dK"));
+ assertEquals("VVV", conf.get("nK2"));
+ assertEquals("VVV", conf.get("nK1"));
+ boolean kFound = false;
+ boolean dKFound = false;
+ boolean nK1Found = false;
+ boolean nK2Found = false;
+ for (Map.Entry<String, String> entry : conf) {
+ if (entry.getKey().equals("k")) {
+ assertEquals("v", entry.getValue());
+ kFound = true;
+ }
+ if (entry.getKey().equals("dK")) {
+ assertEquals("VVV", entry.getValue());
+ dKFound = true;
+ }
+ if (entry.getKey().equals("nK1")) {
+ assertEquals("VVV", entry.getValue());
+ nK1Found = true;
+ }
+ if (entry.getKey().equals("nK2")) {
+ assertEquals("VVV", entry.getValue());
+ nK2Found = true;
+ }
+ }
+ assertTrue("regular Key not found", kFound);
+ assertTrue("deprecated Key not found", dKFound);
+ assertTrue("new Key 1 not found", nK1Found);
+ assertTrue("new Key 2 not found", nK2Found);
+ }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java Fri May 4 20:22:57 2012
@@ -19,6 +19,7 @@
package org.apache.hadoop.http;
+import org.apache.hadoop.security.authorize.AccessControlList;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
@@ -70,6 +71,12 @@ public class HttpServerFunctionalTest ex
return createServer(TEST, conf);
}
+ public static HttpServer createTestServer(Configuration conf, AccessControlList adminsAcl)
+ throws IOException {
+ prepareTestWebapp();
+ return createServer(TEST, conf, adminsAcl);
+ }
+
/**
* Create but do not start the test webapp server. The test webapp dir is
* prepared/checked in advance.
@@ -132,6 +139,11 @@ public class HttpServerFunctionalTest ex
throws IOException {
return new HttpServer(webapp, "0.0.0.0", 0, true, conf);
}
+
+ public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
+ throws IOException {
+ return new HttpServer(webapp, "0.0.0.0", 0, true, conf, adminsAcl);
+ }
/**
* Create an HttpServer instance for the given webapp
* @param webapp the webapp to work with
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Fri May 4 20:22:57 2012
@@ -60,7 +60,6 @@ import org.apache.hadoop.security.author
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.mockito.Mock;
import org.mockito.Mockito;
import org.mortbay.util.ajax.JSON;
@@ -360,6 +359,8 @@ public class TestHttpServer extends Http
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
true);
+ conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+ true);
conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
DummyFilterInitializer.class.getName());
@@ -468,6 +469,26 @@ public class TestHttpServer extends Http
}
+ @Test
+ public void testRequiresAuthorizationAccess() throws Exception {
+ Configuration conf = new Configuration();
+ ServletContext context = Mockito.mock(ServletContext.class);
+ Mockito.when(context.getAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE)).thenReturn(conf);
+ HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+ HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
+
+ //requires admin access to instrumentation, FALSE by default
+ Assert.assertTrue(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+
+ //requires admin access to instrumentation, TRUE
+ conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN, true);
+ conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
+ AccessControlList acls = Mockito.mock(AccessControlList.class);
+ Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
+ Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls);
+ Assert.assertFalse(HttpServer.isInstrumentationAccessAllowed(context, request, response));
+ }
+
@Test public void testBindAddress() throws Exception {
checkBindAddress("0.0.0.0", 0, false).stop();
// hang onto this one for a bit more testing
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java Fri May 4 20:22:57 2012
@@ -44,6 +44,26 @@ public class TestWritableUtils extends T
assertEquals(vintlen, WritableUtils.getVIntSize(val));
assertEquals(vintlen, WritableUtils.decodeVIntSize(buf.getData()[0]));
}
+
+ public static void testReadInRange(long val, int lower,
+ int upper, boolean expectSuccess) throws IOException {
+ DataOutputBuffer buf = new DataOutputBuffer();
+ DataInputBuffer inbuf = new DataInputBuffer();
+ WritableUtils.writeVLong(buf, val);
+ try {
+ inbuf.reset(buf.getData(), 0, buf.getLength());
+ long val2 = WritableUtils.readVIntInRange(inbuf, lower, upper);
+ if (!expectSuccess) {
+ fail("expected readVIntInRange to throw an exception");
+ }
+ assertEquals(val, val2);
+ } catch(IOException e) {
+ if (expectSuccess) {
+ LOG.error("unexpected exception:", e);
+ fail("readVIntInRange threw an unexpected exception");
+ }
+ }
+ }
public static void testVInt() throws Exception {
testValue(12, 1);
@@ -61,5 +81,10 @@ public class TestWritableUtils extends T
testValue(-65536, 3);
testValue(65536, 4);
testValue(-65537, 4);
+ testReadInRange(123, 122, 123, true);
+ testReadInRange(123, 0, 100, false);
+ testReadInRange(0, 0, 100, true);
+ testReadInRange(-1, 0, 100, false);
+ testReadInRange(1099511627776L, 0, Integer.MAX_VALUE, false);
}
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java Fri May 4 20:22:57 2012
@@ -25,11 +25,14 @@ import java.net.ConnectException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.NetworkInterface;
+import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
+import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.Enumeration;
+import java.util.concurrent.TimeUnit;
import junit.framework.AssertionFailedError;
@@ -37,7 +40,11 @@ import org.apache.commons.lang.StringUti
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.NetUtilsTestResolver;
+import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
+import org.apache.hadoop.test.MultithreadedTestUtil.TestingThread;
+import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -51,6 +58,13 @@ public class TestNetUtils {
private static final String LOCAL_PORT_NAME = Integer.toString(LOCAL_PORT);
/**
+ * Some slop around expected times when making sure timeouts behave
+ * as expected. We assume that they will be accurate to within
+ * this threshold.
+ */
+ static final long TIME_FUDGE_MILLIS = 200;
+
+ /**
* Test that we can't accidentally connect back to the connecting socket due
* to a quirk in the TCP spec.
*
@@ -81,6 +95,79 @@ public class TestNetUtils {
}
}
+ @Test
+ public void testSocketReadTimeoutWithChannel() throws Exception {
+ doSocketReadTimeoutTest(true);
+ }
+
+ @Test
+ public void testSocketReadTimeoutWithoutChannel() throws Exception {
+ doSocketReadTimeoutTest(false);
+ }
+
+
+ private void doSocketReadTimeoutTest(boolean withChannel)
+ throws IOException {
+ // Binding a ServerSocket is enough to accept connections.
+ // Rely on the backlog to accept for us.
+ ServerSocket ss = new ServerSocket(0);
+
+ Socket s;
+ if (withChannel) {
+ s = NetUtils.getDefaultSocketFactory(new Configuration())
+ .createSocket();
+ Assume.assumeNotNull(s.getChannel());
+ } else {
+ s = new Socket();
+ assertNull(s.getChannel());
+ }
+
+ SocketInputWrapper stm = null;
+ try {
+ NetUtils.connect(s, ss.getLocalSocketAddress(), 1000);
+
+ stm = NetUtils.getInputStream(s, 1000);
+ assertReadTimeout(stm, 1000);
+
+ // Change timeout, make sure it applies.
+ stm.setTimeout(1);
+ assertReadTimeout(stm, 1);
+
+ // If there is a channel, then setting the socket timeout
+ // should not matter. If there is not a channel, it will
+ // take effect.
+ s.setSoTimeout(1000);
+ if (withChannel) {
+ assertReadTimeout(stm, 1);
+ } else {
+ assertReadTimeout(stm, 1000);
+ }
+ } finally {
+ IOUtils.closeStream(stm);
+ IOUtils.closeSocket(s);
+ ss.close();
+ }
+ }
+
+ private void assertReadTimeout(SocketInputWrapper stm, int timeoutMillis)
+ throws IOException {
+ long st = System.nanoTime();
+ try {
+ stm.read();
+ fail("Didn't time out");
+ } catch (SocketTimeoutException ste) {
+ assertTimeSince(st, timeoutMillis);
+ }
+ }
+
+ private void assertTimeSince(long startNanos, int expectedMillis) {
+ long durationNano = System.nanoTime() - startNanos;
+ long millis = TimeUnit.MILLISECONDS.convert(
+ durationNano, TimeUnit.NANOSECONDS);
+ assertTrue("Expected " + expectedMillis + "ms, but took " + millis,
+ Math.abs(millis - expectedMillis) < TIME_FUDGE_MILLIS);
+ }
+
/**
* Test for {
* @throws UnknownHostException @link NetUtils#getLocalInetAddress(String)
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java Fri May 4 20:22:57 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.net;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.net.SocketTimeoutException;
import java.nio.channels.Pipe;
@@ -26,8 +27,13 @@ import java.util.Arrays;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.MultithreadedTestUtil;
+import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
+import org.apache.hadoop.test.MultithreadedTestUtil.TestingThread;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
/**
* This tests timout out from SocketInputStream and
@@ -36,14 +42,17 @@ import junit.framework.TestCase;
* Normal read and write using these streams are tested by pretty much
* every DFS unit test.
*/
-public class TestSocketIOWithTimeout extends TestCase {
+public class TestSocketIOWithTimeout {
static Log LOG = LogFactory.getLog(TestSocketIOWithTimeout.class);
private static int TIMEOUT = 1*1000;
private static String TEST_STRING = "1234567890";
+
+ private MultithreadedTestUtil.TestContext ctx = new TestContext();
- private void doIO(InputStream in, OutputStream out) throws IOException {
+ private void doIO(InputStream in, OutputStream out,
+ int expectedTimeout) throws IOException {
/* Keep on writing or reading until we get SocketTimeoutException.
* It expects this exception to occur within 100 millis of TIMEOUT.
*/
@@ -61,34 +70,15 @@ public class TestSocketIOWithTimeout ext
long diff = System.currentTimeMillis() - start;
LOG.info("Got SocketTimeoutException as expected after " +
diff + " millis : " + e.getMessage());
- assertTrue(Math.abs(TIMEOUT - diff) <= 200);
+ assertTrue(Math.abs(expectedTimeout - diff) <=
+ TestNetUtils.TIME_FUDGE_MILLIS);
break;
}
}
}
- /**
- * Just reads one byte from the input stream.
- */
- static class ReadRunnable implements Runnable {
- private InputStream in;
-
- public ReadRunnable(InputStream in) {
- this.in = in;
- }
- public void run() {
- try {
- in.read();
- } catch (IOException e) {
- LOG.info("Got expection while reading as expected : " +
- e.getMessage());
- return;
- }
- assertTrue(false);
- }
- }
-
- public void testSocketIOWithTimeout() throws IOException {
+ @Test
+ public void testSocketIOWithTimeout() throws Exception {
// first open pipe:
Pipe pipe = Pipe.open();
@@ -96,7 +86,7 @@ public class TestSocketIOWithTimeout ext
Pipe.SinkChannel sink = pipe.sink();
try {
- InputStream in = new SocketInputStream(source, TIMEOUT);
+ final InputStream in = new SocketInputStream(source, TIMEOUT);
OutputStream out = new SocketOutputStream(sink, TIMEOUT);
byte[] writeBytes = TEST_STRING.getBytes();
@@ -105,37 +95,62 @@ public class TestSocketIOWithTimeout ext
out.write(writeBytes);
out.write(byteWithHighBit);
- doIO(null, out);
+ doIO(null, out, TIMEOUT);
in.read(readBytes);
assertTrue(Arrays.equals(writeBytes, readBytes));
assertEquals(byteWithHighBit & 0xff, in.read());
- doIO(in, null);
+ doIO(in, null, TIMEOUT);
+
+ // Change timeout on the read side.
+ ((SocketInputStream)in).setTimeout(TIMEOUT * 2);
+ doIO(in, null, TIMEOUT * 2);
+
/*
* Verify that it handles interrupted threads properly.
- * Use a large timeout and expect the thread to return quickly.
+ * Use a large timeout and expect the thread to return quickly
+ * upon interruption.
*/
- in = new SocketInputStream(source, 0);
- Thread thread = new Thread(new ReadRunnable(in));
- thread.start();
-
- try {
- Thread.sleep(1000);
- } catch (InterruptedException ignored) {}
-
+ ((SocketInputStream)in).setTimeout(0);
+ TestingThread thread = new TestingThread(ctx) {
+ @Override
+ public void doWork() throws Exception {
+ try {
+ in.read();
+ fail("Did not fail with interrupt");
+ } catch (InterruptedIOException ste) {
+ LOG.info("Got exception while reading as expected : " +
+ ste.getMessage());
+ }
+ }
+ };
+ ctx.addThread(thread);
+ ctx.startThreads();
+ // If the thread is interrupted before it calls read()
+ // then it throws ClosedByInterruptException due to
+ // some Java quirk. Waiting for it to call read()
+ // gets it into select(), so we get the expected
+ // InterruptedIOException.
+ Thread.sleep(1000);
thread.interrupt();
+ ctx.stop();
+
+ //make sure the channels are still open
+ assertTrue(source.isOpen());
+ assertTrue(sink.isOpen());
+ // Nevertheless, the output stream is closed, because
+ // a partial write may have succeeded (see comment in
+ // SocketOutputStream#write(byte[], int, int))
try {
- thread.join();
- } catch (InterruptedException e) {
- throw new IOException("Unexpected InterruptedException : " + e);
+ out.write(1);
+ fail("Did not throw");
+ } catch (IOException ioe) {
+ GenericTestUtils.assertExceptionContains(
+ "stream is closed", ioe);
}
- //make sure the channels are still open
- assertTrue(source.isOpen());
- assertTrue(sink.isOpen());
-
out.close();
assertFalse(sink.isOpen());