You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/05/08 23:58:04 UTC
svn commit: r1335791 - in
/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common: ./
src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/conf/
src/main/java/org/apache/hadoop/fs/
src/main/java/org/apache/hadoop/fs/viewfs/ src/m...
Author: szetszwo
Date: Tue May 8 21:57:58 2012
New Revision: 1335791
URL: http://svn.apache.org/viewvc?rev=1335791&view=rev
Log:
Merge r1334158 through r1335790 from trunk.
Removed:
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Krb5AndCertsSslSocketConnector.java
Modified:
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt Tue May 8 21:57:58 2012
@@ -326,8 +326,6 @@ Release 2.0.0 - UNRELEASED
HADOOP-8104. Inconsistent Jackson versions (tucu)
- HADOOP-7940. The Text.clear() method does not clear the bytes as intended. (Csaba Miklos via harsh)
-
HADOOP-8119. Fix javac warnings in TestAuthenticationFilter in hadoop-auth.
(szetszwo)
@@ -423,6 +421,14 @@ Release 2.0.0 - UNRELEASED
HADOOP-8355. SPNEGO filter throws/logs exception when authentication fails (tucu)
+ HADOOP-8349. ViewFS doesn't work when the root of a file system is mounted. (atm)
+
+ HADOOP-8328. Duplicate FileSystem Statistics object for 'file' scheme.
+ (tomwhite)
+
+ HADOOP-8359. Fix javadoc warnings in Configuration. (Anupam Seth via
+ szetszwo)
+
BREAKDOWN OF HADOOP-7454 SUBTASKS
HADOOP-7455. HA: Introduce HA Service Protocol Interface. (suresh)
@@ -536,6 +542,11 @@ Release 0.23.3 - UNRELEASED
HADOOP-8335. Improve Configuration's address handling (Daryn Sharp via
bobby)
+ HADOOP-8327. distcpv2 and distcpv1 jars should not coexist (Dave Thompson
+ via bobby)
+
+ HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)
+
Release 0.23.2 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1334158-1335790
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1334158-1335790
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1334158-1335790
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Tue May 8 21:57:58 2012
@@ -278,7 +278,7 @@ public class Configuration implements It
* @param key
* @param newKeys
* @param customMessage
- * @deprecated use {@link addDeprecation(String key, String newKey,
+ * @deprecated use {@link #addDeprecation(String key, String newKey,
String customMessage)} instead
*/
@Deprecated
@@ -328,7 +328,7 @@ public class Configuration implements It
*
* @param key Key that is to be deprecated
* @param newKeys list of keys that take up the values of deprecated key
- * @deprecated use {@link addDeprecation(String key, String newKey)} instead
+ * @deprecated use {@link #addDeprecation(String key, String newKey)} instead
*/
@Deprecated
public synchronized static void addDeprecation(String key, String[] newKeys) {
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java Tue May 8 21:57:58 2012
@@ -346,7 +346,7 @@ public abstract class AbstractFileSystem
path);
} else {
throw new InvalidPathException(
- "Path without scheme with non-null autorhrity:" + path);
+ "Path without scheme with non-null authority:" + path);
}
}
String thisScheme = this.getUri().getScheme();
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java Tue May 8 21:57:58 2012
@@ -53,7 +53,7 @@ import org.apache.hadoop.util.Progressab
public class FilterFileSystem extends FileSystem {
protected FileSystem fs;
- private String swapScheme;
+ protected String swapScheme;
/*
* so that extending classes can define it
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java Tue May 8 21:57:58 2012
@@ -39,6 +39,17 @@ public class LocalFileSystem extends Che
public LocalFileSystem() {
this(new RawLocalFileSystem());
}
+
+ @Override
+ public void initialize(URI name, Configuration conf) throws IOException {
+ if (fs.getConf() == null) {
+ fs.initialize(name, conf);
+ }
+ String scheme = name.getScheme();
+ if (!scheme.equals(fs.getUri().getScheme())) {
+ swapScheme = scheme;
+ }
+ }
/**
* Return the protocol scheme for the FileSystem.
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java Tue May 8 21:57:58 2012
@@ -223,6 +223,13 @@ public class Path implements Comparable
return isUriPathAbsolute();
}
+ /**
+ * @return true if and only if this path represents the root of a file system
+ */
+ public boolean isRoot() {
+ return getParent() == null;
+ }
+
/** Returns the final component of this path.*/
public String getName() {
String path = uri.getPath();
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Tue May 8 21:57:58 2012
@@ -75,7 +75,8 @@ class ChRootedFileSystem extends FilterF
protected Path fullPath(final Path path) {
super.checkPath(path);
return path.isAbsolute() ?
- new Path(chRootPathPartString + path.toUri().getPath()) :
+ new Path((chRootPathPart.isRoot() ? "" : chRootPathPartString)
+ + path.toUri().getPath()) :
new Path(chRootPathPartString + workingDir.toUri().getPath(), path);
}
@@ -127,7 +128,7 @@ class ChRootedFileSystem extends FilterF
}
String pathPart = p.toUri().getPath();
return (pathPart.length() == chRootPathPartString.length()) ? "" : pathPart
- .substring(chRootPathPartString.length() + 1);
+ .substring(chRootPathPartString.length() + (chRootPathPart.isRoot() ? 0 : 1));
}
@Override
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java Tue May 8 21:57:58 2012
@@ -79,7 +79,8 @@ class ChRootedFs extends AbstractFileSys
*/
protected Path fullPath(final Path path) {
super.checkPath(path);
- return new Path(chRootPathPartString + path.toUri().getPath());
+ return new Path((chRootPathPart.isRoot() ? "" : chRootPathPartString)
+ + path.toUri().getPath());
}
public ChRootedFs(final AbstractFileSystem fs, final Path theRoot)
@@ -127,7 +128,8 @@ class ChRootedFs extends AbstractFileSys
}
String pathPart = p.toUri().getPath();
return (pathPart.length() == chRootPathPartString.length()) ?
- "" : pathPart.substring(chRootPathPartString.length() + 1);
+ "" : pathPart.substring(chRootPathPartString.length() +
+ (chRootPathPart.isRoot() ? 0 : 1));
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Tue May 8 21:57:58 2012
@@ -52,8 +52,6 @@ import org.apache.hadoop.fs.CommonConfig
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.metrics.MetricsServlet;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.ReflectionUtils;
@@ -99,6 +97,7 @@ public class HttpServer implements Filte
// gets stored.
public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
static final String ADMINS_ACL = "admins.acl";
+ public static final String SPNEGO_FILTER = "SpnegoFilter";
public static final String BIND_ADDRESS = "bind.address";
@@ -237,11 +236,7 @@ public class HttpServer implements Filte
webServer.addHandler(webAppContext);
addDefaultApps(contexts, appDir, conf);
-
- defineFilter(webAppContext, "krb5Filter",
- Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(),
- null, null);
-
+
addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
@@ -424,12 +419,13 @@ public class HttpServer implements Filte
* protect with Kerberos authentication.
* Note: This method is to be used for adding servlets that facilitate
* internal communication and not for user facing functionality. For
- * servlets added using this method, filters (except internal Kerberized
+ * servlets added using this method, filters (except internal Kerberos
* filters) are not enabled.
*
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
* @param clazz The servlet class
+ * @param requireAuth Require Kerberos authenticate to access servlet
*/
public void addInternalServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz, boolean requireAuth) {
@@ -440,11 +436,11 @@ public class HttpServer implements Filte
webAppContext.addServlet(holder, pathSpec);
if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
- LOG.info("Adding Kerberos filter to " + name);
+ LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
ServletHandler handler = webAppContext.getServletHandler();
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
- fmap.setFilterName("krb5Filter");
+ fmap.setFilterName(SPNEGO_FILTER);
fmap.setDispatches(Handler.ALL);
handler.addFilterMapping(fmap);
}
@@ -584,22 +580,10 @@ public class HttpServer implements Filte
* Configure an ssl listener on the server.
* @param addr address to listen on
* @param sslConf conf to retrieve ssl options
- * @param needClientAuth whether client authentication is required
- */
- public void addSslListener(InetSocketAddress addr, Configuration sslConf,
- boolean needClientAuth) throws IOException {
- addSslListener(addr, sslConf, needClientAuth, false);
- }
-
- /**
- * Configure an ssl listener on the server.
- * @param addr address to listen on
- * @param sslConf conf to retrieve ssl options
* @param needCertsAuth whether x509 certificate authentication is required
- * @param needKrbAuth whether to allow kerberos auth
*/
public void addSslListener(InetSocketAddress addr, Configuration sslConf,
- boolean needCertsAuth, boolean needKrbAuth) throws IOException {
+ boolean needCertsAuth) throws IOException {
if (webServer.isStarted()) {
throw new IOException("Failed to add ssl listener");
}
@@ -612,15 +596,7 @@ public class HttpServer implements Filte
System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
"ssl.server.truststore.type", "jks"));
}
- Krb5AndCertsSslSocketConnector.MODE mode;
- if(needCertsAuth && needKrbAuth)
- mode = MODE.BOTH;
- else if (!needCertsAuth && needKrbAuth)
- mode = MODE.KRB;
- else // Default to certificates
- mode = MODE.CERTS;
-
- SslSocketConnector sslListener = new Krb5AndCertsSslSocketConnector(mode);
+ SslSocketConnector sslListener = new SslSocketConnector();
sslListener.setHost(addr.getHostName());
sslListener.setPort(addr.getPort());
sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java Tue May 8 21:57:58 2012
@@ -239,7 +239,6 @@ public class Text extends BinaryComparab
*/
public void clear() {
length = 0;
- bytes = EMPTY_BYTES;
}
/*
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Tue May 8 21:57:58 2012
@@ -17,14 +17,11 @@
package org.apache.hadoop.security;
import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
+import java.net.URLConnection;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.PrivilegedAction;
@@ -45,6 +42,8 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenInfo;
@@ -135,79 +134,6 @@ public class SecurityUtil {
}
/**
- * Explicitly pull the service ticket for the specified host. This solves a
- * problem with Java's Kerberos SSL problem where the client cannot
- * authenticate against a cross-realm service. It is necessary for clients
- * making kerberized https requests to call this method on the target URL
- * to ensure that in a cross-realm environment the remote host will be
- * successfully authenticated.
- *
- * This method is internal to Hadoop and should not be used by other
- * applications. This method should not be considered stable or open:
- * it will be removed when the Java behavior is changed.
- *
- * @param remoteHost Target URL the krb-https client will access
- * @throws IOException if the service ticket cannot be retrieved
- */
- public static void fetchServiceTicket(URL remoteHost) throws IOException {
- if(!UserGroupInformation.isSecurityEnabled())
- return;
-
- String serviceName = "host/" + remoteHost.getHost();
- if (LOG.isDebugEnabled())
- LOG.debug("Fetching service ticket for host at: " + serviceName);
- Object serviceCred = null;
- Method credsToTicketMeth;
- Class<?> krb5utilClass;
- try {
- Class<?> principalClass;
- Class<?> credentialsClass;
-
- if (System.getProperty("java.vendor").contains("IBM")) {
- principalClass = Class.forName("com.ibm.security.krb5.PrincipalName");
-
- credentialsClass = Class.forName("com.ibm.security.krb5.Credentials");
- krb5utilClass = Class.forName("com.ibm.security.jgss.mech.krb5");
- } else {
- principalClass = Class.forName("sun.security.krb5.PrincipalName");
- credentialsClass = Class.forName("sun.security.krb5.Credentials");
- krb5utilClass = Class.forName("sun.security.jgss.krb5.Krb5Util");
- }
- @SuppressWarnings("rawtypes")
- Constructor principalConstructor = principalClass.getConstructor(String.class,
- int.class);
- Field KRB_NT_SRV_HST = principalClass.getDeclaredField("KRB_NT_SRV_HST");
- Method acquireServiceCredsMeth =
- credentialsClass.getDeclaredMethod("acquireServiceCreds",
- String.class, credentialsClass);
- Method ticketToCredsMeth = krb5utilClass.getDeclaredMethod("ticketToCreds",
- KerberosTicket.class);
- credsToTicketMeth = krb5utilClass.getDeclaredMethod("credsToTicket",
- credentialsClass);
-
- Object principal = principalConstructor.newInstance(serviceName,
- KRB_NT_SRV_HST.get(principalClass));
-
- serviceCred = acquireServiceCredsMeth.invoke(credentialsClass,
- principal.toString(),
- ticketToCredsMeth.invoke(krb5utilClass, getTgtFromSubject()));
- } catch (Exception e) {
- throw new IOException("Can't get service ticket for: "
- + serviceName, e);
- }
- if (serviceCred == null) {
- throw new IOException("Can't get service ticket for " + serviceName);
- }
- try {
- Subject.getSubject(AccessController.getContext()).getPrivateCredentials()
- .add(credsToTicketMeth.invoke(krb5utilClass, serviceCred));
- } catch (Exception e) {
- throw new IOException("Can't get service ticket for: "
- + serviceName, e);
- }
- }
-
- /**
* Convert Kerberos principal name pattern to valid Kerberos principal
* names. It replaces hostname pattern with hostname, which should be
* fully-qualified domain name. If hostname is null or "0.0.0.0", it uses
@@ -514,6 +440,30 @@ public class SecurityUtil {
}
/**
+ * Open a (if need be) secure connection to a URL in a secure environment
+ * that is using SPNEGO to authenticate its URLs. All Namenode and Secondary
+ * Namenode URLs that are protected via SPNEGO should be accessed via this
+ * method.
+ *
+ * @param url to authenticate via SPNEGO.
+ * @return A connection that has been authenticated via SPNEGO
+ * @throws IOException If unable to authenticate via SPNEGO
+ */
+ public static URLConnection openSecureHttpConnection(URL url) throws IOException {
+ if(!UserGroupInformation.isSecurityEnabled()) {
+ return url.openConnection();
+ }
+
+ AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+ try {
+ return new AuthenticatedURL().openConnection(url, token);
+ } catch (AuthenticationException e) {
+ throw new IOException("Exception trying to open authenticated connection to "
+ + url, e);
+ }
+ }
+
+ /**
* Resolves a host subject to the security requirements determined by
* hadoop.security.token.service.use_ip.
*
@@ -664,10 +614,4 @@ public class SecurityUtil {
}
}
- public static void initKrb5CipherSuites() {
- if (UserGroupInformation.isSecurityEnabled()) {
- System.setProperty("https.cipherSuites",
- Krb5AndCertsSslSocketConnector.KRB5_CIPHER_SUITES.get(0));
- }
- }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml Tue May 8 21:57:58 2012
@@ -129,13 +129,6 @@
</property>
<property>
- <name>dfs.secondary.https.port</name>
- <value>50490</value>
- <description>The https port where secondary-namenode binds</description>
-
- </property>
-
- <property>
<name>dfs.datanode.kerberos.principal</name>
<value>dn/_HOST@${local.realm}</value>
<description>
Propchange: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1334158-1335790
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Tue May 8 21:57:58 2012
@@ -18,11 +18,14 @@
package org.apache.hadoop.fs;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem.Statistics;
+
import static org.apache.hadoop.fs.FileSystemTestHelper.*;
import java.io.*;
import static org.junit.Assert.*;
+
import org.junit.Before;
import org.junit.Test;
@@ -233,4 +236,16 @@ public class TestLocalFileSystem {
assertTrue("Did not delete file", fs.delete(file1));
assertTrue("Did not delete non-empty dir", fs.delete(dir1));
}
+
+ @Test
+ public void testStatistics() throws Exception {
+ FileSystem.getLocal(new Configuration());
+ int fileSchemeCount = 0;
+ for (Statistics stats : FileSystem.getAllStatistics()) {
+ if (stats.getScheme().equals("file")) {
+ fileSchemeCount++;
+ }
+ }
+ assertEquals(1, fileSchemeCount);
+ }
}
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java Tue May 8 21:57:58 2012
@@ -71,11 +71,8 @@ public class ViewFileSystemBaseTest {
@Before
public void setUp() throws Exception {
- targetTestRoot = FileSystemTestHelper.getAbsoluteTestRootPath(fsTarget);
- // In case previous test was killed before cleanup
- fsTarget.delete(targetTestRoot, true);
+ initializeTargetTestRoot();
- fsTarget.mkdirs(targetTestRoot);
// Make user and data dirs - we creates links to them in the mount table
fsTarget.mkdirs(new Path(targetTestRoot,"user"));
fsTarget.mkdirs(new Path(targetTestRoot,"data"));
@@ -99,7 +96,16 @@ public class ViewFileSystemBaseTest {
fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true);
}
+ void initializeTargetTestRoot() throws IOException {
+ targetTestRoot = FileSystemTestHelper.getAbsoluteTestRootPath(fsTarget);
+ // In case previous test was killed before cleanup
+ fsTarget.delete(targetTestRoot, true);
+
+ fsTarget.mkdirs(targetTestRoot);
+ }
+
void setupMountPoints() {
+ ConfigUtil.addLink(conf, "/targetRoot", targetTestRoot.toUri());
ConfigUtil.addLink(conf, "/user", new Path(targetTestRoot,"user").toUri());
ConfigUtil.addLink(conf, "/user2", new Path(targetTestRoot,"user").toUri());
ConfigUtil.addLink(conf, "/data", new Path(targetTestRoot,"data").toUri());
@@ -121,7 +127,7 @@ public class ViewFileSystemBaseTest {
}
int getExpectedMountPoints() {
- return 7;
+ return 8;
}
/**
@@ -166,7 +172,7 @@ public class ViewFileSystemBaseTest {
}
}
}
- Assert.assertEquals(expectedTokenCount / 2, delTokens.size());
+ Assert.assertEquals((expectedTokenCount + 1) / 2, delTokens.size());
}
int getExpectedDelegationTokenCountWithCredentials() {
@@ -309,6 +315,16 @@ public class ViewFileSystemBaseTest {
Assert.assertTrue("Renamed dest should exist as dir in target",
fsTarget.isDirectory(new Path(targetTestRoot,"user/dirFooBar")));
+ // Make a directory under a directory that's mounted from the root of another FS
+ fsView.mkdirs(new Path("/targetRoot/dirFoo"));
+ Assert.assertTrue(fsView.exists(new Path("/targetRoot/dirFoo")));
+ boolean dirFooPresent = false;
+ for (FileStatus fileStatus : fsView.listStatus(new Path("/targetRoot/"))) {
+ if (fileStatus.getPath().getName().equals("dirFoo")) {
+ dirFooPresent = true;
+ }
+ }
+ Assert.assertTrue(dirFooPresent);
}
// rename across mount points that point to same target also fail
@@ -418,7 +434,7 @@ public class ViewFileSystemBaseTest {
}
int getExpectedDirPaths() {
- return 6;
+ return 7;
}
@Test
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java Tue May 8 21:57:58 2012
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.AbstractFile
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextTestHelper;
+import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.FileContextTestHelper.fileType;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FsConstants;
@@ -77,12 +78,8 @@ public class ViewFsBaseTest {
@Before
public void setUp() throws Exception {
-
- targetTestRoot = FileContextTestHelper.getAbsoluteTestRootPath(fcTarget);
- // In case previous test was killed before cleanup
- fcTarget.delete(targetTestRoot, true);
+ initializeTargetTestRoot();
- fcTarget.mkdir(targetTestRoot, FileContext.DEFAULT_PERM, true);
// Make user and data dirs - we creates links to them in the mount table
fcTarget.mkdir(new Path(targetTestRoot,"user"),
FileContext.DEFAULT_PERM, true);
@@ -100,6 +97,7 @@ public class ViewFsBaseTest {
// Set up the defaultMT in the config with our mount point links
conf = new Configuration();
+ ConfigUtil.addLink(conf, "/targetRoot", targetTestRoot.toUri());
ConfigUtil.addLink(conf, "/user",
new Path(targetTestRoot,"user").toUri());
ConfigUtil.addLink(conf, "/user2",
@@ -118,6 +116,14 @@ public class ViewFsBaseTest {
fcView = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
// Also try viewfs://default/ - note authority is name of mount table
}
+
+ void initializeTargetTestRoot() throws IOException {
+ targetTestRoot = FileContextTestHelper.getAbsoluteTestRootPath(fcTarget);
+ // In case previous test was killed before cleanup
+ fcTarget.delete(targetTestRoot, true);
+
+ fcTarget.mkdir(targetTestRoot, FileContext.DEFAULT_PERM, true);
+ }
@After
public void tearDown() throws Exception {
@@ -128,7 +134,11 @@ public class ViewFsBaseTest {
public void testGetMountPoints() {
ViewFs viewfs = (ViewFs) fcView.getDefaultFileSystem();
MountPoint[] mountPoints = viewfs.getMountPoints();
- Assert.assertEquals(7, mountPoints.length);
+ Assert.assertEquals(8, mountPoints.length);
+ }
+
+ int getExpectedDelegationTokenCount() {
+ return 0;
}
/**
@@ -140,7 +150,7 @@ public class ViewFsBaseTest {
public void testGetDelegationTokens() throws IOException {
List<Token<?>> delTokens =
fcView.getDelegationTokens(new Path("/"), "sanjay");
- Assert.assertEquals(0, delTokens.size());
+ Assert.assertEquals(getExpectedDelegationTokenCount(), delTokens.size());
}
@@ -281,6 +291,19 @@ public class ViewFsBaseTest {
Assert.assertTrue("Renamed dest should exist as dir in target",
isDir(fcTarget,new Path(targetTestRoot,"user/dirFooBar")));
+ // Make a directory under a directory that's mounted from the root of another FS
+ fcView.mkdir(new Path("/targetRoot/dirFoo"), FileContext.DEFAULT_PERM, false);
+ Assert.assertTrue(exists(fcView, new Path("/targetRoot/dirFoo")));
+ boolean dirFooPresent = false;
+ RemoteIterator<FileStatus> dirContents = fcView.listStatus(new Path(
+ "/targetRoot/"));
+ while (dirContents.hasNext()) {
+ FileStatus fileStatus = dirContents.next();
+ if (fileStatus.getPath().getName().equals("dirFoo")) {
+ dirFooPresent = true;
+ }
+ }
+ Assert.assertTrue(dirFooPresent);
}
// rename across mount points that point to same target also fail
@@ -358,7 +381,7 @@ public class ViewFsBaseTest {
FileStatus[] dirPaths = fcView.util().listStatus(new Path("/"));
FileStatus fs;
- Assert.assertEquals(6, dirPaths.length);
+ Assert.assertEquals(7, dirPaths.length);
fs = FileContextTestHelper.containsPath(fcView, "/user", dirPaths);
Assert.assertNotNull(fs);
Assert.assertTrue("A mount should appear as symlink", fs.isSymlink());
Modified: hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java Tue May 8 21:57:58 2012
@@ -192,16 +192,6 @@ public class TestText extends TestCase {
assertTrue(text.find("\u20ac", 5)==11);
}
- public void testClear() {
- Text text = new Text();
- assertEquals("", text.toString());
- assertEquals(0, text.getBytes().length);
- text = new Text("abcd\u20acbdcd\u20ac");
- text.clear();
- assertEquals("", text.toString());
- assertEquals(0, text.getBytes().length);
- }
-
public void testFindAfterUpdatingContents() throws Exception {
Text text = new Text("abcd");
text.set("a".getBytes());