You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by tu...@apache.org on 2012/07/26 15:39:06 UTC
svn commit: r1365988 [1/3] - in /hadoop/common/trunk/hadoop-hdfs-project:
hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/dev-support/
hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/
hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/...
Author: tucu
Date: Thu Jul 26 13:39:05 2012
New Revision: 1365988
URL: http://svn.apache.org/viewvc?rev=1365988&view=rev
Log:
HDFS-3113. httpfs does not support delegation tokens. (tucu)
Added:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java.orig
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandlerForTesting.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java.orig
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/service/security/TestDelegationTokenManagerService.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/KerberosTestUtils.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java.orig
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/krb5.conf
Removed:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/AuthFilter.java
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestWebhdfsFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml Thu Jul 26 13:39:05 2012
@@ -25,4 +25,9 @@
<Method name="destroy" />
<Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.lib.servlet.ServerWebApp" />
+ <Field name="authority" />
+ <Bug pattern="IS2_INCONSISTENT_SYNC" />
+ </Match>
</FindBugsFilter>
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml Thu Jul 26 13:39:05 2012
@@ -43,6 +43,8 @@
<httpfs.tomcat.dist.dir>
${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat
</httpfs.tomcat.dist.dir>
+ <kerberos.realm>LOCALHOST</kerberos.realm>
+ <test.exclude.kerberos.test>**/TestHttpFSWithKerberos.java</test.exclude.kerberos.test>
</properties>
<dependencies>
@@ -267,6 +269,22 @@
</excludes>
</resource>
</resources>
+ <testResources>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ <filtering>false</filtering>
+ <excludes>
+ <exclude>krb5.conf</exclude>
+ </excludes>
+ </testResource>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ <filtering>true</filtering>
+ <includes>
+ <include>krb5.conf</include>
+ </includes>
+ </testResource>
+ </testResources>
<plugins>
<plugin>
@@ -281,6 +299,16 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<threadCount>1</threadCount>
+ <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+ <systemPropertyVariables>
+ <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
+ <kerberos.realm>${kerberos.realm}</kerberos.realm>
+ </systemPropertyVariables>
+ <excludes>
+ <exclude>**/${test.exclude}.java</exclude>
+ <exclude>${test.exclude.pattern}</exclude>
+ <exclude>${test.exclude.kerberos.test}</exclude>
+ </excludes>
</configuration>
</plugin>
<plugin>
@@ -396,6 +424,36 @@
<profiles>
<profile>
+ <id>testKerberos</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <properties>
+ <test.exclude.kerberos.test>_</test.exclude.kerberos.test>
+ </properties>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>once</forkMode>
+ <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+ <systemPropertyVariables>
+ <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
+ <kerberos.realm>${kerberos.realm}</kerberos.realm>
+ <httpfs.http.hostname>localhost</httpfs.http.hostname>
+ </systemPropertyVariables>
+ <includes>
+ <include>**/TestHttpFSWithKerberos.java</include>
+ </includes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+
+ <profile>
<id>docs</id>
<activation>
<activeByDefault>false</activeByDefault>
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Thu Jul 26 13:39:05 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs.http.client
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.DelegationTokenRenewer;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
@@ -28,16 +29,18 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
@@ -47,30 +50,32 @@ import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.InputStreamReader;
import java.io.OutputStream;
-import java.lang.reflect.Constructor;
import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
-import java.net.URLEncoder;
+import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.concurrent.Callable;
/**
* HttpFSServer implementation of the FileSystemAccess FileSystem.
* <p/>
* This implementation allows a user to access HDFS over HTTP via a HttpFSServer server.
*/
-public class HttpFSFileSystem extends FileSystem {
+public class HttpFSFileSystem extends FileSystem
+ implements DelegationTokenRenewer.Renewable {
- public static final String SERVICE_NAME = "/webhdfs";
+ public static final String SERVICE_NAME = HttpFSUtils.SERVICE_NAME;
- public static final String SERVICE_VERSION = "/v1";
+ public static final String SERVICE_VERSION = HttpFSUtils.SERVICE_VERSION;
- public static final String SERVICE_PREFIX = SERVICE_NAME + SERVICE_VERSION;
+ public static final String SCHEME = "webhdfs";
public static final String OP_PARAM = "op";
public static final String DO_AS_PARAM = "doas";
@@ -84,7 +89,6 @@ public class HttpFSFileSystem extends Fi
public static final String GROUP_PARAM = "group";
public static final String MODIFICATION_TIME_PARAM = "modificationtime";
public static final String ACCESS_TIME_PARAM = "accesstime";
- public static final String RENEWER_PARAM = "renewer";
public static final Short DEFAULT_PERMISSION = 0755;
@@ -144,9 +148,6 @@ public class HttpFSFileSystem extends Fi
public static final String CONTENT_SUMMARY_SPACE_CONSUMED_JSON = "spaceConsumed";
public static final String CONTENT_SUMMARY_SPACE_QUOTA_JSON = "spaceQuota";
- public static final String DELEGATION_TOKEN_JSON = "Token";
- public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
-
public static final String ERROR_JSON = "RemoteException";
public static final String ERROR_EXCEPTION_JSON = "exception";
public static final String ERROR_CLASSNAME_JSON = "javaClassName";
@@ -184,8 +185,31 @@ public class HttpFSFileSystem extends Fi
private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
private URI uri;
+ private InetSocketAddress httpFSAddr;
private Path workingDir;
+ private UserGroupInformation realUser;
private String doAs;
+ private Token<?> delegationToken;
+
+ //This method enables handling UGI doAs with SPNEGO, we have to
+ //fallback to the realuser who logged in with Kerberos credentials
+ private <T> T doAsRealUserIfNecessary(final Callable<T> callable)
+ throws IOException {
+ try {
+ if (realUser.getShortUserName().equals(doAs)) {
+ return callable.call();
+ } else {
+ return realUser.doAs(new PrivilegedExceptionAction<T>() {
+ @Override
+ public T run() throws Exception {
+ return callable.call();
+ }
+ });
+ }
+ } catch (Exception ex) {
+ throw new IOException(ex.toString(), ex);
+ }
+ }
/**
* Convenience method that creates a <code>HttpURLConnection</code> for the
@@ -204,25 +228,23 @@ public class HttpFSFileSystem extends Fi
*
* @throws IOException thrown if an IO error occurrs.
*/
- private HttpURLConnection getConnection(String method, Map<String, String> params,
- Path path, boolean makeQualified) throws IOException {
- params.put(DO_AS_PARAM, doAs);
+ private HttpURLConnection getConnection(final String method,
+ Map<String, String> params, Path path, boolean makeQualified)
+ throws IOException {
+ if (!realUser.getShortUserName().equals(doAs)) {
+ params.put(DO_AS_PARAM, doAs);
+ }
+ HttpFSKerberosAuthenticator.injectDelegationToken(params, delegationToken);
if (makeQualified) {
path = makeQualified(path);
}
- URI uri = path.toUri();
- StringBuilder sb = new StringBuilder();
- sb.append(uri.getScheme()).append("://").append(uri.getAuthority()).
- append(SERVICE_PREFIX).append(uri.getPath());
-
- String separator = "?";
- for (Map.Entry<String, String> entry : params.entrySet()) {
- sb.append(separator).append(entry.getKey()).append("=").
- append(URLEncoder.encode(entry.getValue(), "UTF8"));
- separator = "&";
- }
- URL url = new URL(sb.toString());
- return getConnection(url, method);
+ final URL url = HttpFSUtils.createHttpURL(path, params);
+ return doAsRealUserIfNecessary(new Callable<HttpURLConnection>() {
+ @Override
+ public HttpURLConnection call() throws Exception {
+ return getConnection(url, method);
+ }
+ });
}
/**
@@ -240,7 +262,8 @@ public class HttpFSFileSystem extends Fi
*/
private HttpURLConnection getConnection(URL url, String method) throws IOException {
Class<? extends Authenticator> klass =
- getConf().getClass("httpfs.authenticator.class", HttpKerberosAuthenticator.class, Authenticator.class);
+ getConf().getClass("httpfs.authenticator.class",
+ HttpFSKerberosAuthenticator.class, Authenticator.class);
Authenticator authenticator = ReflectionUtils.newInstance(klass, getConf());
try {
HttpURLConnection conn = new AuthenticatedURL(authenticator).openConnection(url, authToken);
@@ -255,63 +278,6 @@ public class HttpFSFileSystem extends Fi
}
/**
- * Convenience method that JSON Parses the <code>InputStream</code> of a <code>HttpURLConnection</code>.
- *
- * @param conn the <code>HttpURLConnection</code>.
- *
- * @return the parsed JSON object.
- *
- * @throws IOException thrown if the <code>InputStream</code> could not be JSON parsed.
- */
- private static Object jsonParse(HttpURLConnection conn) throws IOException {
- try {
- JSONParser parser = new JSONParser();
- return parser.parse(new InputStreamReader(conn.getInputStream()));
- } catch (ParseException ex) {
- throw new IOException("JSON parser error, " + ex.getMessage(), ex);
- }
- }
-
- /**
- * Validates the status of an <code>HttpURLConnection</code> against an expected HTTP
- * status code. If the current status code is not the expected one it throws an exception
- * with a detail message using Server side error messages if available.
- *
- * @param conn the <code>HttpURLConnection</code>.
- * @param expected the expected HTTP status code.
- *
- * @throws IOException thrown if the current status code does not match the expected one.
- */
- private static void validateResponse(HttpURLConnection conn, int expected) throws IOException {
- int status = conn.getResponseCode();
- if (status != expected) {
- try {
- JSONObject json = (JSONObject) jsonParse(conn);
- json = (JSONObject) json.get(ERROR_JSON);
- String message = (String) json.get(ERROR_MESSAGE_JSON);
- String exception = (String) json.get(ERROR_EXCEPTION_JSON);
- String className = (String) json.get(ERROR_CLASSNAME_JSON);
-
- try {
- ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
- Class klass = cl.loadClass(className);
- Constructor constr = klass.getConstructor(String.class);
- throw (IOException) constr.newInstance(message);
- } catch (IOException ex) {
- throw ex;
- } catch (Exception ex) {
- throw new IOException(MessageFormat.format("{0} - {1}", exception, message));
- }
- } catch (IOException ex) {
- if (ex.getCause() instanceof IOException) {
- throw (IOException) ex.getCause();
- }
- throw new IOException(MessageFormat.format("HTTP status [{0}], {1}", status, conn.getResponseMessage()));
- }
- }
- }
-
- /**
* Called after a new FileSystem instance is constructed.
*
* @param name a uri whose authority section names the host, port, etc. for this FileSystem
@@ -320,15 +286,28 @@ public class HttpFSFileSystem extends Fi
@Override
public void initialize(URI name, Configuration conf) throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- doAs = ugi.getUserName();
+
+ //the real use is the one that has the Kerberos credentials needed for
+ //SPNEGO to work
+ realUser = ugi.getRealUser();
+ if (realUser == null) {
+ realUser = UserGroupInformation.getLoginUser();
+ }
+ doAs = ugi.getShortUserName();
super.initialize(name, conf);
try {
- uri = new URI(name.getScheme() + "://" + name.getHost() + ":" + name.getPort());
+ uri = new URI(name.getScheme() + "://" + name.getAuthority());
+ httpFSAddr = NetUtils.createSocketAddr(getCanonicalUri().toString());
} catch (URISyntaxException ex) {
throw new IOException(ex);
}
}
+ @Override
+ public String getScheme() {
+ return SCHEME;
+ }
+
/**
* Returns a URI whose scheme and authority identify this FileSystem.
*
@@ -340,6 +319,16 @@ public class HttpFSFileSystem extends Fi
}
/**
+ * Get the default port for this file system.
+ * @return the default port or 0 if there isn't one
+ */
+ @Override
+ protected int getDefaultPort() {
+ return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
+ DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+ }
+
+ /**
* HttpFSServer subclass of the <code>FSDataInputStream</code>.
* <p/>
* This implementation does not support the
@@ -397,7 +386,7 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.OPEN.toString());
HttpURLConnection conn = getConnection(Operation.OPEN.getMethod(), params,
f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
return new FSDataInputStream(
new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
}
@@ -424,7 +413,7 @@ public class HttpFSFileSystem extends Fi
try {
super.close();
} finally {
- validateResponse(conn, closeStatus);
+ HttpFSUtils.validateResponse(conn, closeStatus);
}
}
@@ -460,11 +449,11 @@ public class HttpFSFileSystem extends Fi
OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);
} catch (IOException ex) {
- validateResponse(conn, expectedStatus);
+ HttpFSUtils.validateResponse(conn, expectedStatus);
throw ex;
}
} else {
- validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ HttpFSUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
throw new IOException("Missing HTTP 'Location' header for [" + conn.getURL() + "]");
}
} else {
@@ -476,7 +465,7 @@ public class HttpFSFileSystem extends Fi
if (exceptionAlreadyHandled) {
throw ex;
} else {
- validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ HttpFSUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
throw ex;
}
}
@@ -548,8 +537,8 @@ public class HttpFSFileSystem extends Fi
params.put(DESTINATION_PARAM, dst.toString());
HttpURLConnection conn = getConnection(Operation.RENAME.getMethod(),
params, src, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(RENAME_JSON);
}
@@ -584,8 +573,8 @@ public class HttpFSFileSystem extends Fi
params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
HttpURLConnection conn = getConnection(Operation.DELETE.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(DELETE_JSON);
}
@@ -605,8 +594,8 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.LISTSTATUS.toString());
HttpURLConnection conn = getConnection(Operation.LISTSTATUS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
json = (JSONObject) json.get(FILE_STATUSES_JSON);
JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON);
FileStatus[] array = new FileStatus[jsonArray.size()];
@@ -653,8 +642,8 @@ public class HttpFSFileSystem extends Fi
params.put(PERMISSION_PARAM, permissionToString(permission));
HttpURLConnection conn = getConnection(Operation.MKDIRS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(MKDIRS_JSON);
}
@@ -674,8 +663,8 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETFILESTATUS.toString());
HttpURLConnection conn = getConnection(Operation.GETFILESTATUS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
json = (JSONObject) json.get(FILE_STATUS_JSON);
f = makeQualified(f);
return createFileStatus(f, json);
@@ -693,8 +682,8 @@ public class HttpFSFileSystem extends Fi
HttpURLConnection conn =
getConnection(Operation.GETHOMEDIRECTORY.getMethod(), params,
new Path(getUri().toString(), "/"), false);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return new Path((String) json.get(HOME_DIR_JSON));
} catch (IOException ex) {
throw new RuntimeException(ex);
@@ -718,7 +707,7 @@ public class HttpFSFileSystem extends Fi
params.put(GROUP_PARAM, groupname);
HttpURLConnection conn = getConnection(Operation.SETOWNER.getMethod(),
params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -733,7 +722,7 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.SETPERMISSION.toString());
params.put(PERMISSION_PARAM, permissionToString(permission));
HttpURLConnection conn = getConnection(Operation.SETPERMISSION.getMethod(), params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -755,7 +744,7 @@ public class HttpFSFileSystem extends Fi
params.put(ACCESS_TIME_PARAM, Long.toString(atime));
HttpURLConnection conn = getConnection(Operation.SETTIMES.getMethod(),
params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -777,19 +766,11 @@ public class HttpFSFileSystem extends Fi
params.put(REPLICATION_PARAM, Short.toString(replication));
HttpURLConnection conn =
getConnection(Operation.SETREPLICATION.getMethod(), params, src, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(SET_REPLICATION_JSON);
}
- /**
- * Creates a <code>FileStatus</code> object using a JSON file-status payload
- * received from a HttpFSServer server.
- *
- * @param json a JSON file-status payload received from a HttpFSServer server
- *
- * @return the corresponding <code>FileStatus</code>
- */
private FileStatus createFileStatus(Path parent, JSONObject json) {
String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
@@ -828,9 +809,9 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
HttpURLConnection conn =
getConnection(Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json =
- (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
(Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
(Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
@@ -846,9 +827,9 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
HttpURLConnection conn =
getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- final JSONObject json =
- (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ final JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
return new FileChecksum() {
@Override
public String getAlgorithmName() {
@@ -877,4 +858,56 @@ public class HttpFSFileSystem extends Fi
};
}
+
+ @Override
+ @SuppressWarnings("deprecation")
+ public Token<?> getDelegationToken(final String renewer)
+ throws IOException {
+ return doAsRealUserIfNecessary(new Callable<Token<?>>() {
+ @Override
+ public Token<?> call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ getDelegationToken(uri, httpFSAddr, authToken, renewer);
+ }
+ });
+ }
+
+
+ @Override
+ public List<Token<?>> getDelegationTokens(final String renewer)
+ throws IOException {
+ return doAsRealUserIfNecessary(new Callable<List<Token<?>>>() {
+ @Override
+ public List<Token<?>> call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ getDelegationTokens(uri, httpFSAddr, authToken, renewer);
+ }
+ });
+ }
+
+ public long renewDelegationToken(final Token<?> token) throws IOException {
+ return doAsRealUserIfNecessary(new Callable<Long>() {
+ @Override
+ public Long call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ renewDelegationToken(uri, authToken, token);
+ }
+ });
+ }
+
+ public void cancelDelegationToken(final Token<?> token) throws IOException {
+ HttpFSKerberosAuthenticator.
+ cancelDelegationToken(uri, authToken, token);
+ }
+
+ @Override
+ public Token<?> getRenewToken() {
+ return delegationToken;
+ }
+
+ @Override
+ public <T extends TokenIdentifier> void setDelegationToken(Token<T> token) {
+ delegationToken = token;
+ }
+
}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSKerberosAuthenticator.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.client;
+
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A <code>KerberosAuthenticator</code> subclass that fallback to
+ * {@link HttpFSPseudoAuthenticator}.
+ */
+public class HttpFSKerberosAuthenticator extends KerberosAuthenticator {
+
+ /**
+ * Returns the fallback authenticator if the server does not use
+ * Kerberos SPNEGO HTTP authentication.
+ *
+ * @return a {@link HttpFSPseudoAuthenticator} instance.
+ */
+ @Override
+ protected Authenticator getFallBackAuthenticator() {
+ return new HttpFSPseudoAuthenticator();
+ }
+
+ private static final String HTTP_GET = "GET";
+ private static final String HTTP_PUT = "PUT";
+
+ public static final String DELEGATION_PARAM = "delegation";
+ public static final String TOKEN_PARAM = "token";
+ public static final String RENEWER_PARAM = "renewer";
+ public static final String TOKEN_KIND = "HTTPFS_DELEGATION_TOKEN";
+ public static final String DELEGATION_TOKEN_JSON = "Token";
+ public static final String DELEGATION_TOKENS_JSON = "Tokens";
+ public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
+ public static final String RENEW_DELEGATION_TOKEN_JSON = "long";
+
+ /**
+ * DelegationToken operations.
+ */
+ public static enum DelegationTokenOperation {
+ GETDELEGATIONTOKEN(HTTP_GET, true),
+ GETDELEGATIONTOKENS(HTTP_GET, true),
+ RENEWDELEGATIONTOKEN(HTTP_PUT, true),
+ CANCELDELEGATIONTOKEN(HTTP_PUT, false);
+
+ private String httpMethod;
+ private boolean requiresKerberosCredentials;
+
+ private DelegationTokenOperation(String httpMethod,
+ boolean requiresKerberosCredentials) {
+ this.httpMethod = httpMethod;
+ this.requiresKerberosCredentials = requiresKerberosCredentials;
+ }
+
+ public String getHttpMethod() {
+ return httpMethod;
+ }
+
+ public boolean requiresKerberosCredentials() {
+ return requiresKerberosCredentials;
+ }
+
+ }
+
+ public static void injectDelegationToken(Map<String, String> params,
+ Token<?> dtToken)
+ throws IOException {
+ if (dtToken != null) {
+ params.put(DELEGATION_PARAM, dtToken.encodeToUrlString());
+ }
+ }
+
+ private boolean hasDelegationToken(URL url) {
+ return url.getQuery().contains(DELEGATION_PARAM + "=");
+ }
+
+ @Override
+ public void authenticate(URL url, AuthenticatedURL.Token token)
+ throws IOException, AuthenticationException {
+ if (!hasDelegationToken(url)) {
+ super.authenticate(url, token);
+ }
+ }
+
+ public static final String OP_PARAM = "op";
+
+ private static List<Token<?>> getDelegationTokens(URI fsURI,
+ InetSocketAddress httpFSAddr, DelegationTokenOperation op,
+ AuthenticatedURL.Token token, String renewer)
+ throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM, op.toString());
+ params.put(RENEWER_PARAM,renewer);
+ URL url = HttpFSUtils.createHttpURL(new Path(fsURI), params);
+ AuthenticatedURL aUrl =
+ new AuthenticatedURL(new HttpFSKerberosAuthenticator());
+ try {
+ HttpURLConnection conn = aUrl.openConnection(url, token);
+ conn.setRequestMethod(op.getHttpMethod());
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ List<String> list = new ArrayList<String>();
+ if (op == DelegationTokenOperation.GETDELEGATIONTOKEN) {
+ JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(DELEGATION_TOKEN_JSON);
+ String tokenStr = (String)
+ json.get(DELEGATION_TOKEN_URL_STRING_JSON);
+ list.add(tokenStr);
+ }
+ else if (op == DelegationTokenOperation.GETDELEGATIONTOKENS) {
+ JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(DELEGATION_TOKENS_JSON);
+ JSONArray array = (JSONArray) json.get(DELEGATION_TOKEN_JSON);
+ for (Object element : array) {
+ String tokenStr = (String)
+ ((Map) element).get(DELEGATION_TOKEN_URL_STRING_JSON);
+ list.add(tokenStr);
+ }
+
+ } else {
+ throw new IllegalArgumentException("Invalid operation: " +
+ op.toString());
+ }
+ List<Token<?>> dTokens = new ArrayList<Token<?>>();
+ for (String tokenStr : list) {
+ Token<AbstractDelegationTokenIdentifier> dToken =
+ new Token<AbstractDelegationTokenIdentifier>();
+ dToken.decodeFromUrlString(tokenStr);
+ dTokens.add(dToken);
+ SecurityUtil.setTokenService(dToken, httpFSAddr);
+ }
+ return dTokens;
+ } catch (AuthenticationException ex) {
+ throw new IOException(ex.toString(), ex);
+ }
+ }
+
+ public static List<Token<?>> getDelegationTokens(URI fsURI,
+ InetSocketAddress httpFSAddr, AuthenticatedURL.Token token,
+ String renewer) throws IOException {
+ return getDelegationTokens(fsURI, httpFSAddr,
+ DelegationTokenOperation.GETDELEGATIONTOKENS, token, renewer);
+ }
+
+ public static Token<?> getDelegationToken(URI fsURI,
+ InetSocketAddress httpFSAddr, AuthenticatedURL.Token token,
+ String renewer) throws IOException {
+ return getDelegationTokens(fsURI, httpFSAddr,
+ DelegationTokenOperation.GETDELEGATIONTOKENS, token, renewer).get(0);
+ }
+
+ public static long renewDelegationToken(URI fsURI,
+ AuthenticatedURL.Token token, Token<?> dToken) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM,
+ DelegationTokenOperation.RENEWDELEGATIONTOKEN.toString());
+ params.put(TOKEN_PARAM, dToken.encodeToUrlString());
+ URL url = HttpFSUtils.createHttpURL(new Path(fsURI), params);
+ AuthenticatedURL aUrl =
+ new AuthenticatedURL(new HttpFSKerberosAuthenticator());
+ try {
+ HttpURLConnection conn = aUrl.openConnection(url, token);
+ conn.setRequestMethod(
+ DelegationTokenOperation.RENEWDELEGATIONTOKEN.getHttpMethod());
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(DELEGATION_TOKEN_JSON);
+ return (Long)(json.get(RENEW_DELEGATION_TOKEN_JSON));
+ } catch (AuthenticationException ex) {
+ throw new IOException(ex.toString(), ex);
+ }
+ }
+
+ public static void cancelDelegationToken(URI fsURI,
+ AuthenticatedURL.Token token, Token<?> dToken) throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(OP_PARAM,
+ DelegationTokenOperation.CANCELDELEGATIONTOKEN.toString());
+ params.put(TOKEN_PARAM, dToken.encodeToUrlString());
+ URL url = HttpFSUtils.createHttpURL(new Path(fsURI), params);
+ AuthenticatedURL aUrl =
+ new AuthenticatedURL(new HttpFSKerberosAuthenticator());
+ try {
+ HttpURLConnection conn = aUrl.openConnection(url, token);
+ conn.setRequestMethod(
+ DelegationTokenOperation.CANCELDELEGATIONTOKEN.getHttpMethod());
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ } catch (AuthenticationException ex) {
+ throw new IOException(ex.toString(), ex);
+ }
+ }
+
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSPseudoAuthenticator.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+
+import java.io.IOException;
+
+/**
+ * A <code>PseudoAuthenticator</code> subclass that uses FileSystemAccess's
+ * <code>UserGroupInformation</code> to obtain the client user name (the UGI's login user).
+ */
+public class HttpFSPseudoAuthenticator extends PseudoAuthenticator {
+
+ /**
+ * Return the client user name.
+ *
+ * @return the client user name.
+ */
+ @Override
+ protected String getUserName() {
+ try {
+ return UserGroupInformation.getLoginUser().getUserName();
+ } catch (IOException ex) {
+ throw new SecurityException("Could not obtain current user, " + ex.getMessage(), ex);
+ }
+ }
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.client;
+
+import org.apache.hadoop.fs.Path;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.lang.reflect.Constructor;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.text.MessageFormat;
+import java.util.Map;
+
+/**
+ * Utility methods used by HttpFS classes.
+ */
+public class HttpFSUtils {
+
+ public static final String SERVICE_NAME = "/webhdfs";
+
+ public static final String SERVICE_VERSION = "/v1";
+
+ private static final String SERVICE_PATH = SERVICE_NAME + SERVICE_VERSION;
+
+ /**
+ * Convenience method that creates an HTTP <code>URL</code> for the
+ * HttpFSServer file system operations.
+ * <p/>
+ *
+ * @param path the file path.
+ * @param params the query string parameters.
+ *
+ * @return a <code>URL</code> for the HttpFSServer server,
+ *
+ * @throws IOException thrown if an IO error occurrs.
+ */
+ static URL createHttpURL(Path path, Map<String, String> params)
+ throws IOException {
+ URI uri = path.toUri();
+ String realScheme;
+ if (uri.getScheme().equalsIgnoreCase(HttpFSFileSystem.SCHEME)) {
+ realScheme = "http";
+ } else {
+ throw new IllegalArgumentException(MessageFormat.format(
+ "Invalid scheme [{0}] it should be 'webhdfs'", uri));
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append(realScheme).append("://").append(uri.getAuthority()).
+ append(SERVICE_PATH).append(uri.getPath());
+
+ String separator = "?";
+ for (Map.Entry<String, String> entry : params.entrySet()) {
+ sb.append(separator).append(entry.getKey()).append("=").
+ append(URLEncoder.encode(entry.getValue(), "UTF8"));
+ separator = "&";
+ }
+ return new URL(sb.toString());
+ }
+
+ /**
+ * Validates the status of an <code>HttpURLConnection</code> against an
+ * expected HTTP status code. If the current status code is not the expected
+ * one it throws an exception with a detail message using Server side error
+ * messages if available.
+ *
+ * @param conn the <code>HttpURLConnection</code>.
+ * @param expected the expected HTTP status code.
+ *
+ * @throws IOException thrown if the current status code does not match the
+ * expected one.
+ */
+ @SuppressWarnings({"unchecked", "deprecation"})
+ static void validateResponse(HttpURLConnection conn, int expected)
+ throws IOException {
+ int status = conn.getResponseCode();
+ if (status != expected) {
+ try {
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
+ json = (JSONObject) json.get(HttpFSFileSystem.ERROR_JSON);
+ String message = (String) json.get(HttpFSFileSystem.ERROR_MESSAGE_JSON);
+ String exception = (String)
+ json.get(HttpFSFileSystem.ERROR_EXCEPTION_JSON);
+ String className = (String)
+ json.get(HttpFSFileSystem.ERROR_CLASSNAME_JSON);
+
+ try {
+ ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
+ Class klass = cl.loadClass(className);
+ Constructor constr = klass.getConstructor(String.class);
+ throw (IOException) constr.newInstance(message);
+ } catch (IOException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new IOException(MessageFormat.format("{0} - {1}", exception,
+ message));
+ }
+ } catch (IOException ex) {
+ if (ex.getCause() instanceof IOException) {
+ throw (IOException) ex.getCause();
+ }
+ throw new IOException(
+ MessageFormat.format("HTTP status [{0}], {1}",
+ status, conn.getResponseMessage()));
+ }
+ }
+ }
+
+ /**
+ * Convenience method that JSON Parses the <code>InputStream</code> of a
+ * <code>HttpURLConnection</code>.
+ *
+ * @param conn the <code>HttpURLConnection</code>.
+ *
+ * @return the parsed JSON object.
+ *
+ * @throws IOException thrown if the <code>InputStream</code> could not be
+ * JSON parsed.
+ */
+ static Object jsonParse(HttpURLConnection conn) throws IOException {
+ try {
+ JSONParser parser = new JSONParser();
+ return parser.parse(new InputStreamReader(conn.getInputStream()));
+ } catch (ParseException ex) {
+ throw new IOException("JSON parser error, " + ex.getMessage(), ex);
+ }
+ }
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import javax.servlet.FilterConfig;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
+ * from HttpFSServer's server configuration.
+ */
+public class HttpFSAuthenticationFilter extends AuthenticationFilter {
+ private static final String CONF_PREFIX = "httpfs.authentication.";
+
+ private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
+
+ /**
+ * Returns the hadoop-auth configuration from HttpFSServer's configuration.
+ * <p/>
+ * It returns all HttpFSServer's configuration properties prefixed with
+ * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
+ * prefix is removed from the returned property names.
+ *
+ * @param configPrefix parameter not used.
+ * @param filterConfig parameter not used.
+ *
+ * @return hadoop-auth configuration read from HttpFSServer's configuration.
+ */
+ @Override
+ protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
+ Properties props = new Properties();
+ Configuration conf = HttpFSServerWebApp.get().getConfig();
+
+ props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
+ for (Map.Entry<String, String> entry : conf) {
+ String name = entry.getKey();
+ if (name.startsWith(CONF_PREFIX)) {
+ String value = conf.get(name);
+ name = name.substring(CONF_PREFIX.length());
+ props.setProperty(name, value);
+ }
+ }
+
+ if (props.getProperty(AUTH_TYPE).equals("kerberos")) {
+ props.setProperty(AUTH_TYPE,
+ HttpFSKerberosAuthenticationHandler.class.getName());
+ }
+
+ String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
+ if (signatureSecretFile == null) {
+ throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+ }
+
+ try {
+ StringBuilder secret = new StringBuilder();
+ Reader reader = new FileReader(signatureSecretFile);
+ int c = reader.read();
+ while (c > -1) {
+ secret.append((char)c);
+ c = reader.read();
+ }
+ reader.close();
+ props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
+ } catch (IOException ex) {
+ throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
+ }
+ return props;
+ }
+
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSKerberosAuthenticationHandler.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.http.server;
+
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator.DelegationTokenOperation;
+import org.apache.hadoop.lib.service.DelegationTokenIdentifier;
+import org.apache.hadoop.lib.service.DelegationTokenManager;
+import org.apache.hadoop.lib.service.DelegationTokenManagerException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.server.AuthenticationToken;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.token.Token;
+import org.json.simple.JSONObject;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+import java.io.Writer;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Server side <code>AuthenticationHandler</code> that authenticates requests
+ * using the incoming delegation token as a 'delegation' query string parameter.
+ * <p/>
+ * If not delegation token is present in the request it delegates to the
+ * {@link KerberosAuthenticationHandler}
+ */
+public class HttpFSKerberosAuthenticationHandler
+ extends KerberosAuthenticationHandler {
+
+ static final Set<String> DELEGATION_TOKEN_OPS =
+ new HashSet<String>();
+
+ static {
+ DELEGATION_TOKEN_OPS.add(
+ DelegationTokenOperation.GETDELEGATIONTOKEN.toString());
+ DELEGATION_TOKEN_OPS.add(
+ DelegationTokenOperation.GETDELEGATIONTOKENS.toString());
+ DELEGATION_TOKEN_OPS.add(
+ DelegationTokenOperation.RENEWDELEGATIONTOKEN.toString());
+ DELEGATION_TOKEN_OPS.add(
+ DelegationTokenOperation.CANCELDELEGATIONTOKEN.toString());
+ }
+
+ public static final String TYPE = "kerberos-dt";
+
+ /**
+ * Returns authentication type of the handler.
+ *
+ * @return <code>delegationtoken-kerberos</code>
+ */
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ private static final String ENTER = System.getProperty("line.separator");
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public boolean managementOperation(AuthenticationToken token,
+ HttpServletRequest request, HttpServletResponse response)
+ throws IOException, AuthenticationException {
+ boolean requestContinues = true;
+ String op = request.getParameter(HttpFSFileSystem.OP_PARAM);
+ op = (op != null) ? op.toUpperCase() : null;
+ if (DELEGATION_TOKEN_OPS.contains(op) &&
+ !request.getMethod().equals("OPTIONS")) {
+ DelegationTokenOperation dtOp =
+ DelegationTokenOperation.valueOf(op);
+ if (dtOp.getHttpMethod().equals(request.getMethod())) {
+ if (dtOp.requiresKerberosCredentials() && token == null) {
+ response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
+ MessageFormat.format(
+ "Operation [{0}] requires SPNEGO authentication established",
+ dtOp));
+ requestContinues = false;
+ } else {
+ DelegationTokenManager tokenManager =
+ HttpFSServerWebApp.get().get(DelegationTokenManager.class);
+ try {
+ Map map = null;
+ switch (dtOp) {
+ case GETDELEGATIONTOKEN:
+ case GETDELEGATIONTOKENS:
+ String renewerParam =
+ request.getParameter(HttpFSKerberosAuthenticator.RENEWER_PARAM);
+ if (renewerParam == null) {
+ renewerParam = token.getUserName();
+ }
+ Token<?> dToken = tokenManager.createToken(
+ UserGroupInformation.getCurrentUser(), renewerParam);
+ if (dtOp == DelegationTokenOperation.GETDELEGATIONTOKEN) {
+ map = delegationTokenToJSON(dToken);
+ } else {
+ map = delegationTokensToJSON(Arrays.asList((Token)dToken));
+ }
+ break;
+ case RENEWDELEGATIONTOKEN:
+ case CANCELDELEGATIONTOKEN:
+ String tokenParam =
+ request.getParameter(HttpFSKerberosAuthenticator.TOKEN_PARAM);
+ if (tokenParam == null) {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST,
+ MessageFormat.format(
+ "Operation [{0}] requires the parameter [{1}]",
+ dtOp, HttpFSKerberosAuthenticator.TOKEN_PARAM));
+ requestContinues = false;
+ } else {
+ if (dtOp == DelegationTokenOperation.CANCELDELEGATIONTOKEN) {
+ Token<DelegationTokenIdentifier> dt =
+ new Token<DelegationTokenIdentifier>();
+ dt.decodeFromUrlString(tokenParam);
+ tokenManager.cancelToken(dt,
+ UserGroupInformation.getCurrentUser().getUserName());
+ } else {
+ Token<DelegationTokenIdentifier> dt =
+ new Token<DelegationTokenIdentifier>();
+ dt.decodeFromUrlString(tokenParam);
+ long expirationTime =
+ tokenManager.renewToken(dt, token.getUserName());
+ map = new HashMap();
+ map.put("long", expirationTime);
+ }
+ }
+ break;
+ }
+ if (requestContinues) {
+ response.setStatus(HttpServletResponse.SC_OK);
+ if (map != null) {
+ response.setContentType(MediaType.APPLICATION_JSON);
+ Writer writer = response.getWriter();
+ JSONObject.writeJSONString(map, writer);
+ writer.write(ENTER);
+ writer.flush();
+
+ }
+ requestContinues = false;
+ }
+ } catch (DelegationTokenManagerException ex) {
+ throw new AuthenticationException(ex.toString(), ex);
+ }
+ }
+ } else {
+ response.sendError(HttpServletResponse.SC_BAD_REQUEST,
+ MessageFormat.format(
+ "Wrong HTTP method [{0}] for operation [{1}], it should be [{2}]",
+ request.getMethod(), dtOp, dtOp.getHttpMethod()));
+ requestContinues = false;
+ }
+ }
+ return requestContinues;
+ }
+
+ @SuppressWarnings("unchecked")
+ private static Map delegationTokenToJSON(Token token) throws IOException {
+ Map json = new LinkedHashMap();
+ json.put(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON,
+ token.encodeToUrlString());
+ Map response = new LinkedHashMap();
+ response.put(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON, json);
+ return response;
+ }
+
+ @SuppressWarnings("unchecked")
+ private static Map delegationTokensToJSON(List<Token> tokens)
+ throws IOException {
+ List list = new ArrayList();
+ for (Token token : tokens) {
+ Map map = new HashMap();
+ map.put(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON,
+ token.encodeToUrlString());
+ list.add(map);
+ }
+ Map map = new HashMap();
+ map.put(HttpFSKerberosAuthenticator.DELEGATION_TOKEN_JSON, list);
+ Map response = new LinkedHashMap();
+ response.put(HttpFSKerberosAuthenticator.DELEGATION_TOKENS_JSON, map);
+ return response;
+ }
+
+ /**
+ * Authenticates a request looking for the <code>delegation</code>
+ * query-string parameter and verifying it is a valid token. If there is not
+ * <code>delegation</code> query-string parameter, it delegates the
+ * authentication to the {@link KerberosAuthenticationHandler} unless it is
+ * disabled.
+ *
+ * @param request the HTTP client request.
+ * @param response the HTTP client response.
+ *
+ * @return the authentication token for the authenticated request.
+ * @throws IOException thrown if an IO error occurred.
+ * @throws AuthenticationException thrown if the authentication failed.
+ */
+ @Override
+ public AuthenticationToken authenticate(HttpServletRequest request,
+ HttpServletResponse response)
+ throws IOException, AuthenticationException {
+ AuthenticationToken token;
+ String delegationParam =
+ request.getParameter(HttpFSKerberosAuthenticator.DELEGATION_PARAM);
+ if (delegationParam != null) {
+ try {
+ Token<DelegationTokenIdentifier> dt =
+ new Token<DelegationTokenIdentifier>();
+ dt.decodeFromUrlString(delegationParam);
+ DelegationTokenManager tokenManager =
+ HttpFSServerWebApp.get().get(DelegationTokenManager.class);
+ UserGroupInformation ugi = tokenManager.verifyToken(dt);
+ final String shortName = ugi.getShortUserName();
+
+ // creating a ephemeral token
+ token = new AuthenticationToken(shortName, ugi.getUserName(),
+ getType());
+ token.setExpires(0);
+ } catch (Throwable ex) {
+ throw new AuthenticationException("Could not verify DelegationToken, " +
+ ex.toString(), ex);
+ }
+ } else {
+ token = super.authenticate(request, response);
+ }
+ return token;
+ }
+
+
+}
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java Thu Jul 26 13:39:05 2012
@@ -70,7 +70,7 @@ public class HttpFSServerWebApp extends
/**
* Constructor used for testing purposes.
*/
- protected HttpFSServerWebApp(String homeDir, String configDir, String logDir,
+ public HttpFSServerWebApp(String homeDir, String configDir, String logDir,
String tempDir, Configuration config) {
super(NAME, homeDir, configDir, logDir, tempDir, config);
}
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java Thu Jul 26 13:39:05 2012
@@ -39,7 +39,11 @@ public class ServerException extends XEx
S08("Could not load service classes, {0}"),
S09("Could not set service [{0}] programmatically -server shutting down-, {1}"),
S10("Service [{0}] requires service [{1}]"),
- S11("Service [{0}] exception during status change to [{1}] -server shutting down-, {2}");
+ S11("Service [{0}] exception during status change to [{1}] -server shutting down-, {2}"),
+ S12("Could not start service [{0}], {1}"),
+ S13("Missing system property [{0}]"),
+ S14("Could not initialize server, {0}")
+ ;
private String msg;
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenIdentifier.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.lib.service;
+
+import org.apache.hadoop.fs.http.client.HttpFSKerberosAuthenticator;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
+
+/**
+ * HttpFS <code>DelegationTokenIdentifier</code> implementation.
+ */
+public class DelegationTokenIdentifier
+ extends AbstractDelegationTokenIdentifier {
+
+ public static final Text KIND_NAME =
+ new Text(HttpFSKerberosAuthenticator.TOKEN_KIND);
+
+ public DelegationTokenIdentifier() {
+ }
+
+ /**
+ * Create a new delegation token identifier
+ *
+ * @param owner the effective username of the token owner
+ * @param renewer the username of the renewer
+ * @param realUser the real username of the token owner
+ */
+ public DelegationTokenIdentifier(Text owner, Text renewer, Text realUser) {
+ super(owner, renewer, realUser);
+ }
+
+
+ /**
+ * Returns the kind, <code>TOKEN_KIND</code>.
+ * @return returns <code>TOKEN_KIND</code>.
+ */
+ @Override
+ public Text getKind() {
+ return KIND_NAME;
+ }
+
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManager.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.lib.service;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+
+/**
+ * Service interface to manage HttpFS delegation tokens.
+ */
+public interface DelegationTokenManager {
+
+ /**
+ * Creates a delegation token.
+ *
+ * @param ugi UGI creating the token.
+ * @param renewer token renewer.
+ * @return new delegation token.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * created.
+ */
+ public Token<DelegationTokenIdentifier> createToken(UserGroupInformation ugi,
+ String renewer)
+ throws DelegationTokenManagerException;
+
+ /**
+ * Renews a delegation token.
+ *
+ * @param token delegation token to renew.
+ * @param renewer token renewer.
+ * @return epoc expiration time.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * renewed.
+ */
+ public long renewToken(Token<DelegationTokenIdentifier> token, String renewer)
+ throws DelegationTokenManagerException;
+
+ /**
+ * Cancels a delegation token.
+ *
+ * @param token delegation token to cancel.
+ * @param canceler token canceler.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * canceled.
+ */
+ public void cancelToken(Token<DelegationTokenIdentifier> token,
+ String canceler)
+ throws DelegationTokenManagerException;
+
+ /**
+ * Verifies a delegation token.
+ *
+ * @param token delegation token to verify.
+ * @return the UGI for the token.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * verified.
+ */
+ public UserGroupInformation verifyToken(Token<DelegationTokenIdentifier> token)
+ throws DelegationTokenManagerException;
+
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/DelegationTokenManagerException.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.lib.service;
+
+import org.apache.hadoop.lib.lang.XException;
+
+/**
+ * Exception thrown by the {@link DelegationTokenManager} service implementation.
+ */
+public class DelegationTokenManagerException extends XException {
+
+ public enum ERROR implements XException.ERROR {
+ DT01("Could not verify delegation token, {0}"),
+ DT02("Could not renew delegation token, {0}"),
+ DT03("Could not cancel delegation token, {0}"),
+ DT04("Could not create delegation token, {0}");
+
+ private String template;
+
+ ERROR(String template) {
+ this.template = template;
+ }
+
+ @Override
+ public String getTemplate() {
+ return template;
+ }
+ }
+
+ public DelegationTokenManagerException(ERROR error, Object... params) {
+ super(error, params);
+ }
+
+}
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java?rev=1365988&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/DelegationTokenManagerService.java Thu Jul 26 13:39:05 2012
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.lib.service.security;
+
+import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.lib.server.BaseService;
+import org.apache.hadoop.lib.server.ServerException;
+import org.apache.hadoop.lib.server.ServiceException;
+import org.apache.hadoop.lib.service.DelegationTokenIdentifier;
+import org.apache.hadoop.lib.service.DelegationTokenManager;
+import org.apache.hadoop.lib.service.DelegationTokenManagerException;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+
+/**
+ * DelegationTokenManager service implementation.
+ */
+public class DelegationTokenManagerService extends BaseService
+ implements DelegationTokenManager {
+
+ private static final String PREFIX = "delegation.token.manager";
+
+ private static final String UPDATE_INTERVAL = "update.interval";
+
+ private static final String MAX_LIFETIME = "max.lifetime";
+
+ private static final String RENEW_INTERVAL = "renew.interval";
+
+ private static final long HOUR = 60 * 60 * 1000;
+ private static final long DAY = 24 * HOUR;
+
+ DelegationTokenSecretManager secretManager = null;
+
+ public DelegationTokenManagerService() {
+ super(PREFIX);
+ }
+
+ /**
+ * Initializes the service.
+ *
+ * @throws ServiceException thrown if the service could not be initialized.
+ */
+ @Override
+ protected void init() throws ServiceException {
+
+ long updateInterval = getServiceConfig().getLong(UPDATE_INTERVAL, DAY);
+ long maxLifetime = getServiceConfig().getLong(MAX_LIFETIME, 7 * DAY);
+ long renewInterval = getServiceConfig().getLong(RENEW_INTERVAL, DAY);
+ secretManager = new DelegationTokenSecretManager(updateInterval,
+ maxLifetime,
+ renewInterval, HOUR);
+ try {
+ secretManager.startThreads();
+ } catch (IOException ex) {
+ throw new ServiceException(ServiceException.ERROR.S12,
+ DelegationTokenManager.class.getSimpleName(),
+ ex.toString(), ex);
+ }
+ }
+
+ /**
+ * Destroys the service.
+ */
+ @Override
+ public void destroy() {
+ secretManager.stopThreads();
+ super.destroy();
+ }
+
+ /**
+ * Returns the service interface.
+ *
+ * @return the service interface.
+ */
+ @Override
+ public Class getInterface() {
+ return DelegationTokenManager.class;
+ }
+
+ /**
+ * Creates a delegation token.
+ *
+ * @param ugi UGI creating the token.
+ * @param renewer token renewer.
+ * @return new delegation token.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * created.
+ */
+ @Override
+ public Token<DelegationTokenIdentifier> createToken(UserGroupInformation ugi,
+ String renewer)
+ throws DelegationTokenManagerException {
+ renewer = (renewer == null) ? ugi.getShortUserName() : renewer;
+ String user = ugi.getUserName();
+ Text owner = new Text(user);
+ Text realUser = null;
+ if (ugi.getRealUser() != null) {
+ realUser = new Text(ugi.getRealUser().getUserName());
+ }
+ DelegationTokenIdentifier tokenIdentifier =
+ new DelegationTokenIdentifier(owner, new Text(renewer), realUser);
+ Token<DelegationTokenIdentifier> token =
+ new Token<DelegationTokenIdentifier>(tokenIdentifier, secretManager);
+ try {
+ SecurityUtil.setTokenService(token,
+ HttpFSServerWebApp.get().getAuthority());
+ } catch (ServerException ex) {
+ throw new DelegationTokenManagerException(
+ DelegationTokenManagerException.ERROR.DT04, ex.toString(), ex);
+ }
+ return token;
+ }
+
+ /**
+ * Renews a delegation token.
+ *
+ * @param token delegation token to renew.
+ * @param renewer token renewer.
+ * @return epoc expiration time.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * renewed.
+ */
+ @Override
+ public long renewToken(Token<DelegationTokenIdentifier> token, String renewer)
+ throws DelegationTokenManagerException {
+ try {
+ return secretManager.renewToken(token, renewer);
+ } catch (IOException ex) {
+ throw new DelegationTokenManagerException(
+ DelegationTokenManagerException.ERROR.DT02, ex.toString(), ex);
+ }
+ }
+
+ /**
+ * Cancels a delegation token.
+ *
+ * @param token delegation token to cancel.
+ * @param canceler token canceler.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * canceled.
+ */
+ @Override
+ public void cancelToken(Token<DelegationTokenIdentifier> token,
+ String canceler)
+ throws DelegationTokenManagerException {
+ try {
+ secretManager.cancelToken(token, canceler);
+ } catch (IOException ex) {
+ throw new DelegationTokenManagerException(
+ DelegationTokenManagerException.ERROR.DT03, ex.toString(), ex);
+ }
+ }
+
+ /**
+ * Verifies a delegation token.
+ *
+ * @param token delegation token to verify.
+ * @return the UGI for the token.
+ * @throws DelegationTokenManagerException thrown if the token could not be
+ * verified.
+ */
+ @Override
+ public UserGroupInformation verifyToken(Token<DelegationTokenIdentifier> token)
+ throws DelegationTokenManagerException {
+ ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
+ DataInputStream dis = new DataInputStream(buf);
+ DelegationTokenIdentifier id = new DelegationTokenIdentifier();
+ try {
+ id.readFields(dis);
+ dis.close();
+ secretManager.verifyToken(id, token.getPassword());
+ } catch (Exception ex) {
+ throw new DelegationTokenManagerException(
+ DelegationTokenManagerException.ERROR.DT01, ex.toString(), ex);
+ }
+ return id.getUser();
+ }
+
+ private static class DelegationTokenSecretManager
+ extends AbstractDelegationTokenSecretManager<DelegationTokenIdentifier> {
+
+ /**
+ * Create a secret manager
+ *
+ * @param delegationKeyUpdateInterval the number of seconds for rolling new
+ * secret keys.
+ * @param delegationTokenMaxLifetime the maximum lifetime of the delegation
+ * tokens
+ * @param delegationTokenRenewInterval how often the tokens must be renewed
+ * @param delegationTokenRemoverScanInterval how often the tokens are
+ * scanned
+ * for expired tokens
+ */
+ public DelegationTokenSecretManager(long delegationKeyUpdateInterval,
+ long delegationTokenMaxLifetime,
+ long delegationTokenRenewInterval,
+ long delegationTokenRemoverScanInterval) {
+ super(delegationKeyUpdateInterval, delegationTokenMaxLifetime,
+ delegationTokenRenewInterval, delegationTokenRemoverScanInterval);
+ }
+
+ @Override
+ public DelegationTokenIdentifier createIdentifier() {
+ return new DelegationTokenIdentifier();
+ }
+
+ }
+
+}
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java?rev=1365988&r1=1365987&r2=1365988&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java Thu Jul 26 13:39:05 2012
@@ -18,12 +18,16 @@
package org.apache.hadoop.lib.servlet;
+import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.server.Server;
import org.apache.hadoop.lib.server.ServerException;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
import java.text.MessageFormat;
/**
@@ -36,9 +40,13 @@ public abstract class ServerWebApp exten
private static final String CONFIG_DIR = ".config.dir";
private static final String LOG_DIR = ".log.dir";
private static final String TEMP_DIR = ".temp.dir";
+ private static final String HTTP_HOSTNAME = ".http.hostname";
+ private static final String HTTP_PORT = ".http.port";
private static ThreadLocal<String> HOME_DIR_TL = new ThreadLocal<String>();
+ private InetSocketAddress authority;
+
/**
* Method for testing purposes.
*/
@@ -147,6 +155,38 @@ public abstract class ServerWebApp exten
}
/**
+ * Resolves the host & port InetSocketAddress the web server is listening to.
+ * <p/>
+ * This implementation looks for the following 2 properties:
+ * <ul>
+ * <li>#SERVER_NAME#.http.hostname</li>
+ * <li>#SERVER_NAME#.http.port</li>
+ * </ul>
+ *
+ * @return the host & port InetSocketAddress the web server is listening to.
+ * @throws ServerException thrown if any of the above 2 properties is not defined.
+ */
+ protected InetSocketAddress resolveAuthority() throws ServerException {
+ String hostnameKey = getName() + HTTP_HOSTNAME;
+ String portKey = getName() + HTTP_PORT;
+ String host = System.getProperty(hostnameKey);
+ String port = System.getProperty(portKey);
+ if (host == null) {
+ throw new ServerException(ServerException.ERROR.S13, hostnameKey);
+ }
+ if (port == null) {
+ throw new ServerException(ServerException.ERROR.S13, portKey);
+ }
+ try {
+ InetAddress add = InetAddress.getByName(hostnameKey);
+ int portNum = Integer.parseInt(port);
+ return new InetSocketAddress(add, portNum);
+ } catch (UnknownHostException ex) {
+ throw new ServerException(ServerException.ERROR.S14, ex.toString(), ex);
+ }
+ }
+
+ /**
* Destroys the <code>ServletContextListener</code> which destroys
* the Server.
*
@@ -156,4 +196,29 @@ public abstract class ServerWebApp exten
destroy();
}
+ /**
+ * Returns the hostname:port InetSocketAddress the webserver is listening to.
+ *
+ * @return the hostname:port InetSocketAddress the webserver is listening to.
+ */
+ public InetSocketAddress getAuthority() throws ServerException {
+ synchronized (this) {
+ if (authority == null) {
+ authority = resolveAuthority();
+ }
+ }
+ return authority;
+ }
+
+ /**
+ * Sets an alternate hostname:port InetSocketAddress to use.
+ * <p/>
+ * For testing purposes.
+ *
+ * @param authority alterante authority.
+ */
+ @VisibleForTesting
+ public void setAuthority(InetSocketAddress authority) {
+ this.authority = authority;
+ }
}