You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by gk...@apache.org on 2012/08/03 21:00:59 UTC
svn commit: r1369164 [2/16] - in
/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project: ./
hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/dev-support/
hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/
hadoop-hdfs-httpfs/src/main/java/or...
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:r1358480-1369130
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml Fri Aug 3 19:00:15 2012
@@ -25,4 +25,14 @@
<Method name="destroy" />
<Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.lib.servlet.ServerWebApp" />
+ <Field name="authority" />
+ <Bug pattern="IS2_INCONSISTENT_SYNC" />
+ </Match>
+ <Match>
+ <Class name="org.apache.hadoop.lib.service.hadoop.FileSystemAccessService" />
+ <Method name="closeFileSystem" />
+ <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+ </Match>
</FindBugsFilter>
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml Fri Aug 3 19:00:15 2012
@@ -43,6 +43,8 @@
<httpfs.tomcat.dist.dir>
${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat
</httpfs.tomcat.dist.dir>
+ <kerberos.realm>LOCALHOST</kerberos.realm>
+ <test.exclude.kerberos.test>**/TestHttpFSWithKerberos.java</test.exclude.kerberos.test>
</properties>
<dependencies>
@@ -267,6 +269,22 @@
</excludes>
</resource>
</resources>
+ <testResources>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ <filtering>false</filtering>
+ <excludes>
+ <exclude>krb5.conf</exclude>
+ </excludes>
+ </testResource>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ <filtering>true</filtering>
+ <includes>
+ <include>krb5.conf</include>
+ </includes>
+ </testResource>
+ </testResources>
<plugins>
<plugin>
@@ -281,6 +299,16 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<threadCount>1</threadCount>
+ <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+ <systemPropertyVariables>
+ <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
+ <kerberos.realm>${kerberos.realm}</kerberos.realm>
+ </systemPropertyVariables>
+ <excludes>
+ <exclude>**/${test.exclude}.java</exclude>
+ <exclude>${test.exclude.pattern}</exclude>
+ <exclude>${test.exclude.kerberos.test}</exclude>
+ </excludes>
</configuration>
</plugin>
<plugin>
@@ -396,6 +424,36 @@
<profiles>
<profile>
+ <id>testKerberos</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <properties>
+ <test.exclude.kerberos.test>_</test.exclude.kerberos.test>
+ </properties>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>once</forkMode>
+ <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+ <systemPropertyVariables>
+ <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
+ <kerberos.realm>${kerberos.realm}</kerberos.realm>
+ <httpfs.http.hostname>localhost</httpfs.http.hostname>
+ </systemPropertyVariables>
+ <includes>
+ <include>**/TestHttpFSWithKerberos.java</include>
+ </includes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+
+ <profile>
<id>docs</id>
<activation>
<activeByDefault>false</activeByDefault>
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Fri Aug 3 19:00:15 2012
@@ -17,8 +17,10 @@
*/
package org.apache.hadoop.fs.http.client;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.DelegationTokenRenewer;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
@@ -28,16 +30,18 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
@@ -47,30 +51,33 @@ import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.InputStreamReader;
import java.io.OutputStream;
-import java.lang.reflect.Constructor;
import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
-import java.net.URLEncoder;
+import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.concurrent.Callable;
/**
* HttpFSServer implementation of the FileSystemAccess FileSystem.
* <p/>
* This implementation allows a user to access HDFS over HTTP via a HttpFSServer server.
*/
-public class HttpFSFileSystem extends FileSystem {
+@InterfaceAudience.Private
+public class HttpFSFileSystem extends FileSystem
+ implements DelegationTokenRenewer.Renewable {
- public static final String SERVICE_NAME = "/webhdfs";
+ public static final String SERVICE_NAME = HttpFSUtils.SERVICE_NAME;
- public static final String SERVICE_VERSION = "/v1";
+ public static final String SERVICE_VERSION = HttpFSUtils.SERVICE_VERSION;
- public static final String SERVICE_PREFIX = SERVICE_NAME + SERVICE_VERSION;
+ public static final String SCHEME = "webhdfs";
public static final String OP_PARAM = "op";
public static final String DO_AS_PARAM = "doas";
@@ -84,7 +91,6 @@ public class HttpFSFileSystem extends Fi
public static final String GROUP_PARAM = "group";
public static final String MODIFICATION_TIME_PARAM = "modificationtime";
public static final String ACCESS_TIME_PARAM = "accesstime";
- public static final String RENEWER_PARAM = "renewer";
public static final Short DEFAULT_PERMISSION = 0755;
@@ -144,9 +150,6 @@ public class HttpFSFileSystem extends Fi
public static final String CONTENT_SUMMARY_SPACE_CONSUMED_JSON = "spaceConsumed";
public static final String CONTENT_SUMMARY_SPACE_QUOTA_JSON = "spaceQuota";
- public static final String DELEGATION_TOKEN_JSON = "Token";
- public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
-
public static final String ERROR_JSON = "RemoteException";
public static final String ERROR_EXCEPTION_JSON = "exception";
public static final String ERROR_CLASSNAME_JSON = "javaClassName";
@@ -159,7 +162,8 @@ public class HttpFSFileSystem extends Fi
private static final String HTTP_POST = "POST";
private static final String HTTP_DELETE = "DELETE";
- public enum Operation {
+ @InterfaceAudience.Private
+ public static enum Operation {
OPEN(HTTP_GET), GETFILESTATUS(HTTP_GET), LISTSTATUS(HTTP_GET),
GETHOMEDIRECTORY(HTTP_GET), GETCONTENTSUMMARY(HTTP_GET),
GETFILECHECKSUM(HTTP_GET), GETFILEBLOCKLOCATIONS(HTTP_GET),
@@ -184,8 +188,31 @@ public class HttpFSFileSystem extends Fi
private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
private URI uri;
+ private InetSocketAddress httpFSAddr;
private Path workingDir;
+ private UserGroupInformation realUser;
private String doAs;
+ private Token<?> delegationToken;
+
+ //This method enables handling UGI doAs with SPNEGO, we have to
+ //fallback to the realuser who logged in with Kerberos credentials
+ private <T> T doAsRealUserIfNecessary(final Callable<T> callable)
+ throws IOException {
+ try {
+ if (realUser.getShortUserName().equals(doAs)) {
+ return callable.call();
+ } else {
+ return realUser.doAs(new PrivilegedExceptionAction<T>() {
+ @Override
+ public T run() throws Exception {
+ return callable.call();
+ }
+ });
+ }
+ } catch (Exception ex) {
+ throw new IOException(ex.toString(), ex);
+ }
+ }
/**
* Convenience method that creates a <code>HttpURLConnection</code> for the
@@ -204,25 +231,23 @@ public class HttpFSFileSystem extends Fi
*
* @throws IOException thrown if an IO error occurrs.
*/
- private HttpURLConnection getConnection(String method, Map<String, String> params,
- Path path, boolean makeQualified) throws IOException {
- params.put(DO_AS_PARAM, doAs);
+ private HttpURLConnection getConnection(final String method,
+ Map<String, String> params, Path path, boolean makeQualified)
+ throws IOException {
+ if (!realUser.getShortUserName().equals(doAs)) {
+ params.put(DO_AS_PARAM, doAs);
+ }
+ HttpFSKerberosAuthenticator.injectDelegationToken(params, delegationToken);
if (makeQualified) {
path = makeQualified(path);
}
- URI uri = path.toUri();
- StringBuilder sb = new StringBuilder();
- sb.append(uri.getScheme()).append("://").append(uri.getAuthority()).
- append(SERVICE_PREFIX).append(uri.getPath());
-
- String separator = "?";
- for (Map.Entry<String, String> entry : params.entrySet()) {
- sb.append(separator).append(entry.getKey()).append("=").
- append(URLEncoder.encode(entry.getValue(), "UTF8"));
- separator = "&";
- }
- URL url = new URL(sb.toString());
- return getConnection(url, method);
+ final URL url = HttpFSUtils.createHttpURL(path, params);
+ return doAsRealUserIfNecessary(new Callable<HttpURLConnection>() {
+ @Override
+ public HttpURLConnection call() throws Exception {
+ return getConnection(url, method);
+ }
+ });
}
/**
@@ -240,7 +265,8 @@ public class HttpFSFileSystem extends Fi
*/
private HttpURLConnection getConnection(URL url, String method) throws IOException {
Class<? extends Authenticator> klass =
- getConf().getClass("httpfs.authenticator.class", HttpKerberosAuthenticator.class, Authenticator.class);
+ getConf().getClass("httpfs.authenticator.class",
+ HttpFSKerberosAuthenticator.class, Authenticator.class);
Authenticator authenticator = ReflectionUtils.newInstance(klass, getConf());
try {
HttpURLConnection conn = new AuthenticatedURL(authenticator).openConnection(url, authToken);
@@ -255,63 +281,6 @@ public class HttpFSFileSystem extends Fi
}
/**
- * Convenience method that JSON Parses the <code>InputStream</code> of a <code>HttpURLConnection</code>.
- *
- * @param conn the <code>HttpURLConnection</code>.
- *
- * @return the parsed JSON object.
- *
- * @throws IOException thrown if the <code>InputStream</code> could not be JSON parsed.
- */
- private static Object jsonParse(HttpURLConnection conn) throws IOException {
- try {
- JSONParser parser = new JSONParser();
- return parser.parse(new InputStreamReader(conn.getInputStream()));
- } catch (ParseException ex) {
- throw new IOException("JSON parser error, " + ex.getMessage(), ex);
- }
- }
-
- /**
- * Validates the status of an <code>HttpURLConnection</code> against an expected HTTP
- * status code. If the current status code is not the expected one it throws an exception
- * with a detail message using Server side error messages if available.
- *
- * @param conn the <code>HttpURLConnection</code>.
- * @param expected the expected HTTP status code.
- *
- * @throws IOException thrown if the current status code does not match the expected one.
- */
- private static void validateResponse(HttpURLConnection conn, int expected) throws IOException {
- int status = conn.getResponseCode();
- if (status != expected) {
- try {
- JSONObject json = (JSONObject) jsonParse(conn);
- json = (JSONObject) json.get(ERROR_JSON);
- String message = (String) json.get(ERROR_MESSAGE_JSON);
- String exception = (String) json.get(ERROR_EXCEPTION_JSON);
- String className = (String) json.get(ERROR_CLASSNAME_JSON);
-
- try {
- ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
- Class klass = cl.loadClass(className);
- Constructor constr = klass.getConstructor(String.class);
- throw (IOException) constr.newInstance(message);
- } catch (IOException ex) {
- throw ex;
- } catch (Exception ex) {
- throw new IOException(MessageFormat.format("{0} - {1}", exception, message));
- }
- } catch (IOException ex) {
- if (ex.getCause() instanceof IOException) {
- throw (IOException) ex.getCause();
- }
- throw new IOException(MessageFormat.format("HTTP status [{0}], {1}", status, conn.getResponseMessage()));
- }
- }
- }
-
- /**
* Called after a new FileSystem instance is constructed.
*
* @param name a uri whose authority section names the host, port, etc. for this FileSystem
@@ -320,15 +289,28 @@ public class HttpFSFileSystem extends Fi
@Override
public void initialize(URI name, Configuration conf) throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- doAs = ugi.getUserName();
+
+ //the real use is the one that has the Kerberos credentials needed for
+ //SPNEGO to work
+ realUser = ugi.getRealUser();
+ if (realUser == null) {
+ realUser = UserGroupInformation.getLoginUser();
+ }
+ doAs = ugi.getShortUserName();
super.initialize(name, conf);
try {
- uri = new URI(name.getScheme() + "://" + name.getHost() + ":" + name.getPort());
+ uri = new URI(name.getScheme() + "://" + name.getAuthority());
+ httpFSAddr = NetUtils.createSocketAddr(getCanonicalUri().toString());
} catch (URISyntaxException ex) {
throw new IOException(ex);
}
}
+ @Override
+ public String getScheme() {
+ return SCHEME;
+ }
+
/**
* Returns a URI whose scheme and authority identify this FileSystem.
*
@@ -340,6 +322,16 @@ public class HttpFSFileSystem extends Fi
}
/**
+ * Get the default port for this file system.
+ * @return the default port or 0 if there isn't one
+ */
+ @Override
+ protected int getDefaultPort() {
+ return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
+ DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+ }
+
+ /**
* HttpFSServer subclass of the <code>FSDataInputStream</code>.
* <p/>
* This implementation does not support the
@@ -397,7 +389,7 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.OPEN.toString());
HttpURLConnection conn = getConnection(Operation.OPEN.getMethod(), params,
f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
return new FSDataInputStream(
new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
}
@@ -424,7 +416,7 @@ public class HttpFSFileSystem extends Fi
try {
super.close();
} finally {
- validateResponse(conn, closeStatus);
+ HttpFSUtils.validateResponse(conn, closeStatus);
}
}
@@ -460,11 +452,11 @@ public class HttpFSFileSystem extends Fi
OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);
} catch (IOException ex) {
- validateResponse(conn, expectedStatus);
+ HttpFSUtils.validateResponse(conn, expectedStatus);
throw ex;
}
} else {
- validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ HttpFSUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
throw new IOException("Missing HTTP 'Location' header for [" + conn.getURL() + "]");
}
} else {
@@ -476,7 +468,7 @@ public class HttpFSFileSystem extends Fi
if (exceptionAlreadyHandled) {
throw ex;
} else {
- validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ HttpFSUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
throw ex;
}
}
@@ -548,8 +540,8 @@ public class HttpFSFileSystem extends Fi
params.put(DESTINATION_PARAM, dst.toString());
HttpURLConnection conn = getConnection(Operation.RENAME.getMethod(),
params, src, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(RENAME_JSON);
}
@@ -584,8 +576,8 @@ public class HttpFSFileSystem extends Fi
params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
HttpURLConnection conn = getConnection(Operation.DELETE.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(DELETE_JSON);
}
@@ -605,8 +597,8 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.LISTSTATUS.toString());
HttpURLConnection conn = getConnection(Operation.LISTSTATUS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
json = (JSONObject) json.get(FILE_STATUSES_JSON);
JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON);
FileStatus[] array = new FileStatus[jsonArray.size()];
@@ -653,8 +645,8 @@ public class HttpFSFileSystem extends Fi
params.put(PERMISSION_PARAM, permissionToString(permission));
HttpURLConnection conn = getConnection(Operation.MKDIRS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(MKDIRS_JSON);
}
@@ -674,8 +666,8 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETFILESTATUS.toString());
HttpURLConnection conn = getConnection(Operation.GETFILESTATUS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
json = (JSONObject) json.get(FILE_STATUS_JSON);
f = makeQualified(f);
return createFileStatus(f, json);
@@ -693,8 +685,8 @@ public class HttpFSFileSystem extends Fi
HttpURLConnection conn =
getConnection(Operation.GETHOMEDIRECTORY.getMethod(), params,
new Path(getUri().toString(), "/"), false);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return new Path((String) json.get(HOME_DIR_JSON));
} catch (IOException ex) {
throw new RuntimeException(ex);
@@ -718,7 +710,7 @@ public class HttpFSFileSystem extends Fi
params.put(GROUP_PARAM, groupname);
HttpURLConnection conn = getConnection(Operation.SETOWNER.getMethod(),
params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -733,7 +725,7 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.SETPERMISSION.toString());
params.put(PERMISSION_PARAM, permissionToString(permission));
HttpURLConnection conn = getConnection(Operation.SETPERMISSION.getMethod(), params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -755,7 +747,7 @@ public class HttpFSFileSystem extends Fi
params.put(ACCESS_TIME_PARAM, Long.toString(atime));
HttpURLConnection conn = getConnection(Operation.SETTIMES.getMethod(),
params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -777,19 +769,11 @@ public class HttpFSFileSystem extends Fi
params.put(REPLICATION_PARAM, Short.toString(replication));
HttpURLConnection conn =
getConnection(Operation.SETREPLICATION.getMethod(), params, src, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(SET_REPLICATION_JSON);
}
- /**
- * Creates a <code>FileStatus</code> object using a JSON file-status payload
- * received from a HttpFSServer server.
- *
- * @param json a JSON file-status payload received from a HttpFSServer server
- *
- * @return the corresponding <code>FileStatus</code>
- */
private FileStatus createFileStatus(Path parent, JSONObject json) {
String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
@@ -828,9 +812,9 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
HttpURLConnection conn =
getConnection(Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json =
- (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
(Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
(Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
@@ -846,9 +830,9 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
HttpURLConnection conn =
getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- final JSONObject json =
- (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ final JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
return new FileChecksum() {
@Override
public String getAlgorithmName() {
@@ -877,4 +861,56 @@ public class HttpFSFileSystem extends Fi
};
}
+
+ @Override
+ @SuppressWarnings("deprecation")
+ public Token<?> getDelegationToken(final String renewer)
+ throws IOException {
+ return doAsRealUserIfNecessary(new Callable<Token<?>>() {
+ @Override
+ public Token<?> call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ getDelegationToken(uri, httpFSAddr, authToken, renewer);
+ }
+ });
+ }
+
+
+ @Override
+ public List<Token<?>> getDelegationTokens(final String renewer)
+ throws IOException {
+ return doAsRealUserIfNecessary(new Callable<List<Token<?>>>() {
+ @Override
+ public List<Token<?>> call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ getDelegationTokens(uri, httpFSAddr, authToken, renewer);
+ }
+ });
+ }
+
+ public long renewDelegationToken(final Token<?> token) throws IOException {
+ return doAsRealUserIfNecessary(new Callable<Long>() {
+ @Override
+ public Long call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ renewDelegationToken(uri, authToken, token);
+ }
+ });
+ }
+
+ public void cancelDelegationToken(final Token<?> token) throws IOException {
+ HttpFSKerberosAuthenticator.
+ cancelDelegationToken(uri, authToken, token);
+ }
+
+ @Override
+ public Token<?> getRenewToken() {
+ return delegationToken;
+ }
+
+ @Override
+ public <T extends TokenIdentifier> void setDelegationToken(Token<T> token) {
+ delegationToken = token;
+ }
+
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java Fri Aug 3 19:00:15 2012
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import javax.servlet.Filter;
@@ -37,6 +38,7 @@ import java.util.Set;
* Filter that Enforces the content-type to be application/octet-stream for
* POST and PUT requests.
*/
+@InterfaceAudience.Private
public class CheckUploadContentTypeFilter implements Filter {
private static final Set<String> UPLOAD_OPERATIONS = new HashSet<String>();
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java Fri Aug 3 19:00:15 2012
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
@@ -40,6 +41,7 @@ import java.util.Map;
/**
* FileSystem operation executors used by {@link HttpFSServer}.
*/
+@InterfaceAudience.Private
public class FSOperations {
@SuppressWarnings({"unchecked", "deprecation"})
@@ -160,6 +162,7 @@ public class FSOperations {
/**
* Executor that performs an append FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSAppend implements FileSystemAccess.FileSystemExecutor<Void> {
private InputStream is;
private Path path;
@@ -198,6 +201,7 @@ public class FSOperations {
/**
* Executor that performs a content-summary FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSContentSummary implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@@ -230,6 +234,7 @@ public class FSOperations {
/**
* Executor that performs a create FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSCreate implements FileSystemAccess.FileSystemExecutor<Void> {
private InputStream is;
private Path path;
@@ -288,6 +293,7 @@ public class FSOperations {
/**
* Executor that performs a delete FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSDelete implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private boolean recursive;
@@ -324,6 +330,7 @@ public class FSOperations {
/**
* Executor that performs a file-checksum FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSFileChecksum implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@@ -356,6 +363,7 @@ public class FSOperations {
/**
* Executor that performs a file-status FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSFileStatus implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@@ -388,6 +396,7 @@ public class FSOperations {
/**
* Executor that performs a home-dir FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSHomeDir implements FileSystemAccess.FileSystemExecutor<JSONObject> {
/**
@@ -413,6 +422,7 @@ public class FSOperations {
/**
* Executor that performs a list-status FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSListStatus implements FileSystemAccess.FileSystemExecutor<Map>, PathFilter {
private Path path;
private PathFilter filter;
@@ -456,6 +466,7 @@ public class FSOperations {
/**
* Executor that performs a mkdirs FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSMkdirs implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
@@ -494,6 +505,7 @@ public class FSOperations {
/**
* Executor that performs a open FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSOpen implements FileSystemAccess.FileSystemExecutor<InputStream> {
private Path path;
@@ -526,6 +538,7 @@ public class FSOperations {
/**
* Executor that performs a rename FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSRename implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private Path toPath;
@@ -562,6 +575,7 @@ public class FSOperations {
/**
* Executor that performs a set-owner FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetOwner implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private String owner;
@@ -600,6 +614,7 @@ public class FSOperations {
/**
* Executor that performs a set-permission FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetPermission implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
@@ -637,6 +652,7 @@ public class FSOperations {
/**
* Executor that performs a set-replication FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetReplication implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private short replication;
@@ -676,6 +692,7 @@ public class FSOperations {
/**
* Executor that performs a set-times FileSystemAccess file system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetTimes implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private long mTime;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java Fri Aug 3 19:00:15 2012
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.http.server;
import com.sun.jersey.api.container.ContainerException;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.service.FileSystemAccessException;
import org.apache.hadoop.lib.wsrs.ExceptionProvider;
import org.slf4j.Logger;
@@ -35,6 +36,7 @@ import java.io.IOException;
* exceptions to HTTP status codes.
*/
@Provider
+@InterfaceAudience.Private
public class HttpFSExceptionProvider extends ExceptionProvider {
private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
private static Logger LOG = LoggerFactory.getLogger(HttpFSExceptionProvider.class);
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java Fri Aug 3 19:00:15 2012
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem.Operation;
import org.apache.hadoop.lib.wsrs.BooleanParam;
@@ -38,6 +39,7 @@ import java.util.regex.Pattern;
* HttpFS ParametersProvider.
*/
@Provider
+@InterfaceAudience.Private
public class HttpFSParametersProvider extends ParametersProvider {
private static final Map<Enum, Class<Param<?>>[]> PARAMS_DEF =
@@ -85,6 +87,7 @@ public class HttpFSParametersProvider ex
/**
* Class for access-time parameter.
*/
+ @InterfaceAudience.Private
public static class AccessTimeParam extends LongParam {
/**
@@ -102,6 +105,7 @@ public class HttpFSParametersProvider ex
/**
* Class for block-size parameter.
*/
+ @InterfaceAudience.Private
public static class BlockSizeParam extends LongParam {
/**
@@ -120,6 +124,7 @@ public class HttpFSParametersProvider ex
/**
* Class for data parameter.
*/
+ @InterfaceAudience.Private
public static class DataParam extends BooleanParam {
/**
@@ -138,6 +143,7 @@ public class HttpFSParametersProvider ex
/**
* Class for operation parameter.
*/
+ @InterfaceAudience.Private
public static class OperationParam extends EnumParam<HttpFSFileSystem.Operation> {
/**
@@ -156,6 +162,7 @@ public class HttpFSParametersProvider ex
/**
* Class for delete's recursive parameter.
*/
+ @InterfaceAudience.Private
public static class RecursiveParam extends BooleanParam {
/**
@@ -174,6 +181,7 @@ public class HttpFSParametersProvider ex
/**
* Class for do-as parameter.
*/
+ @InterfaceAudience.Private
public static class DoAsParam extends StringParam {
/**
@@ -208,6 +216,7 @@ public class HttpFSParametersProvider ex
/**
* Class for filter parameter.
*/
+ @InterfaceAudience.Private
public static class FilterParam extends StringParam {
/**
@@ -227,6 +236,7 @@ public class HttpFSParametersProvider ex
/**
* Class for group parameter.
*/
+ @InterfaceAudience.Private
public static class GroupParam extends StringParam {
/**
@@ -246,6 +256,7 @@ public class HttpFSParametersProvider ex
/**
* Class for len parameter.
*/
+ @InterfaceAudience.Private
public static class LenParam extends LongParam {
/**
@@ -264,6 +275,7 @@ public class HttpFSParametersProvider ex
/**
* Class for modified-time parameter.
*/
+ @InterfaceAudience.Private
public static class ModifiedTimeParam extends LongParam {
/**
@@ -282,6 +294,7 @@ public class HttpFSParametersProvider ex
/**
* Class for offset parameter.
*/
+ @InterfaceAudience.Private
public static class OffsetParam extends LongParam {
/**
@@ -300,6 +313,7 @@ public class HttpFSParametersProvider ex
/**
* Class for overwrite parameter.
*/
+ @InterfaceAudience.Private
public static class OverwriteParam extends BooleanParam {
/**
@@ -318,6 +332,7 @@ public class HttpFSParametersProvider ex
/**
* Class for owner parameter.
*/
+ @InterfaceAudience.Private
public static class OwnerParam extends StringParam {
/**
@@ -337,6 +352,7 @@ public class HttpFSParametersProvider ex
/**
* Class for permission parameter.
*/
+ @InterfaceAudience.Private
public static class PermissionParam extends ShortParam {
/**
@@ -357,6 +373,7 @@ public class HttpFSParametersProvider ex
/**
* Class for replication parameter.
*/
+ @InterfaceAudience.Private
public static class ReplicationParam extends ShortParam {
/**
@@ -375,6 +392,7 @@ public class HttpFSParametersProvider ex
/**
* Class for to-path parameter.
*/
+ @InterfaceAudience.Private
public static class DestinationParam extends StringParam {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.servlet.FileSystemReleaseFilter;
@@ -25,6 +26,7 @@ import org.apache.hadoop.lib.servlet.Fil
* Filter that releases FileSystemAccess filesystem instances upon HTTP request
* completion.
*/
+@InterfaceAudience.Private
public class HttpFSReleaseFilter extends FileSystemReleaseFilter {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
@@ -82,6 +83,7 @@ import java.util.Map;
* different operations.
*/
@Path(HttpFSFileSystem.SERVICE_VERSION)
+@InterfaceAudience.Private
public class HttpFSServer {
private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.lib.server.ServerException;
@@ -39,6 +40,7 @@ import java.io.IOException;
* All the configuration is loaded from configuration properties prefixed
* with <code>httpfs.</code>.
*/
+@InterfaceAudience.Private
public class HttpFSServerWebApp extends ServerWebApp {
private static final Logger LOG =
LoggerFactory.getLogger(HttpFSServerWebApp.class);
@@ -70,7 +72,7 @@ public class HttpFSServerWebApp extends
/**
* Constructor used for testing purposes.
*/
- protected HttpFSServerWebApp(String homeDir, String configDir, String logDir,
+ public HttpFSServerWebApp(String homeDir, String configDir, String logDir,
String tempDir, Configuration config) {
super(NAME, homeDir, configDir, logDir, tempDir, config);
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.lang;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.util.Check;
import java.util.concurrent.Callable;
@@ -26,6 +27,7 @@ import java.util.concurrent.Callable;
* Adapter class that allows <code>Runnable</code>s and <code>Callable</code>s to
* be treated as the other.
*/
+@InterfaceAudience.Private
public class RunnableCallable implements Callable<Void>, Runnable {
private Runnable runnable;
private Callable<?> callable;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.lang;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.util.Check;
import java.text.MessageFormat;
@@ -26,6 +27,7 @@ import java.text.MessageFormat;
* Generic exception that requires error codes and uses a message
* template from the error code.
*/
+@InterfaceAudience.Private
public class XException extends Exception {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.util.ConfigurationUtils;
@@ -26,6 +27,7 @@ import java.util.Map;
/**
* Convenience class implementing the {@link Service} interface.
*/
+@InterfaceAudience.Private
public abstract class BaseService implements Service {
private String prefix;
private Server server;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.lib.util.ConfigurationUtils;
@@ -76,6 +77,7 @@ import java.util.Properties;
* post-initialized (this enables late/conditional service bindings).
* <p/>
*/
+@InterfaceAudience.Private
public class Server {
private Logger log;
@@ -97,7 +99,8 @@ public class Server {
/**
* Enumeration that defines the server status.
*/
- public enum Status {
+ @InterfaceAudience.Private
+ public static enum Status {
UNDEF(false, false),
BOOTING(false, true),
HALTED(true, true),
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java Fri Aug 3 19:00:15 2012
@@ -18,16 +18,19 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by the {@link Server} class.
*/
+@InterfaceAudience.Private
public class ServerException extends XException {
/**
* Error codes use by the {@link Server} class.
*/
+ @InterfaceAudience.Private
public static enum ERROR implements XException.ERROR {
S01("Dir [{0}] does not exist"),
S02("[{0}] is not a directory"),
@@ -39,7 +42,11 @@ public class ServerException extends XEx
S08("Could not load service classes, {0}"),
S09("Could not set service [{0}] programmatically -server shutting down-, {1}"),
S10("Service [{0}] requires service [{1}]"),
- S11("Service [{0}] exception during status change to [{1}] -server shutting down-, {2}");
+ S11("Service [{0}] exception during status change to [{1}] -server shutting down-, {2}"),
+ S12("Could not start service [{0}], {1}"),
+ S13("Missing system property [{0}]"),
+ S14("Could not initialize server, {0}")
+ ;
private String msg;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java Fri Aug 3 19:00:15 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Service interface for components to be managed by the {@link Server} class.
*/
+@InterfaceAudience.Private
public interface Service {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java Fri Aug 3 19:00:15 2012
@@ -18,11 +18,13 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by {@link Service} implementations.
*/
+@InterfaceAudience.Private
public class ServiceException extends ServerException {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java Fri Aug 3 19:00:15 2012
@@ -18,11 +18,13 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import java.io.IOException;
+@InterfaceAudience.Private
public interface FileSystemAccess {
public interface FileSystemExecutor<T> {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java Fri Aug 3 19:00:15 2012
@@ -18,8 +18,10 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
+@InterfaceAudience.Private
public class FileSystemAccessException extends XException {
public enum ERROR implements XException.ERROR {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java Fri Aug 3 19:00:15 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.io.IOException;
import java.util.List;
+@InterfaceAudience.Private
public interface Groups {
public List<String> getGroups(String user) throws IOException;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java Fri Aug 3 19:00:15 2012
@@ -18,8 +18,11 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.util.Map;
+@InterfaceAudience.Private
public interface Instrumentation {
public interface Cron {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java Fri Aug 3 19:00:15 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.io.IOException;
import java.security.AccessControlException;
+@InterfaceAudience.Private
public interface ProxyUser {
public void validate(String proxyUser, String proxyHost, String doAsUser) throws IOException, AccessControlException;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java Fri Aug 3 19:00:15 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
+@InterfaceAudience.Private
public interface Scheduler {
public abstract void schedule(Callable<?> callable, long delay, long interval, TimeUnit unit);
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.hadoop;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
@@ -27,6 +28,7 @@ import org.apache.hadoop.lib.server.Serv
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.service.FileSystemAccessException;
import org.apache.hadoop.lib.service.Instrumentation;
+import org.apache.hadoop.lib.service.Scheduler;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.lib.util.ConfigurationUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -42,8 +44,11 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
+@InterfaceAudience.Private
public class FileSystemAccessService extends BaseService implements FileSystemAccess {
private static final Logger LOG = LoggerFactory.getLogger(FileSystemAccessService.class);
@@ -54,6 +59,8 @@ public class FileSystemAccessService ext
public static final String AUTHENTICATION_TYPE = "authentication.type";
public static final String KERBEROS_KEYTAB = "authentication.kerberos.keytab";
public static final String KERBEROS_PRINCIPAL = "authentication.kerberos.principal";
+ public static final String FS_CACHE_PURGE_FREQUENCY = "filesystem.cache.purge.frequency";
+ public static final String FS_CACHE_PURGE_TIMEOUT = "filesystem.cache.purge.timeout";
public static final String NAME_NODE_WHITELIST = "name.node.whitelist";
@@ -63,6 +70,61 @@ public class FileSystemAccessService ext
private static final String FILE_SYSTEM_SERVICE_CREATED = "FileSystemAccessService.created";
+ private static class CachedFileSystem {
+ private FileSystem fs;
+ private long lastUse;
+ private long timeout;
+ private int count;
+
+ public CachedFileSystem(long timeout) {
+ this.timeout = timeout;
+ lastUse = -1;
+ count = 0;
+ }
+
+ synchronized FileSystem getFileSytem(Configuration conf)
+ throws IOException {
+ if (fs == null) {
+ fs = FileSystem.get(conf);
+ }
+ lastUse = -1;
+ count++;
+ return fs;
+ }
+
+ synchronized void release() throws IOException {
+ count--;
+ if (count == 0) {
+ if (timeout == 0) {
+ fs.close();
+ fs = null;
+ lastUse = -1;
+ }
+ else {
+ lastUse = System.currentTimeMillis();
+ }
+ }
+ }
+
+ // To avoid race conditions when adding/removing entries in the cache map,
+ // an entry in the cache remains forever; it just closes/opens filesystems
+ // based on their utilization. Worst-case scenario, the penalty we'll
+ // pay is that the number of entries in the cache will be the total
+ // number of users in HDFS (which seems a reasonable overhead).
+ synchronized boolean purgeIfIdle() throws IOException {
+ boolean ret = false;
+ if (count == 0 && lastUse != -1 &&
+ (System.currentTimeMillis() - lastUse) > timeout) {
+ fs.close();
+ fs = null;
+ lastUse = -1;
+ ret = true;
+ }
+ return ret;
+ }
+
+ }
+
public FileSystemAccessService() {
super(PREFIX);
}
@@ -73,6 +135,11 @@ public class FileSystemAccessService ext
private AtomicInteger unmanagedFileSystems = new AtomicInteger();
+ private ConcurrentHashMap<String, CachedFileSystem> fsCache =
+ new ConcurrentHashMap<String, CachedFileSystem>();
+
+ private long purgeTimeout;
+
@Override
protected void init() throws ServiceException {
LOG.info("Using FileSystemAccess JARs version [{}]", VersionInfo.getVersion());
@@ -157,6 +224,30 @@ public class FileSystemAccessService ext
return (long) unmanagedFileSystems.get();
}
});
+ Scheduler scheduler = getServer().get(Scheduler.class);
+ int purgeInterval = getServiceConfig().getInt(FS_CACHE_PURGE_FREQUENCY, 60);
+ purgeTimeout = getServiceConfig().getLong(FS_CACHE_PURGE_TIMEOUT, 60);
+ purgeTimeout = (purgeTimeout > 0) ? purgeTimeout : 0;
+ if (purgeTimeout > 0) {
+ scheduler.schedule(new FileSystemCachePurger(),
+ purgeInterval, purgeInterval, TimeUnit.SECONDS);
+ }
+ }
+
+ private class FileSystemCachePurger implements Runnable {
+
+ @Override
+ public void run() {
+ int count = 0;
+ for (CachedFileSystem cacheFs : fsCache.values()) {
+ try {
+ count += cacheFs.purgeIfIdle() ? 1 : 0;
+ } catch (Throwable ex) {
+ LOG.warn("Error while purging filesystem, " + ex.toString(), ex);
+ }
+ }
+ LOG.debug("Purged [{}} filesystem instances", count);
+ }
}
private Set<String> toLowerCase(Collection<String> collection) {
@@ -174,7 +265,7 @@ public class FileSystemAccessService ext
@Override
public Class[] getServiceDependencies() {
- return new Class[]{Instrumentation.class};
+ return new Class[]{Instrumentation.class, Scheduler.class};
}
protected UserGroupInformation getUGI(String user) throws IOException {
@@ -185,12 +276,25 @@ public class FileSystemAccessService ext
conf.set("fs.hdfs.impl.disable.cache", "true");
}
- protected FileSystem createFileSystem(Configuration namenodeConf) throws IOException {
- return FileSystem.get(namenodeConf);
+ private static final String HTTPFS_FS_USER = "httpfs.fs.user";
+
+ protected FileSystem createFileSystem(Configuration namenodeConf)
+ throws IOException {
+ String user = UserGroupInformation.getCurrentUser().getShortUserName();
+ CachedFileSystem newCachedFS = new CachedFileSystem(purgeTimeout);
+ CachedFileSystem cachedFS = fsCache.putIfAbsent(user, newCachedFS);
+ if (cachedFS == null) {
+ cachedFS = newCachedFS;
+ }
+ Configuration conf = new Configuration(namenodeConf);
+ conf.set(HTTPFS_FS_USER, user);
+ return cachedFS.getFileSytem(conf);
}
protected void closeFileSystem(FileSystem fs) throws IOException {
- fs.close();
+ if (fsCache.containsKey(fs.getConf().get(HTTPFS_FS_USER))) {
+ fsCache.get(fs.getConf().get(HTTPFS_FS_USER)).release();
+ }
}
protected void validateNamenode(String namenode) throws FileSystemAccessException {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java Fri Aug 3 19:00:15 2012
@@ -18,10 +18,12 @@
package org.apache.hadoop.lib.service.instrumentation;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
import org.apache.hadoop.lib.service.Instrumentation;
import org.apache.hadoop.lib.service.Scheduler;
+import org.apache.hadoop.util.Time;
import org.json.simple.JSONAware;
import org.json.simple.JSONObject;
import org.json.simple.JSONStreamAware;
@@ -38,6 +40,7 @@ import java.util.concurrent.atomic.Atomi
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+@InterfaceAudience.Private
public class InstrumentationService extends BaseService implements Instrumentation {
public static final String PREFIX = "instrumentation";
public static final String CONF_TIMERS_SIZE = "timers.size";
@@ -164,10 +167,10 @@ public class InstrumentationService exte
throw new IllegalStateException("Cron already used");
}
if (start == 0) {
- start = System.currentTimeMillis();
+ start = Time.now();
lapStart = start;
} else if (lapStart == 0) {
- lapStart = System.currentTimeMillis();
+ lapStart = Time.now();
}
return this;
}
@@ -177,7 +180,7 @@ public class InstrumentationService exte
throw new IllegalStateException("Cron already used");
}
if (lapStart > 0) {
- own += System.currentTimeMillis() - lapStart;
+ own += Time.now() - lapStart;
lapStart = 0;
}
return this;
@@ -185,7 +188,7 @@ public class InstrumentationService exte
void end() {
stop();
- total = System.currentTimeMillis() - start;
+ total = Time.now() - start;
}
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.RunnableCallable;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.Server;
@@ -25,6 +26,7 @@ import org.apache.hadoop.lib.server.Serv
import org.apache.hadoop.lib.service.Instrumentation;
import org.apache.hadoop.lib.service.Scheduler;
import org.apache.hadoop.lib.util.Check;
+import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -34,6 +36,7 @@ import java.util.concurrent.ScheduledExe
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+@InterfaceAudience.Private
public class SchedulerService extends BaseService implements Scheduler {
private static final Logger LOG = LoggerFactory.getLogger(SchedulerService.class);
@@ -59,11 +62,11 @@ public class SchedulerService extends Ba
@Override
public void destroy() {
try {
- long limit = System.currentTimeMillis() + 30 * 1000;
+ long limit = Time.now() + 30 * 1000;
scheduler.shutdownNow();
while (!scheduler.awaitTermination(1000, TimeUnit.MILLISECONDS)) {
LOG.debug("Waiting for scheduler to shutdown");
- if (System.currentTimeMillis() > limit) {
+ if (Time.now() > limit) {
LOG.warn("Gave up waiting for scheduler to shutdown");
break;
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.security;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
@@ -27,6 +28,7 @@ import org.apache.hadoop.lib.util.Config
import java.io.IOException;
import java.util.List;
+@InterfaceAudience.Private
public class GroupsService extends BaseService implements Groups {
private static final String PREFIX = "groups";
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.security;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
@@ -38,10 +39,12 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+@InterfaceAudience.Private
public class ProxyUserService extends BaseService implements ProxyUser {
private static Logger LOG = LoggerFactory.getLogger(ProxyUserService.class);
- public enum ERROR implements XException.ERROR {
+ @InterfaceAudience.Private
+ public static enum ERROR implements XException.ERROR {
PRXU01("Could not normalize host name [{0}], {1}"),
PRXU02("Missing [{0}] property");
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.servlet;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.lib.service.FileSystemAccess;
@@ -37,6 +38,7 @@ import java.io.IOException;
* is streaming out HDFS data and the corresponding filesystem
* instance have to be closed after the streaming completes.
*/
+@InterfaceAudience.Private
public abstract class FileSystemReleaseFilter implements Filter {
private static final ThreadLocal<FileSystem> FILE_SYSTEM_TL = new ThreadLocal<FileSystem>();
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java Fri Aug 3 19:00:15 2012
@@ -19,6 +19,8 @@
package org.apache.hadoop.lib.servlet;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
@@ -31,6 +33,7 @@ import java.net.InetAddress;
/**
* Filter that resolves the requester hostname.
*/
+@InterfaceAudience.Private
public class HostnameFilter implements Filter {
static final ThreadLocal<String> HOSTNAME_TL = new ThreadLocal<String>();
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.servlet;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.MDC;
import javax.servlet.Filter;
@@ -42,6 +43,7 @@ import java.security.Principal;
* <li>path: the path of the request URL</li>
* </ul>
*/
+@InterfaceAudience.Private
public class MDCFilter implements Filter {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java Fri Aug 3 19:00:15 2012
@@ -18,27 +18,37 @@
package org.apache.hadoop.lib.servlet;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.server.Server;
import org.apache.hadoop.lib.server.ServerException;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
import java.text.MessageFormat;
/**
* {@link Server} subclass that implements <code>ServletContextListener</code>
* and uses its lifecycle to start and stop the server.
*/
+@InterfaceAudience.Private
public abstract class ServerWebApp extends Server implements ServletContextListener {
private static final String HOME_DIR = ".home.dir";
private static final String CONFIG_DIR = ".config.dir";
private static final String LOG_DIR = ".log.dir";
private static final String TEMP_DIR = ".temp.dir";
+ private static final String HTTP_HOSTNAME = ".http.hostname";
+ private static final String HTTP_PORT = ".http.port";
private static ThreadLocal<String> HOME_DIR_TL = new ThreadLocal<String>();
+ private InetSocketAddress authority;
+
/**
* Method for testing purposes.
*/
@@ -147,6 +157,38 @@ public abstract class ServerWebApp exten
}
/**
+ * Resolves the host &amp;amp; port InetSocketAddress the web server is listening to.
+ * <p/>
+ * This implementation looks for the following 2 properties:
+ * <ul>
+ * <li>#SERVER_NAME#.http.hostname</li>
+ * <li>#SERVER_NAME#.http.port</li>
+ * </ul>
+ *
+ * @return the host &amp;amp; port InetSocketAddress the web server is listening to.
+ * @throws ServerException thrown if any of the above 2 properties is not defined.
+ */
+ protected InetSocketAddress resolveAuthority() throws ServerException {
+ String hostnameKey = getName() + HTTP_HOSTNAME;
+ String portKey = getName() + HTTP_PORT;
+ String host = System.getProperty(hostnameKey);
+ String port = System.getProperty(portKey);
+ if (host == null) {
+ throw new ServerException(ServerException.ERROR.S13, hostnameKey);
+ }
+ if (port == null) {
+ throw new ServerException(ServerException.ERROR.S13, portKey);
+ }
+ try {
+ InetAddress add = InetAddress.getByName(host);
+ int portNum = Integer.parseInt(port);
+ return new InetSocketAddress(add, portNum);
+ } catch (UnknownHostException ex) {
+ throw new ServerException(ServerException.ERROR.S14, ex.toString(), ex);
+ }
+ }
+
+ /**
* Destroys the <code>ServletContextListener</code> which destroys
* the Server.
*
@@ -156,4 +198,29 @@ public abstract class ServerWebApp exten
destroy();
}
+ /**
+ * Returns the hostname:port InetSocketAddress the webserver is listening to.
+ *
+ * @return the hostname:port InetSocketAddress the webserver is listening to.
+ */
+ public InetSocketAddress getAuthority() throws ServerException {
+ synchronized (this) {
+ if (authority == null) {
+ authority = resolveAuthority();
+ }
+ }
+ return authority;
+ }
+
+ /**
+ * Sets an alternate hostname:port InetSocketAddress to use.
+ * <p/>
+ * For testing purposes.
+ *
+ * @param authority alternate authority.
+ */
+ @VisibleForTesting
+ public void setAuthority(InetSocketAddress authority) {
+ this.authority = authority;
+ }
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/Check.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,8 @@
package org.apache.hadoop.lib.util;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.text.MessageFormat;
import java.util.List;
import java.util.regex.Pattern;
@@ -27,6 +29,7 @@ import java.util.regex.Pattern;
* <p/>
* Commonly used for method arguments preconditions.
*/
+@InterfaceAudience.Private
public class Check {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/util/ConfigurationUtils.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.util;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
@@ -37,6 +38,7 @@ import java.util.Map;
/**
* Configuration utilities.
*/
+@InterfaceAudience.Private
public abstract class ConfigurationUtils {
/**
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java Fri Aug 3 19:00:15 2012
@@ -18,8 +18,11 @@
package org.apache.hadoop.lib.wsrs;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.text.MessageFormat;
+@InterfaceAudience.Private
public abstract class BooleanParam extends Param<Boolean> {
public BooleanParam(String name, Boolean defaultValue) {
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java Fri Aug 3 19:00:15 2012
@@ -18,6 +18,9 @@
package org.apache.hadoop.lib.wsrs;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
public abstract class ByteParam extends Param<Byte> {
public ByteParam(String name, Byte defaultValue) {