You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by sz...@apache.org on 2012/10/19 04:28:07 UTC
svn commit: r1399950 [2/27] - in
/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project: ./
hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/dev-support/
hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/
hadoop-hdfs-httpfs/src/main/java/org/apach...
Propchange: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
Merged /hadoop/common/branches/HDFS-3077/hadoop-hdfs-project/hadoop-hdfs:r1363593-1396941
Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:r1360400-1399945
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/dev-support/findbugsExcludeFile.xml Fri Oct 19 02:25:55 2012
@@ -25,4 +25,14 @@
<Method name="destroy" />
<Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.lib.servlet.ServerWebApp" />
+ <Field name="authority" />
+ <Bug pattern="IS2_INCONSISTENT_SYNC" />
+ </Match>
+ <Match>
+ <Class name="org.apache.hadoop.lib.service.hadoop.FileSystemAccessService" />
+ <Method name="closeFileSystem" />
+ <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+ </Match>
</FindBugsFilter>
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml Fri Oct 19 02:25:55 2012
@@ -43,6 +43,9 @@
<httpfs.tomcat.dist.dir>
${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/httpfs/tomcat
</httpfs.tomcat.dist.dir>
+ <kerberos.realm>LOCALHOST</kerberos.realm>
+ <test.exclude.kerberos.test>**/TestHttpFSWithKerberos.java</test.exclude.kerberos.test>
+ <tomcat.download.url>http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz</tomcat.download.url>
</properties>
<dependencies>
@@ -58,8 +61,13 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-annotations</artifactId>
- <scope>provided</scope>
+ <artifactId>hadoop-auth</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ <scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
@@ -72,18 +80,8 @@
<scope>provided</scope>
</dependency>
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>commons-codec</groupId>
- <artifactId>commons-codec</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.jdom</groupId>
- <artifactId>jdom</artifactId>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
@@ -92,6 +90,11 @@
<scope>compile</scope>
</dependency>
<dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>compile</scope>
@@ -101,10 +104,6 @@
<artifactId>stax-api</artifactId>
</exclusion>
<exclusion>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- </exclusion>
- <exclusion>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
</exclusion>
@@ -246,7 +245,7 @@
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
- <scope>compile</scope>
+ <scope>runtime</scope>
</dependency>
</dependencies>
@@ -267,6 +266,22 @@
</excludes>
</resource>
</resources>
+ <testResources>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ <filtering>false</filtering>
+ <excludes>
+ <exclude>krb5.conf</exclude>
+ </excludes>
+ </testResource>
+ <testResource>
+ <directory>${basedir}/src/test/resources</directory>
+ <filtering>true</filtering>
+ <includes>
+ <include>krb5.conf</include>
+ </includes>
+ </testResource>
+ </testResources>
<plugins>
<plugin>
@@ -281,6 +296,22 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<threadCount>1</threadCount>
+ <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+ <systemPropertyVariables>
+ <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
+ <kerberos.realm>${kerberos.realm}</kerberos.realm>
+ </systemPropertyVariables>
+ <properties>
+ <property>
+ <name>listener</name>
+ <value>org.apache.hadoop.test.TimedOutTestsListener</value>
+ </property>
+ </properties>
+ <excludes>
+ <exclude>**/${test.exclude}.java</exclude>
+ <exclude>${test.exclude.pattern}</exclude>
+ <exclude>${test.exclude.kerberos.test}</exclude>
+ </excludes>
</configuration>
</plugin>
<plugin>
@@ -396,6 +427,36 @@
<profiles>
<profile>
+ <id>testKerberos</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <properties>
+ <test.exclude.kerberos.test>_</test.exclude.kerberos.test>
+ </properties>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>once</forkMode>
+ <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
+ <systemPropertyVariables>
+ <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
+ <kerberos.realm>${kerberos.realm}</kerberos.realm>
+ <httpfs.http.hostname>localhost</httpfs.http.hostname>
+ </systemPropertyVariables>
+ <includes>
+ <include>**/TestHttpFSWithKerberos.java</include>
+ </includes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+
+ <profile>
<id>docs</id>
<activation>
<activeByDefault>false</activeByDefault>
@@ -469,7 +530,7 @@
<target>
<mkdir dir="downloads"/>
<get
- src="http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
+ src="${tomcat.download.url}"
dest="downloads/tomcat.tar.gz" verbose="true" skipexisting="true"/>
<delete dir="${project.build.directory}/tomcat.exp"/>
<mkdir dir="${project.build.directory}/tomcat.exp"/>
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Fri Oct 19 02:25:55 2012
@@ -17,8 +17,10 @@
*/
package org.apache.hadoop.fs.http.client;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.DelegationTokenRenewer;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
@@ -28,16 +30,18 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
@@ -47,30 +51,32 @@ import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.InputStreamReader;
import java.io.OutputStream;
-import java.lang.reflect.Constructor;
import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
-import java.net.URLEncoder;
+import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.Callable;
/**
* HttpFSServer implementation of the FileSystemAccess FileSystem.
* <p/>
* This implementation allows a user to access HDFS over HTTP via a HttpFSServer server.
*/
-public class HttpFSFileSystem extends FileSystem {
+@InterfaceAudience.Private
+public class HttpFSFileSystem extends FileSystem
+ implements DelegationTokenRenewer.Renewable {
- public static final String SERVICE_NAME = "/webhdfs";
+ public static final String SERVICE_NAME = HttpFSUtils.SERVICE_NAME;
- public static final String SERVICE_VERSION = "/v1";
+ public static final String SERVICE_VERSION = HttpFSUtils.SERVICE_VERSION;
- public static final String SERVICE_PREFIX = SERVICE_NAME + SERVICE_VERSION;
+ public static final String SCHEME = "webhdfs";
public static final String OP_PARAM = "op";
public static final String DO_AS_PARAM = "doas";
@@ -84,7 +90,6 @@ public class HttpFSFileSystem extends Fi
public static final String GROUP_PARAM = "group";
public static final String MODIFICATION_TIME_PARAM = "modificationtime";
public static final String ACCESS_TIME_PARAM = "accesstime";
- public static final String RENEWER_PARAM = "renewer";
public static final Short DEFAULT_PERMISSION = 0755;
@@ -144,9 +149,6 @@ public class HttpFSFileSystem extends Fi
public static final String CONTENT_SUMMARY_SPACE_CONSUMED_JSON = "spaceConsumed";
public static final String CONTENT_SUMMARY_SPACE_QUOTA_JSON = "spaceQuota";
- public static final String DELEGATION_TOKEN_JSON = "Token";
- public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString";
-
public static final String ERROR_JSON = "RemoteException";
public static final String ERROR_EXCEPTION_JSON = "exception";
public static final String ERROR_CLASSNAME_JSON = "javaClassName";
@@ -159,7 +161,8 @@ public class HttpFSFileSystem extends Fi
private static final String HTTP_POST = "POST";
private static final String HTTP_DELETE = "DELETE";
- public enum Operation {
+ @InterfaceAudience.Private
+ public static enum Operation {
OPEN(HTTP_GET), GETFILESTATUS(HTTP_GET), LISTSTATUS(HTTP_GET),
GETHOMEDIRECTORY(HTTP_GET), GETCONTENTSUMMARY(HTTP_GET),
GETFILECHECKSUM(HTTP_GET), GETFILEBLOCKLOCATIONS(HTTP_GET),
@@ -184,8 +187,31 @@ public class HttpFSFileSystem extends Fi
private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
private URI uri;
+ private InetSocketAddress httpFSAddr;
private Path workingDir;
+ private UserGroupInformation realUser;
private String doAs;
+ private Token<?> delegationToken;
+
+  //This method enables handling UGI doAs with SPNEGO; we have to
+  //fall back to the real user who logged in with Kerberos credentials
+ private <T> T doAsRealUserIfNecessary(final Callable<T> callable)
+ throws IOException {
+ try {
+ if (realUser.getShortUserName().equals(doAs)) {
+ return callable.call();
+ } else {
+ return realUser.doAs(new PrivilegedExceptionAction<T>() {
+ @Override
+ public T run() throws Exception {
+ return callable.call();
+ }
+ });
+ }
+ } catch (Exception ex) {
+ throw new IOException(ex.toString(), ex);
+ }
+ }
/**
* Convenience method that creates a <code>HttpURLConnection</code> for the
@@ -204,25 +230,23 @@ public class HttpFSFileSystem extends Fi
*
* @throws IOException thrown if an IO error occurs.
*/
- private HttpURLConnection getConnection(String method, Map<String, String> params,
- Path path, boolean makeQualified) throws IOException {
- params.put(DO_AS_PARAM, doAs);
+ private HttpURLConnection getConnection(final String method,
+ Map<String, String> params, Path path, boolean makeQualified)
+ throws IOException {
+ if (!realUser.getShortUserName().equals(doAs)) {
+ params.put(DO_AS_PARAM, doAs);
+ }
+ HttpFSKerberosAuthenticator.injectDelegationToken(params, delegationToken);
if (makeQualified) {
path = makeQualified(path);
}
- URI uri = path.toUri();
- StringBuilder sb = new StringBuilder();
- sb.append(uri.getScheme()).append("://").append(uri.getAuthority()).
- append(SERVICE_PREFIX).append(uri.getPath());
-
- String separator = "?";
- for (Map.Entry<String, String> entry : params.entrySet()) {
- sb.append(separator).append(entry.getKey()).append("=").
- append(URLEncoder.encode(entry.getValue(), "UTF8"));
- separator = "&";
- }
- URL url = new URL(sb.toString());
- return getConnection(url, method);
+ final URL url = HttpFSUtils.createHttpURL(path, params);
+ return doAsRealUserIfNecessary(new Callable<HttpURLConnection>() {
+ @Override
+ public HttpURLConnection call() throws Exception {
+ return getConnection(url, method);
+ }
+ });
}
/**
@@ -240,7 +264,8 @@ public class HttpFSFileSystem extends Fi
*/
private HttpURLConnection getConnection(URL url, String method) throws IOException {
Class<? extends Authenticator> klass =
- getConf().getClass("httpfs.authenticator.class", HttpKerberosAuthenticator.class, Authenticator.class);
+ getConf().getClass("httpfs.authenticator.class",
+ HttpFSKerberosAuthenticator.class, Authenticator.class);
Authenticator authenticator = ReflectionUtils.newInstance(klass, getConf());
try {
HttpURLConnection conn = new AuthenticatedURL(authenticator).openConnection(url, authToken);
@@ -255,63 +280,6 @@ public class HttpFSFileSystem extends Fi
}
/**
- * Convenience method that JSON Parses the <code>InputStream</code> of a <code>HttpURLConnection</code>.
- *
- * @param conn the <code>HttpURLConnection</code>.
- *
- * @return the parsed JSON object.
- *
- * @throws IOException thrown if the <code>InputStream</code> could not be JSON parsed.
- */
- private static Object jsonParse(HttpURLConnection conn) throws IOException {
- try {
- JSONParser parser = new JSONParser();
- return parser.parse(new InputStreamReader(conn.getInputStream()));
- } catch (ParseException ex) {
- throw new IOException("JSON parser error, " + ex.getMessage(), ex);
- }
- }
-
- /**
- * Validates the status of an <code>HttpURLConnection</code> against an expected HTTP
- * status code. If the current status code is not the expected one it throws an exception
- * with a detail message using Server side error messages if available.
- *
- * @param conn the <code>HttpURLConnection</code>.
- * @param expected the expected HTTP status code.
- *
- * @throws IOException thrown if the current status code does not match the expected one.
- */
- private static void validateResponse(HttpURLConnection conn, int expected) throws IOException {
- int status = conn.getResponseCode();
- if (status != expected) {
- try {
- JSONObject json = (JSONObject) jsonParse(conn);
- json = (JSONObject) json.get(ERROR_JSON);
- String message = (String) json.get(ERROR_MESSAGE_JSON);
- String exception = (String) json.get(ERROR_EXCEPTION_JSON);
- String className = (String) json.get(ERROR_CLASSNAME_JSON);
-
- try {
- ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
- Class klass = cl.loadClass(className);
- Constructor constr = klass.getConstructor(String.class);
- throw (IOException) constr.newInstance(message);
- } catch (IOException ex) {
- throw ex;
- } catch (Exception ex) {
- throw new IOException(MessageFormat.format("{0} - {1}", exception, message));
- }
- } catch (IOException ex) {
- if (ex.getCause() instanceof IOException) {
- throw (IOException) ex.getCause();
- }
- throw new IOException(MessageFormat.format("HTTP status [{0}], {1}", status, conn.getResponseMessage()));
- }
- }
- }
-
- /**
* Called after a new FileSystem instance is constructed.
*
* @param name a uri whose authority section names the host, port, etc. for this FileSystem
@@ -320,15 +288,28 @@ public class HttpFSFileSystem extends Fi
@Override
public void initialize(URI name, Configuration conf) throws IOException {
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- doAs = ugi.getUserName();
+
+    //the real user is the one that has the Kerberos credentials needed for
+ //SPNEGO to work
+ realUser = ugi.getRealUser();
+ if (realUser == null) {
+ realUser = UserGroupInformation.getLoginUser();
+ }
+ doAs = ugi.getShortUserName();
super.initialize(name, conf);
try {
- uri = new URI(name.getScheme() + "://" + name.getHost() + ":" + name.getPort());
+ uri = new URI(name.getScheme() + "://" + name.getAuthority());
+ httpFSAddr = NetUtils.createSocketAddr(getCanonicalUri().toString());
} catch (URISyntaxException ex) {
throw new IOException(ex);
}
}
+ @Override
+ public String getScheme() {
+ return SCHEME;
+ }
+
/**
* Returns a URI whose scheme and authority identify this FileSystem.
*
@@ -340,6 +321,16 @@ public class HttpFSFileSystem extends Fi
}
/**
+ * Get the default port for this file system.
+ * @return the default port or 0 if there isn't one
+ */
+ @Override
+ protected int getDefaultPort() {
+ return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
+ DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+ }
+
+ /**
* HttpFSServer subclass of the <code>FSDataInputStream</code>.
* <p/>
* This implementation does not support the
@@ -397,7 +388,7 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.OPEN.toString());
HttpURLConnection conn = getConnection(Operation.OPEN.getMethod(), params,
f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
return new FSDataInputStream(
new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
}
@@ -424,7 +415,7 @@ public class HttpFSFileSystem extends Fi
try {
super.close();
} finally {
- validateResponse(conn, closeStatus);
+ HttpFSUtils.validateResponse(conn, closeStatus);
}
}
@@ -460,11 +451,11 @@ public class HttpFSFileSystem extends Fi
OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize);
return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics);
} catch (IOException ex) {
- validateResponse(conn, expectedStatus);
+ HttpFSUtils.validateResponse(conn, expectedStatus);
throw ex;
}
} else {
- validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ HttpFSUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
throw new IOException("Missing HTTP 'Location' header for [" + conn.getURL() + "]");
}
} else {
@@ -476,7 +467,7 @@ public class HttpFSFileSystem extends Fi
if (exceptionAlreadyHandled) {
throw ex;
} else {
- validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
+ HttpFSUtils.validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
throw ex;
}
}
@@ -548,8 +539,8 @@ public class HttpFSFileSystem extends Fi
params.put(DESTINATION_PARAM, dst.toString());
HttpURLConnection conn = getConnection(Operation.RENAME.getMethod(),
params, src, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(RENAME_JSON);
}
@@ -584,8 +575,8 @@ public class HttpFSFileSystem extends Fi
params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
HttpURLConnection conn = getConnection(Operation.DELETE.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(DELETE_JSON);
}
@@ -605,8 +596,8 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.LISTSTATUS.toString());
HttpURLConnection conn = getConnection(Operation.LISTSTATUS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
json = (JSONObject) json.get(FILE_STATUSES_JSON);
JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON);
FileStatus[] array = new FileStatus[jsonArray.size()];
@@ -653,8 +644,8 @@ public class HttpFSFileSystem extends Fi
params.put(PERMISSION_PARAM, permissionToString(permission));
HttpURLConnection conn = getConnection(Operation.MKDIRS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(MKDIRS_JSON);
}
@@ -674,8 +665,8 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETFILESTATUS.toString());
HttpURLConnection conn = getConnection(Operation.GETFILESTATUS.getMethod(),
params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
json = (JSONObject) json.get(FILE_STATUS_JSON);
f = makeQualified(f);
return createFileStatus(f, json);
@@ -693,8 +684,8 @@ public class HttpFSFileSystem extends Fi
HttpURLConnection conn =
getConnection(Operation.GETHOMEDIRECTORY.getMethod(), params,
new Path(getUri().toString(), "/"), false);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return new Path((String) json.get(HOME_DIR_JSON));
} catch (IOException ex) {
throw new RuntimeException(ex);
@@ -718,7 +709,7 @@ public class HttpFSFileSystem extends Fi
params.put(GROUP_PARAM, groupname);
HttpURLConnection conn = getConnection(Operation.SETOWNER.getMethod(),
params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -733,7 +724,7 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.SETPERMISSION.toString());
params.put(PERMISSION_PARAM, permissionToString(permission));
HttpURLConnection conn = getConnection(Operation.SETPERMISSION.getMethod(), params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -755,7 +746,7 @@ public class HttpFSFileSystem extends Fi
params.put(ACCESS_TIME_PARAM, Long.toString(atime));
HttpURLConnection conn = getConnection(Operation.SETTIMES.getMethod(),
params, p, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
/**
@@ -777,19 +768,11 @@ public class HttpFSFileSystem extends Fi
params.put(REPLICATION_PARAM, Short.toString(replication));
HttpURLConnection conn =
getConnection(Operation.SETREPLICATION.getMethod(), params, src, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json = (JSONObject) jsonParse(conn);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
return (Boolean) json.get(SET_REPLICATION_JSON);
}
- /**
- * Creates a <code>FileStatus</code> object using a JSON file-status payload
- * received from a HttpFSServer server.
- *
- * @param json a JSON file-status payload received from a HttpFSServer server
- *
- * @return the corresponding <code>FileStatus</code>
- */
private FileStatus createFileStatus(Path parent, JSONObject json) {
String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
@@ -828,9 +811,9 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
HttpURLConnection conn =
getConnection(Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- JSONObject json =
- (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
(Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
(Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
@@ -846,9 +829,9 @@ public class HttpFSFileSystem extends Fi
params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
HttpURLConnection conn =
getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
- validateResponse(conn, HttpURLConnection.HTTP_OK);
- final JSONObject json =
- (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
+ HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+ final JSONObject json = (JSONObject) ((JSONObject)
+ HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
return new FileChecksum() {
@Override
public String getAlgorithmName() {
@@ -877,4 +860,42 @@ public class HttpFSFileSystem extends Fi
};
}
+
+ @Override
+ public Token<?> getDelegationToken(final String renewer)
+ throws IOException {
+ return doAsRealUserIfNecessary(new Callable<Token<?>>() {
+ @Override
+ public Token<?> call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ getDelegationToken(uri, httpFSAddr, authToken, renewer);
+ }
+ });
+ }
+
+ public long renewDelegationToken(final Token<?> token) throws IOException {
+ return doAsRealUserIfNecessary(new Callable<Long>() {
+ @Override
+ public Long call() throws Exception {
+ return HttpFSKerberosAuthenticator.
+ renewDelegationToken(uri, authToken, token);
+ }
+ });
+ }
+
+ public void cancelDelegationToken(final Token<?> token) throws IOException {
+ HttpFSKerberosAuthenticator.
+ cancelDelegationToken(uri, authToken, token);
+ }
+
+ @Override
+ public Token<?> getRenewToken() {
+ return delegationToken;
+ }
+
+ @Override
+ public <T extends TokenIdentifier> void setDelegationToken(Token<T> token) {
+ delegationToken = token;
+ }
+
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java Fri Oct 19 02:25:55 2012
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import javax.servlet.Filter;
@@ -37,6 +38,7 @@ import java.util.Set;
* Filter that Enforces the content-type to be application/octet-stream for
* POST and PUT requests.
*/
+@InterfaceAudience.Private
public class CheckUploadContentTypeFilter implements Filter {
private static final Set<String> UPLOAD_OPERATIONS = new HashSet<String>();
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java Fri Oct 19 02:25:55 2012
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
@@ -40,6 +41,7 @@ import java.util.Map;
/**
* FileSystem operation executors used by {@link HttpFSServer}.
*/
+@InterfaceAudience.Private
public class FSOperations {
@SuppressWarnings({"unchecked", "deprecation"})
@@ -160,6 +162,7 @@ public class FSOperations {
/**
* Executor that performs an append FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSAppend implements FileSystemAccess.FileSystemExecutor<Void> {
private InputStream is;
private Path path;
@@ -198,6 +201,7 @@ public class FSOperations {
/**
* Executor that performs a content-summary FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSContentSummary implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@@ -230,6 +234,7 @@ public class FSOperations {
/**
* Executor that performs a create FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSCreate implements FileSystemAccess.FileSystemExecutor<Void> {
private InputStream is;
private Path path;
@@ -288,6 +293,7 @@ public class FSOperations {
/**
* Executor that performs a delete FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSDelete implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private boolean recursive;
@@ -324,6 +330,7 @@ public class FSOperations {
/**
* Executor that performs a file-checksum FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSFileChecksum implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@@ -356,6 +363,7 @@ public class FSOperations {
/**
* Executor that performs a file-status FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSFileStatus implements FileSystemAccess.FileSystemExecutor<Map> {
private Path path;
@@ -388,6 +396,7 @@ public class FSOperations {
/**
* Executor that performs a home-dir FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSHomeDir implements FileSystemAccess.FileSystemExecutor<JSONObject> {
/**
@@ -413,6 +422,7 @@ public class FSOperations {
/**
* Executor that performs a list-status FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSListStatus implements FileSystemAccess.FileSystemExecutor<Map>, PathFilter {
private Path path;
private PathFilter filter;
@@ -456,6 +466,7 @@ public class FSOperations {
/**
* Executor that performs a mkdirs FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSMkdirs implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
@@ -494,6 +505,7 @@ public class FSOperations {
/**
* Executor that performs a open FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSOpen implements FileSystemAccess.FileSystemExecutor<InputStream> {
private Path path;
@@ -526,6 +538,7 @@ public class FSOperations {
/**
* Executor that performs a rename FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSRename implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private Path toPath;
@@ -562,6 +575,7 @@ public class FSOperations {
/**
* Executor that performs a set-owner FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetOwner implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private String owner;
@@ -600,6 +614,7 @@ public class FSOperations {
/**
* Executor that performs a set-permission FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetPermission implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
@@ -637,6 +652,7 @@ public class FSOperations {
/**
* Executor that performs a set-replication FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetReplication implements FileSystemAccess.FileSystemExecutor<JSONObject> {
private Path path;
private short replication;
@@ -676,6 +692,7 @@ public class FSOperations {
/**
* Executor that performs a set-times FileSystemAccess files system operation.
*/
+ @InterfaceAudience.Private
public static class FSSetTimes implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private long mTime;
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java Fri Oct 19 02:25:55 2012
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.http.server;
import com.sun.jersey.api.container.ContainerException;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.service.FileSystemAccessException;
import org.apache.hadoop.lib.wsrs.ExceptionProvider;
import org.slf4j.Logger;
@@ -35,6 +36,7 @@ import java.io.IOException;
* exceptions to HTTP status codes.
*/
@Provider
+@InterfaceAudience.Private
public class HttpFSExceptionProvider extends ExceptionProvider {
private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
private static Logger LOG = LoggerFactory.getLogger(HttpFSExceptionProvider.class);
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java Fri Oct 19 02:25:55 2012
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem.Operation;
import org.apache.hadoop.lib.wsrs.BooleanParam;
@@ -32,12 +33,12 @@ import org.slf4j.MDC;
import javax.ws.rs.ext.Provider;
import java.util.HashMap;
import java.util.Map;
-import java.util.regex.Pattern;
/**
* HttpFS ParametersProvider.
*/
@Provider
+@InterfaceAudience.Private
public class HttpFSParametersProvider extends ParametersProvider {
private static final Map<Enum, Class<Param<?>>[]> PARAMS_DEF =
@@ -85,6 +86,7 @@ public class HttpFSParametersProvider ex
/**
* Class for access-time parameter.
*/
+ @InterfaceAudience.Private
public static class AccessTimeParam extends LongParam {
/**
@@ -102,6 +104,7 @@ public class HttpFSParametersProvider ex
/**
* Class for block-size parameter.
*/
+ @InterfaceAudience.Private
public static class BlockSizeParam extends LongParam {
/**
@@ -120,6 +123,7 @@ public class HttpFSParametersProvider ex
/**
* Class for data parameter.
*/
+ @InterfaceAudience.Private
public static class DataParam extends BooleanParam {
/**
@@ -138,6 +142,7 @@ public class HttpFSParametersProvider ex
/**
* Class for operation parameter.
*/
+ @InterfaceAudience.Private
public static class OperationParam extends EnumParam<HttpFSFileSystem.Operation> {
/**
@@ -156,6 +161,7 @@ public class HttpFSParametersProvider ex
/**
* Class for delete's recursive parameter.
*/
+ @InterfaceAudience.Private
public static class RecursiveParam extends BooleanParam {
/**
@@ -174,6 +180,7 @@ public class HttpFSParametersProvider ex
/**
* Class for do-as parameter.
*/
+ @InterfaceAudience.Private
public static class DoAsParam extends StringParam {
/**
@@ -208,6 +215,7 @@ public class HttpFSParametersProvider ex
/**
* Class for filter parameter.
*/
+ @InterfaceAudience.Private
public static class FilterParam extends StringParam {
/**
@@ -227,6 +235,7 @@ public class HttpFSParametersProvider ex
/**
* Class for group parameter.
*/
+ @InterfaceAudience.Private
public static class GroupParam extends StringParam {
/**
@@ -246,12 +255,13 @@ public class HttpFSParametersProvider ex
/**
* Class for len parameter.
*/
+ @InterfaceAudience.Private
public static class LenParam extends LongParam {
/**
* Parameter name.
*/
- public static final String NAME = "len";
+ public static final String NAME = "length";
/**
* Constructor.
@@ -264,6 +274,7 @@ public class HttpFSParametersProvider ex
/**
* Class for modified-time parameter.
*/
+ @InterfaceAudience.Private
public static class ModifiedTimeParam extends LongParam {
/**
@@ -282,6 +293,7 @@ public class HttpFSParametersProvider ex
/**
* Class for offset parameter.
*/
+ @InterfaceAudience.Private
public static class OffsetParam extends LongParam {
/**
@@ -300,6 +312,7 @@ public class HttpFSParametersProvider ex
/**
* Class for overwrite parameter.
*/
+ @InterfaceAudience.Private
public static class OverwriteParam extends BooleanParam {
/**
@@ -318,6 +331,7 @@ public class HttpFSParametersProvider ex
/**
* Class for owner parameter.
*/
+ @InterfaceAudience.Private
public static class OwnerParam extends StringParam {
/**
@@ -337,6 +351,7 @@ public class HttpFSParametersProvider ex
/**
* Class for permission parameter.
*/
+ @InterfaceAudience.Private
public static class PermissionParam extends ShortParam {
/**
@@ -357,6 +372,7 @@ public class HttpFSParametersProvider ex
/**
* Class for replication parameter.
*/
+ @InterfaceAudience.Private
public static class ReplicationParam extends ShortParam {
/**
@@ -375,6 +391,7 @@ public class HttpFSParametersProvider ex
/**
* Class for to-path parameter.
*/
+ @InterfaceAudience.Private
public static class DestinationParam extends StringParam {
/**
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSReleaseFilter.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.servlet.FileSystemReleaseFilter;
@@ -25,6 +26,7 @@ import org.apache.hadoop.lib.servlet.Fil
* Filter that releases FileSystemAccess filesystem instances upon HTTP request
* completion.
*/
+@InterfaceAudience.Private
public class HttpFSReleaseFilter extends FileSystemReleaseFilter {
/**
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
@@ -82,6 +83,7 @@ import java.util.Map;
* different operations.
*/
@Path(HttpFSFileSystem.SERVICE_VERSION)
+@InterfaceAudience.Private
public class HttpFSServer {
private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebApp.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.http.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.lib.server.ServerException;
@@ -39,6 +40,7 @@ import java.io.IOException;
* All the configuration is loaded from configuration properties prefixed
* with <code>httpfs.</code>.
*/
+@InterfaceAudience.Private
public class HttpFSServerWebApp extends ServerWebApp {
private static final Logger LOG =
LoggerFactory.getLogger(HttpFSServerWebApp.class);
@@ -70,7 +72,7 @@ public class HttpFSServerWebApp extends
/**
* Constructor used for testing purposes.
*/
- protected HttpFSServerWebApp(String homeDir, String configDir, String logDir,
+ public HttpFSServerWebApp(String homeDir, String configDir, String logDir,
String tempDir, Configuration config) {
super(NAME, homeDir, configDir, logDir, tempDir, config);
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/RunnableCallable.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.lang;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.util.Check;
import java.util.concurrent.Callable;
@@ -26,6 +27,7 @@ import java.util.concurrent.Callable;
* Adapter class that allows <code>Runnable</code>s and <code>Callable</code>s to
* be treated as the other.
*/
+@InterfaceAudience.Private
public class RunnableCallable implements Callable<Void>, Runnable {
private Runnable runnable;
private Callable<?> callable;
@@ -88,6 +90,7 @@ public class RunnableCallable implements
*
* @return the class name of the wrapper callable/runnable.
*/
+ @Override
public String toString() {
return (runnable != null) ? runnable.getClass().getSimpleName() : callable.getClass().getSimpleName();
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/lang/XException.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.lang;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.util.Check;
import java.text.MessageFormat;
@@ -26,6 +27,7 @@ import java.text.MessageFormat;
 * Generic exception that requires error codes and uses a message
* template from the error code.
*/
+@InterfaceAudience.Private
public class XException extends Exception {
/**
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/BaseService.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.util.ConfigurationUtils;
@@ -26,6 +27,7 @@ import java.util.Map;
/**
* Convenience class implementing the {@link Service} interface.
*/
+@InterfaceAudience.Private
public abstract class BaseService implements Service {
private String prefix;
private Server server;
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.lib.util.ConfigurationUtils;
@@ -76,6 +77,7 @@ import java.util.Properties;
* post-initialized (this enables late/conditional service bindings).
* <p/>
*/
+@InterfaceAudience.Private
public class Server {
private Logger log;
@@ -97,7 +99,8 @@ public class Server {
/**
* Enumeration that defines the server status.
*/
- public enum Status {
+ @InterfaceAudience.Private
+ public static enum Status {
UNDEF(false, false),
BOOTING(false, true),
HALTED(true, true),
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServerException.java Fri Oct 19 02:25:55 2012
@@ -18,16 +18,19 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by the {@link Server} class.
*/
+@InterfaceAudience.Private
public class ServerException extends XException {
/**
* Error codes use by the {@link Server} class.
*/
+ @InterfaceAudience.Private
public static enum ERROR implements XException.ERROR {
S01("Dir [{0}] does not exist"),
S02("[{0}] is not a directory"),
@@ -39,7 +42,11 @@ public class ServerException extends XEx
S08("Could not load service classes, {0}"),
S09("Could not set service [{0}] programmatically -server shutting down-, {1}"),
S10("Service [{0}] requires service [{1}]"),
- S11("Service [{0}] exception during status change to [{1}] -server shutting down-, {2}");
+ S11("Service [{0}] exception during status change to [{1}] -server shutting down-, {2}"),
+ S12("Could not start service [{0}], {1}"),
+ S13("Missing system property [{0}]"),
+ S14("Could not initialize server, {0}")
+ ;
private String msg;
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Service.java Fri Oct 19 02:25:55 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Service interface for components to be managed by the {@link Server} class.
*/
+@InterfaceAudience.Private
public interface Service {
/**
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/ServiceException.java Fri Oct 19 02:25:55 2012
@@ -18,11 +18,13 @@
package org.apache.hadoop.lib.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
/**
* Exception thrown by {@link Service} implementations.
*/
+@InterfaceAudience.Private
public class ServiceException extends ServerException {
/**
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java Fri Oct 19 02:25:55 2012
@@ -18,11 +18,13 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import java.io.IOException;
+@InterfaceAudience.Private
public interface FileSystemAccess {
public interface FileSystemExecutor<T> {
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccessException.java Fri Oct 19 02:25:55 2012
@@ -18,8 +18,10 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
+@InterfaceAudience.Private
public class FileSystemAccessException extends XException {
public enum ERROR implements XException.ERROR {
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Groups.java Fri Oct 19 02:25:55 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.io.IOException;
import java.util.List;
+@InterfaceAudience.Private
public interface Groups {
public List<String> getGroups(String user) throws IOException;
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Instrumentation.java Fri Oct 19 02:25:55 2012
@@ -18,8 +18,11 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.util.Map;
+@InterfaceAudience.Private
public interface Instrumentation {
public interface Cron {
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/ProxyUser.java Fri Oct 19 02:25:55 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.io.IOException;
import java.security.AccessControlException;
+@InterfaceAudience.Private
public interface ProxyUser {
public void validate(String proxyUser, String proxyHost, String doAsUser) throws IOException, AccessControlException;
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/Scheduler.java Fri Oct 19 02:25:55 2012
@@ -18,9 +18,12 @@
package org.apache.hadoop.lib.service;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
+@InterfaceAudience.Private
public interface Scheduler {
public abstract void schedule(Callable<?> callable, long delay, long interval, TimeUnit unit);
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.hadoop;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
@@ -27,6 +28,7 @@ import org.apache.hadoop.lib.server.Serv
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.service.FileSystemAccessException;
import org.apache.hadoop.lib.service.Instrumentation;
+import org.apache.hadoop.lib.service.Scheduler;
import org.apache.hadoop.lib.util.Check;
import org.apache.hadoop.lib.util.ConfigurationUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -42,8 +44,11 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
+@InterfaceAudience.Private
public class FileSystemAccessService extends BaseService implements FileSystemAccess {
private static final Logger LOG = LoggerFactory.getLogger(FileSystemAccessService.class);
@@ -54,6 +59,8 @@ public class FileSystemAccessService ext
public static final String AUTHENTICATION_TYPE = "authentication.type";
public static final String KERBEROS_KEYTAB = "authentication.kerberos.keytab";
public static final String KERBEROS_PRINCIPAL = "authentication.kerberos.principal";
+ public static final String FS_CACHE_PURGE_FREQUENCY = "filesystem.cache.purge.frequency";
+ public static final String FS_CACHE_PURGE_TIMEOUT = "filesystem.cache.purge.timeout";
public static final String NAME_NODE_WHITELIST = "name.node.whitelist";
@@ -63,6 +70,61 @@ public class FileSystemAccessService ext
private static final String FILE_SYSTEM_SERVICE_CREATED = "FileSystemAccessService.created";
+ private static class CachedFileSystem {
+ private FileSystem fs;
+ private long lastUse;
+ private long timeout;
+ private int count;
+
+ public CachedFileSystem(long timeout) {
+ this.timeout = timeout;
+ lastUse = -1;
+ count = 0;
+ }
+
+ synchronized FileSystem getFileSytem(Configuration conf)
+ throws IOException {
+ if (fs == null) {
+ fs = FileSystem.get(conf);
+ }
+ lastUse = -1;
+ count++;
+ return fs;
+ }
+
+ synchronized void release() throws IOException {
+ count--;
+ if (count == 0) {
+ if (timeout == 0) {
+ fs.close();
+ fs = null;
+ lastUse = -1;
+ }
+ else {
+ lastUse = System.currentTimeMillis();
+ }
+ }
+ }
+
+ // to avoid race conditions in the map cache adding removing entries
+ // an entry in the cache remains forever, it just closes/opens filesystems
+ // based on their utilization. Worst-case scenario, the penalty we'll
+ // pay is that the amount of entries in the cache will be the total
+ // number of users in HDFS (which seems a reasonable overhead).
+ synchronized boolean purgeIfIdle() throws IOException {
+ boolean ret = false;
+ if (count == 0 && lastUse != -1 &&
+ (System.currentTimeMillis() - lastUse) > timeout) {
+ fs.close();
+ fs = null;
+ lastUse = -1;
+ ret = true;
+ }
+ return ret;
+ }
+
+ }
+
/** Creates the service, registered under the {@code PREFIX} configuration section. */
public FileSystemAccessService() {
super(PREFIX);
}
@@ -73,6 +135,11 @@ public class FileSystemAccessService ext
private AtomicInteger unmanagedFileSystems = new AtomicInteger();
+ private ConcurrentHashMap<String, CachedFileSystem> fsCache =
+ new ConcurrentHashMap<String, CachedFileSystem>();
+
+ private long purgeTimeout;
+
@Override
protected void init() throws ServiceException {
LOG.info("Using FileSystemAccess JARs version [{}]", VersionInfo.getVersion());
@@ -157,6 +224,30 @@ public class FileSystemAccessService ext
return (long) unmanagedFileSystems.get();
}
});
+ Scheduler scheduler = getServer().get(Scheduler.class);
+ int purgeInterval = getServiceConfig().getInt(FS_CACHE_PURGE_FREQUENCY, 60);
+ purgeTimeout = getServiceConfig().getLong(FS_CACHE_PURGE_TIMEOUT, 60);
+ purgeTimeout = (purgeTimeout > 0) ? purgeTimeout : 0;
+ if (purgeTimeout > 0) {
+ scheduler.schedule(new FileSystemCachePurger(),
+ purgeInterval, purgeInterval, TimeUnit.SECONDS);
+ }
+ }
+
+ private class FileSystemCachePurger implements Runnable {
+
+ @Override
+ public void run() {
+ int count = 0;
+ for (CachedFileSystem cacheFs : fsCache.values()) {
+ try {
+ count += cacheFs.purgeIfIdle() ? 1 : 0;
+ } catch (Throwable ex) {
+ LOG.warn("Error while purging filesystem, " + ex.toString(), ex);
+ }
+ }
+ LOG.debug("Purged [{}} filesystem instances", count);
+ }
}
private Set<String> toLowerCase(Collection<String> collection) {
@@ -174,7 +265,7 @@ public class FileSystemAccessService ext
@Override
public Class[] getServiceDependencies() {
- return new Class[]{Instrumentation.class};
+ return new Class[]{Instrumentation.class, Scheduler.class};
}
protected UserGroupInformation getUGI(String user) throws IOException {
@@ -185,12 +276,25 @@ public class FileSystemAccessService ext
conf.set("fs.hdfs.impl.disable.cache", "true");
}
- protected FileSystem createFileSystem(Configuration namenodeConf) throws IOException {
- return FileSystem.get(namenodeConf);
+ private static final String HTTPFS_FS_USER = "httpfs.fs.user";
+
+ protected FileSystem createFileSystem(Configuration namenodeConf)
+ throws IOException {
+ String user = UserGroupInformation.getCurrentUser().getShortUserName();
+ CachedFileSystem newCachedFS = new CachedFileSystem(purgeTimeout);
+ CachedFileSystem cachedFS = fsCache.putIfAbsent(user, newCachedFS);
+ if (cachedFS == null) {
+ cachedFS = newCachedFS;
+ }
+ Configuration conf = new Configuration(namenodeConf);
+ conf.set(HTTPFS_FS_USER, user);
+ return cachedFS.getFileSytem(conf);
}
protected void closeFileSystem(FileSystem fs) throws IOException {
- fs.close();
+ if (fsCache.containsKey(fs.getConf().get(HTTPFS_FS_USER))) {
+ fsCache.get(fs.getConf().get(HTTPFS_FS_USER)).release();
+ }
}
protected void validateNamenode(String namenode) throws FileSystemAccessException {
@@ -224,6 +328,7 @@ public class FileSystemAccessService ext
getAuthority());
UserGroupInformation ugi = getUGI(user);
return ugi.doAs(new PrivilegedExceptionAction<T>() {
+ @Override
public T run() throws Exception {
FileSystem fs = createFileSystem(conf);
Instrumentation instrumentation = getServer().get(Instrumentation.class);
@@ -258,6 +363,7 @@ public class FileSystemAccessService ext
new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).getAuthority());
UserGroupInformation ugi = getUGI(user);
return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ @Override
public FileSystem run() throws Exception {
return createFileSystem(conf);
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java Fri Oct 19 02:25:55 2012
@@ -18,10 +18,12 @@
package org.apache.hadoop.lib.service.instrumentation;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
import org.apache.hadoop.lib.service.Instrumentation;
import org.apache.hadoop.lib.service.Scheduler;
+import org.apache.hadoop.util.Time;
import org.json.simple.JSONAware;
import org.json.simple.JSONObject;
import org.json.simple.JSONStreamAware;
@@ -38,6 +40,7 @@ import java.util.concurrent.atomic.Atomi
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+@InterfaceAudience.Private
public class InstrumentationService extends BaseService implements Instrumentation {
public static final String PREFIX = "instrumentation";
public static final String CONF_TIMERS_SIZE = "timers.size";
@@ -82,16 +85,19 @@ public class InstrumentationService exte
all.put("samplers", (Map) samplers);
jvmVariables.put("free.memory", new VariableHolder<Long>(new Instrumentation.Variable<Long>() {
+ @Override
public Long getValue() {
return Runtime.getRuntime().freeMemory();
}
}));
jvmVariables.put("max.memory", new VariableHolder<Long>(new Instrumentation.Variable<Long>() {
+ @Override
public Long getValue() {
return Runtime.getRuntime().maxMemory();
}
}));
jvmVariables.put("total.memory", new VariableHolder<Long>(new Instrumentation.Variable<Long>() {
+ @Override
public Long getValue() {
return Runtime.getRuntime().totalMemory();
}
@@ -159,25 +165,27 @@ public class InstrumentationService exte
long own;
long total;
+ @Override
public Cron start() {
if (total != 0) {
throw new IllegalStateException("Cron already used");
}
if (start == 0) {
- start = System.currentTimeMillis();
+ start = Time.now();
lapStart = start;
} else if (lapStart == 0) {
- lapStart = System.currentTimeMillis();
+ lapStart = Time.now();
}
return this;
}
+ @Override
public Cron stop() {
if (total != 0) {
throw new IllegalStateException("Cron already used");
}
if (lapStart > 0) {
- own += System.currentTimeMillis() - lapStart;
+ own += Time.now() - lapStart;
lapStart = 0;
}
return this;
@@ -185,7 +193,7 @@ public class InstrumentationService exte
void end() {
stop();
- total = System.currentTimeMillis() - start;
+ total = Time.now() - start;
}
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.RunnableCallable;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.Server;
@@ -25,6 +26,7 @@ import org.apache.hadoop.lib.server.Serv
import org.apache.hadoop.lib.service.Instrumentation;
import org.apache.hadoop.lib.service.Scheduler;
import org.apache.hadoop.lib.util.Check;
+import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -34,6 +36,7 @@ import java.util.concurrent.ScheduledExe
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+@InterfaceAudience.Private
public class SchedulerService extends BaseService implements Scheduler {
private static final Logger LOG = LoggerFactory.getLogger(SchedulerService.class);
@@ -59,11 +62,11 @@ public class SchedulerService extends Ba
@Override
public void destroy() {
try {
- long limit = System.currentTimeMillis() + 30 * 1000;
+ long limit = Time.now() + 30 * 1000;
scheduler.shutdownNow();
while (!scheduler.awaitTermination(1000, TimeUnit.MILLISECONDS)) {
LOG.debug("Waiting for scheduler to shutdown");
- if (System.currentTimeMillis() > limit) {
+ if (Time.now() > limit) {
LOG.warn("Gave up waiting for scheduler to shutdown");
break;
}
@@ -93,6 +96,7 @@ public class SchedulerService extends Ba
LOG.debug("Scheduling callable [{}], interval [{}] seconds, delay [{}] in [{}]",
new Object[]{callable, delay, interval, unit});
Runnable r = new Runnable() {
+ @Override
public void run() {
String instrName = callable.getClass().getSimpleName();
Instrumentation instr = getServer().get(Instrumentation.class);
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/GroupsService.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.security;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
@@ -27,6 +28,7 @@ import org.apache.hadoop.lib.util.Config
import java.io.IOException;
import java.util.List;
+@InterfaceAudience.Private
public class GroupsService extends BaseService implements Groups {
private static final String PREFIX = "groups";
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/security/ProxyUserService.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.service.security;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.lib.lang.XException;
import org.apache.hadoop.lib.server.BaseService;
import org.apache.hadoop.lib.server.ServiceException;
@@ -38,10 +39,12 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+@InterfaceAudience.Private
public class ProxyUserService extends BaseService implements ProxyUser {
private static Logger LOG = LoggerFactory.getLogger(ProxyUserService.class);
- public enum ERROR implements XException.ERROR {
+ @InterfaceAudience.Private
+ public static enum ERROR implements XException.ERROR {
PRXU01("Could not normalize host name [{0}], {1}"),
PRXU02("Missing [{0}] property");
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/FileSystemReleaseFilter.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.servlet;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.lib.service.FileSystemAccess;
@@ -37,6 +38,7 @@ import java.io.IOException;
* is streaming out HDFS data and the corresponding filesystem
* instance have to be closed after the streaming completes.
*/
+@InterfaceAudience.Private
public abstract class FileSystemReleaseFilter implements Filter {
private static final ThreadLocal<FileSystem> FILE_SYSTEM_TL = new ThreadLocal<FileSystem>();
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/HostnameFilter.java Fri Oct 19 02:25:55 2012
@@ -19,6 +19,8 @@
package org.apache.hadoop.lib.servlet;
+import org.apache.hadoop.classification.InterfaceAudience;
+
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
@@ -31,6 +33,7 @@ import java.net.InetAddress;
/**
* Filter that resolves the requester hostname.
*/
+@InterfaceAudience.Private
public class HostnameFilter implements Filter {
static final ThreadLocal<String> HOSTNAME_TL = new ThreadLocal<String>();
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/MDCFilter.java Fri Oct 19 02:25:55 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.lib.servlet;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.MDC;
import javax.servlet.Filter;
@@ -42,6 +43,7 @@ import java.security.Principal;
* <li>path: the path of the request URL</li>
* </ul>
*/
+@InterfaceAudience.Private
public class MDCFilter implements Filter {
/**