You are viewing a plain text version of this content; the canonical version is available at the original mailing-list archive link for this message.
Posted to hdfs-commits@hadoop.apache.org by sz...@apache.org on 2012/06/26 03:58:14 UTC
svn commit: r1353800 - in
/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/java/org/apache/hadoop/hdfs/web/
src/test/java/org/apache/hadoop/hdfs/web/
Author: szetszwo
Date: Tue Jun 26 01:58:13 2012
New Revision: 1353800
URL: http://svn.apache.org/viewvc?rev=1353800&view=rev
Log:
HDFS-3516. Check content-type in WebHdfsFileSystem.
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1353800&r1=1353799&r2=1353800&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Jun 26 01:58:13 2012
@@ -238,6 +238,8 @@ Branch-2 ( Unreleased changes )
HDFS-3372. offlineEditsViewer should be able to read a binary
edits file with recovery mode. (Colin Patrick McCabe via eli)
+ HDFS-3516. Check content-type in WebHdfsFileSystem. (szetszwo)
+
OPTIMIZATIONS
HDFS-2982. Startup performance suffers when there are many edit log
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1353800&r1=1353799&r2=1353800&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Tue Jun 26 01:58:13 2012
@@ -34,6 +34,8 @@ import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
+import javax.ws.rs.core.MediaType;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -252,9 +254,23 @@ public class WebHdfsFileSystem extends F
return f.isAbsolute()? f: new Path(workingDir, f);
}
- static Map<?, ?> jsonParse(final InputStream in) throws IOException {
+ static Map<?, ?> jsonParse(final HttpURLConnection c, final boolean useErrorStream
+ ) throws IOException {
+ if (c.getContentLength() == 0) {
+ return null;
+ }
+ final InputStream in = useErrorStream? c.getErrorStream(): c.getInputStream();
if (in == null) {
- throw new IOException("The input stream is null.");
+ throw new IOException("The " + (useErrorStream? "error": "input") + " stream is null.");
+ }
+ final String contentType = c.getContentType();
+ if (contentType != null) {
+ final MediaType parsed = MediaType.valueOf(contentType);
+ if (!MediaType.APPLICATION_JSON_TYPE.isCompatible(parsed)) {
+ throw new IOException("Content-Type \"" + contentType
+ + "\" is incompatible with \"" + MediaType.APPLICATION_JSON
+ + "\" (parsed=\"" + parsed + "\")");
+ }
}
return (Map<?, ?>)JSON.parse(new InputStreamReader(in));
}
@@ -265,7 +281,7 @@ public class WebHdfsFileSystem extends F
if (code != op.getExpectedHttpResponseCode()) {
final Map<?, ?> m;
try {
- m = jsonParse(conn.getErrorStream());
+ m = jsonParse(conn, true);
} catch(IOException e) {
throw new IOException("Unexpected HTTP response: code=" + code + " != "
+ op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
@@ -425,7 +441,7 @@ public class WebHdfsFileSystem extends F
final HttpURLConnection conn = httpConnect(op, fspath, parameters);
try {
final Map<?, ?> m = validateResponse(op, conn);
- return m != null? m: jsonParse(conn.getInputStream());
+ return m != null? m: jsonParse(conn, false);
} finally {
conn.disconnect();
}
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1353800&r1=1353799&r2=1353800&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Tue Jun 26 01:58:13 2012
@@ -287,6 +287,10 @@ public class TestWebHdfsFileSystemContra
final Path root = new Path("/");
final Path dir = new Path("/test/testUrl");
assertTrue(webhdfs.mkdirs(dir));
+ final Path file = new Path("/test/file");
+ final FSDataOutputStream out = webhdfs.create(file);
+ out.write(1);
+ out.close();
{//test GETHOMEDIRECTORY
final URL url = webhdfs.toUrl(GetOpParam.Op.GETHOMEDIRECTORY, root);
@@ -378,5 +382,21 @@ public class TestWebHdfsFileSystemContra
conn.connect();
assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
}
+
+ {//test jsonParse with non-json type.
+ final HttpOpParam.Op op = GetOpParam.Op.OPEN;
+ final URL url = webhdfs.toUrl(op, file);
+ final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestMethod(op.getType().toString());
+ conn.connect();
+
+ try {
+ WebHdfsFileSystem.jsonParse(conn, false);
+ fail();
+ } catch(IOException ioe) {
+ WebHdfsFileSystem.LOG.info("GOOD", ioe);
+ }
+ conn.disconnect();
+ }
}
}
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java?rev=1353800&r1=1353799&r2=1353800&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java Tue Jun 26 01:58:13 2012
@@ -49,7 +49,7 @@ public class WebHdfsTestUtil {
public static WebHdfsFileSystem getWebHdfsFileSystemAs(
final UserGroupInformation ugi, final Configuration conf
- ) throws IOException, URISyntaxException, InterruptedException {
+ ) throws IOException, InterruptedException {
return ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
@Override
public WebHdfsFileSystem run() throws Exception {
@@ -70,7 +70,7 @@ public class WebHdfsTestUtil {
final int expectedResponseCode) throws IOException {
conn.connect();
Assert.assertEquals(expectedResponseCode, conn.getResponseCode());
- return WebHdfsFileSystem.jsonParse(conn.getInputStream());
+ return WebHdfsFileSystem.jsonParse(conn, false);
}
public static HttpURLConnection twoStepWrite(HttpURLConnection conn,