Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2015/03/23 22:25:44 UTC

hadoop git commit: HDFS-7881. TestHftpFileSystem#testSeek fails in branch-2. Contributed by Brahma Reddy Battula.

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 4e0c48703 -> fad8c7817


HDFS-7881. TestHftpFileSystem#testSeek fails in branch-2. Contributed by Brahma Reddy Battula.
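
Context: ByteRangeInputStream previously required a Content-Length header on
every response and threw an IOException when it was absent. As the code comment
in the patch notes, HftpFileSystem does not return Content-Length when the
content is partial: a ranged read (e.g. after a seek) comes back as a
206 Partial Content response carrying only a Content-Range header, which is why
TestHftpFileSystem#testSeek failed. The patch below falls back to deriving the
stream length from Content-Range whenever the response code is 206.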


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fad8c781
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fad8c781
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fad8c781

Branch: refs/heads/branch-2
Commit: fad8c78173c4b7c55324033720f04a09943deac7
Parents: 4e0c487
Author: Akira Ajisaka <aa...@apache.org>
Authored: Tue Mar 24 06:21:14 2015 +0900
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Tue Mar 24 06:24:29 2015 +0900

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 ++
 .../hadoop/hdfs/web/ByteRangeInputStream.java   | 38 ++++++++++++++++----
 2 files changed, 35 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad8c781/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 98ea260..9981d4f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -931,6 +931,9 @@ Release 2.7.0 - UNRELEASED
 
     HDFS-7942. NFS: support regexp grouping in nfs.exports.allowed.hosts (brandonli)
 
+    HDFS-7881. TestHftpFileSystem#testSeek fails in branch-2.
+    (Brahma Reddy Battula via aajisaka)
+
     BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
       HDFS-7720. Quota by Storage Type API, tools and ClientNameNode

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad8c781/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ByteRangeInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ByteRangeInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ByteRangeInputStream.java
index 395c9f6..9e3b29a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ByteRangeInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ByteRangeInputStream.java
@@ -28,6 +28,7 @@ import java.util.StringTokenizer;
 
 import org.apache.commons.io.input.BoundedInputStream;
 import org.apache.hadoop.fs.FSInputStream;
+import org.apache.http.HttpStatus;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.net.HttpHeaders;
@@ -127,12 +128,7 @@ public abstract class ByteRangeInputStream extends FSInputStream {
       fileLength = null;
     } else {
       // for non-chunked transfer-encoding, get content-length
-      final String cl = connection.getHeaderField(HttpHeaders.CONTENT_LENGTH);
-      if (cl == null) {
-        throw new IOException(HttpHeaders.CONTENT_LENGTH + " is missing: "
-            + headers);
-      }
-      final long streamlength = Long.parseLong(cl);
+      long streamlength = getStreamLength(connection, headers);
       fileLength = startPos + streamlength;
 
       // Java has a bug with >2GB request streams.  It won't bounds check
@@ -143,6 +139,36 @@ public abstract class ByteRangeInputStream extends FSInputStream {
     return in;
   }
 
+  private static long getStreamLength(HttpURLConnection connection,
+      Map<String, List<String>> headers) throws IOException {
+    String cl = connection.getHeaderField(HttpHeaders.CONTENT_LENGTH);
+    if (cl == null) {
+      // Try to get the content length by parsing the content range
+      // because HftpFileSystem does not return the content length
+      // if the content is partial.
+      if (connection.getResponseCode() == HttpStatus.SC_PARTIAL_CONTENT) {
+        cl = connection.getHeaderField(HttpHeaders.CONTENT_RANGE);
+        return getLengthFromRange(cl);
+      } else {
+        throw new IOException(HttpHeaders.CONTENT_LENGTH + " is missing: "
+            + headers);
+      }
+    }
+    return Long.parseLong(cl);
+  }
+
+  private static long getLengthFromRange(String cl) throws IOException {
+    try {
+
+      String[] str = cl.substring(6).split("[-/]");
+      return Long.parseLong(str[1]) - Long.parseLong(str[0]) + 1;
+    } catch (Exception e) {
+      throw new IOException(
+          "failed to get content length by parsing the content range: " + cl
+              + " " + e.getMessage());
+    }
+  }
+
   private static boolean isChunkedTransferEncoding(
       final Map<String, List<String>> headers) {
     return contains(headers, HttpHeaders.TRANSFER_ENCODING, "chunked")
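
----------------------------------------------------------------------
For reference, the Content-Range arithmetic from the patch in isolation. This
is a standalone sketch, not part of the commit: the class name
ContentRangeLengthDemo and the sample header value are illustrative only. Per
RFC 7233, a Content-Range value looks like "bytes 100-499/1234", so
substring(6) strips the six-character "bytes " prefix and split("[-/]") yields
{start, end, total}; the response body then carries end - start + 1 bytes.

import java.io.IOException;

public class ContentRangeLengthDemo {

  // Mirrors ByteRangeInputStream#getLengthFromRange: strip the six-character
  // "bytes " prefix, split on '-' and '/', and return end - start + 1.
  static long getLengthFromRange(String cl) throws IOException {
    try {
      String[] str = cl.substring(6).split("[-/]");
      return Long.parseLong(str[1]) - Long.parseLong(str[0]) + 1;
    } catch (Exception e) {
      throw new IOException(
          "failed to get content length by parsing the content range: " + cl
              + " " + e.getMessage());
    }
  }

  public static void main(String[] args) throws IOException {
    // A 206 response for bytes 100..499 of a 1234-byte file carries
    // 499 - 100 + 1 = 400 bytes.
    System.out.println(getLengthFromRange("bytes 100-499/1234")); // prints 400
  }
}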