You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by dd...@apache.org on 2008/06/19 14:52:00 UTC
svn commit: r669474 - in /hadoop/core/branches/branch-0.18: CHANGES.txt
src/core/org/apache/hadoop/fs/HarFileSystem.java
src/mapred/org/apache/hadoop/mapred/FileInputFormat.java
Author: ddas
Date: Thu Jun 19 05:51:59 2008
New Revision: 669474
URL: http://svn.apache.org/viewvc?rev=669474&view=rev
Log:
Merge -r 669471:669472 from trunk onto 0.18 branch. Fixes HADOOP-3580.
Modified:
hadoop/core/branches/branch-0.18/CHANGES.txt
hadoop/core/branches/branch-0.18/src/core/org/apache/hadoop/fs/HarFileSystem.java
hadoop/core/branches/branch-0.18/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java
Modified: hadoop/core/branches/branch-0.18/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/CHANGES.txt?rev=669474&r1=669473&r2=669474&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.18/CHANGES.txt Thu Jun 19 05:51:59 2008
@@ -625,6 +625,9 @@
HADOOP-3533. Add deprecated methods to provide API compatibility
between 0.18 and 0.17. Remove the deprecated methods in trunk. (omalley)
+ HADOOP-3580. Fixes a problem to do with specifying a har as an input to
+ a job. (Mahadev Konar via ddas)
+
Release 0.17.1 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/core/branches/branch-0.18/src/core/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/core/org/apache/hadoop/fs/HarFileSystem.java?rev=669474&r1=669473&r2=669474&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/core/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/core/branches/branch-0.18/src/core/org/apache/hadoop/fs/HarFileSystem.java Thu Jun 19 05:51:59 2008
@@ -343,8 +343,24 @@
}
FileStatus fsFile = fs.getFileStatus(new Path(archivePath,
harStatus.getPartName()));
- return fs.getFileBlockLocations(fsFile,
- harStatus.getStartIndex(), harStatus.getLength());
+ BlockLocation[] rawBlocks = fs.getFileBlockLocations(fsFile,
+ harStatus.getStartIndex() + start, len);
+ return fakeBlockLocations(rawBlocks, harStatus.getStartIndex());
+ }
+
+ /**
+ * fake the rawBlocks since map reduce uses the block offsets to
+ * do some computations regarding the blocks
+ * @param rawBlocks the raw blocks returned by the filesystem
+ * @return faked blocks with changed offsets.
+ */
+ private BlockLocation[] fakeBlockLocations(BlockLocation[] rawBlocks,
+ long startIndex) {
+ for (BlockLocation block : rawBlocks) {
+ long rawOffset = block.getOffset();
+ block.setOffset(rawOffset - startIndex);
+ }
+ return rawBlocks;
}
/**
Modified: hadoop/core/branches/branch-0.18/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java?rev=669474&r1=669473&r2=669474&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java (original)
+++ hadoop/core/branches/branch-0.18/src/mapred/org/apache/hadoop/mapred/FileInputFormat.java Thu Jun 19 05:51:59 2008
@@ -331,7 +331,7 @@
return i;
}
}
- BlockLocation last = blkLocations[blkLocations.length];
+ BlockLocation last = blkLocations[blkLocations.length -1];
long fileLength = last.getOffset() + last.getLength() -1;
throw new IllegalArgumentException("Offset " + offset +
" is outside of file (0.." +