You are viewing a plain text version of this content; the canonical (hyperlinked) version is available in the Apache mailing-list archive.
Posted to common-commits@hadoop.apache.org by el...@apache.org on 2010/08/25 00:47:32 UTC
svn commit: r988750 - in /hadoop/common/branches/branch-0.21: CHANGES.txt
src/java/org/apache/hadoop/io/compress/BZip2Codec.java
src/test/core/org/apache/hadoop/io/compress/TestCodec.java
Author: eli
Date: Tue Aug 24 22:47:32 2010
New Revision: 988750
URL: http://svn.apache.org/viewvc?rev=988750&view=rev
Log:
Merge -r 988747:988748 from trunk to branch-0.21. Fixes: HADOOP-6925.
Modified:
hadoop/common/branches/branch-0.21/CHANGES.txt
hadoop/common/branches/branch-0.21/src/java/org/apache/hadoop/io/compress/BZip2Codec.java
hadoop/common/branches/branch-0.21/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
Modified: hadoop/common/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.21/CHANGES.txt?rev=988750&r1=988749&r2=988750&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.21/CHANGES.txt Tue Aug 24 22:47:32 2010
@@ -1,5 +1,12 @@
Hadoop Change Log
+Release 0.21.1 - Unreleased
+
+ BUG FIXES
+
+ HADOOP-6925. BZip2Codec incorrectly implements read().
+ (Todd Lipcon via Eli Collins)
+
Release 0.21.0 - 2010-08-13
INCOMPATIBLE CHANGES
Modified: hadoop/common/branches/branch-0.21/src/java/org/apache/hadoop/io/compress/BZip2Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.21/src/java/org/apache/hadoop/io/compress/BZip2Codec.java?rev=988750&r1=988749&r2=988750&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.21/src/java/org/apache/hadoop/io/compress/BZip2Codec.java (original)
+++ hadoop/common/branches/branch-0.21/src/java/org/apache/hadoop/io/compress/BZip2Codec.java Tue Aug 24 22:47:32 2010
@@ -443,7 +443,7 @@ public class BZip2Codec implements Split
public int read() throws IOException {
byte b[] = new byte[1];
int result = this.read(b, 0, 1);
- return (result < 0) ? result : b[0];
+ return (result < 0) ? result : (b[0] & 0xff);
}
private void internalReset() throws IOException {
Modified: hadoop/common/branches/branch-0.21/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.21/src/test/core/org/apache/hadoop/io/compress/TestCodec.java?rev=988750&r1=988749&r2=988750&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.21/src/test/core/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-0.21/src/test/core/org/apache/hadoop/io/compress/TestCodec.java Tue Aug 24 22:47:32 2010
@@ -129,10 +129,6 @@ public class TestCodec {
key.write(data);
value.write(data);
}
- DataInputBuffer originalData = new DataInputBuffer();
- DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
- originalData.reset(data.getData(), 0, data.getLength());
-
LOG.info("Generated " + count + " records");
// Compress data
@@ -156,6 +152,9 @@ public class TestCodec {
new DataInputStream(new BufferedInputStream(inflateFilter));
// Check
+ DataInputBuffer originalData = new DataInputBuffer();
+ originalData.reset(data.getData(), 0, data.getLength());
+ DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
for(int i=0; i < count; ++i) {
RandomDatum k1 = new RandomDatum();
RandomDatum v1 = new RandomDatum();
@@ -167,6 +166,23 @@ public class TestCodec {
k2.readFields(inflateIn);
v2.readFields(inflateIn);
}
+
+ // De-compress data byte-at-a-time
+ originalData.reset(data.getData(), 0, data.getLength());
+ deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
+ compressedDataBuffer.getLength());
+ inflateFilter =
+ codec.createInputStream(deCompressedDataBuffer);
+
+ // Check
+ originalIn = new DataInputStream(new BufferedInputStream(originalData));
+ int expected;
+ do {
+ expected = originalIn.read();
+ assertEquals("Inflated stream read by byte does not match",
+ expected, inflateFilter.read());
+ } while (expected != -1);
+
LOG.info("SUCCESS! Completed checking " + count + " records");
}