Posted to commits@hbase.apache.org by st...@apache.org on 2009/09/02 01:16:58 UTC
svn commit: r810302 - in /hadoop/hbase/branches/0.20: CHANGES.txt
src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
Author: stack
Date: Tue Sep 1 23:16:57 2009
New Revision: 810302
URL: http://svn.apache.org/viewvc?rev=810302&view=rev
Log:
HBASE-1809 NPE thrown in BoundedRangeFileInputStream
Modified:
hadoop/hbase/branches/0.20/CHANGES.txt
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
Modified: hadoop/hbase/branches/0.20/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/CHANGES.txt?rev=810302&r1=810301&r2=810302&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.20/CHANGES.txt Tue Sep 1 23:16:57 2009
@@ -335,6 +335,7 @@
HBASE-1798 [Regression] Unable to delete a row in the future
HBASE-1780 HTable.flushCommits clears write buffer in finally clause
HBASE-1784 Missing rows after medium intensity insert
+ HBASE-1809 NPE thrown in BoundedRangeFileInputStream
IMPROVEMENTS
HBASE-1089 Add count of regions on filesystem to master UI; add percentage
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=810302&r1=810301&r2=810302&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java Tue Sep 1 23:16:57 2009
@@ -967,18 +967,26 @@
private ByteBuffer decompress(final long offset, final int compressedSize,
final int decompressedSize)
throws IOException {
- Decompressor decompressor = this.compressAlgo.getDecompressor();
- // My guess is that the bounded range fis is needed to stop the
- // decompressor reading into next block -- IIRC, it just grabs a
- // bunch of data w/o regard to whether decompressor is coming to end of a
- // decompression.
- InputStream is = this.compressAlgo.createDecompressionStream(
- new BoundedRangeFileInputStream(this.istream, offset, compressedSize),
- decompressor, 0);
- ByteBuffer buf = ByteBuffer.allocate(decompressedSize);
- IOUtils.readFully(is, buf.array(), 0, buf.capacity());
- is.close();
- this.compressAlgo.returnDecompressor(decompressor);
+
+ Decompressor decompressor = null;
+ ByteBuffer buf = null;
+
+ try {
+ decompressor = this.compressAlgo.getDecompressor();
+ // My guess is that the bounded range fis is needed to stop the
+ // decompressor reading into next block -- IIRC, it just grabs a
+ // bunch of data w/o regard to whether decompressor is coming to end of a
+ // decompression.
+ InputStream is = this.compressAlgo.createDecompressionStream(
+ new BoundedRangeFileInputStream(this.istream, offset, compressedSize),
+ decompressor, 0);
+ buf = ByteBuffer.allocate(decompressedSize);
+ IOUtils.readFully(is, buf.array(), 0, buf.capacity());
+ is.close();
+ } finally {
+ if (null != decompressor) {
+ this.compressAlgo.returnDecompressor(decompressor);
+ }
+ }
return buf;
}
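
The fix is the standard try/finally pattern for pooled resources: the Decompressor is
borrowed at the top of the try block and handed back in the finally block, so an
exception while opening the decompression stream or reading the block can no longer
strand it. The sketch below illustrates the same pattern against the stock Hadoop
CodecPool API. It is a standalone example, not the HBase code touched by this commit;
the class and method names (DecompressExample, decompress) are made up for
illustration.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;

public class DecompressExample {

  // Reads decompressedSize bytes of uncompressed data from the given
  // compressed stream. The pooled Decompressor is returned in the finally
  // block, so it is not leaked if createInputStream or readFully throws.
  static byte[] decompress(CompressionCodec codec, InputStream compressed,
      int decompressedSize) throws IOException {
    Decompressor decompressor = null;
    byte[] buf = new byte[decompressedSize];
    try {
      decompressor = CodecPool.getDecompressor(codec);
      InputStream is = codec.createInputStream(compressed, decompressor);
      IOUtils.readFully(is, buf, 0, buf.length);
      is.close();
    } finally {
      if (decompressor != null) {
        CodecPool.returnDecompressor(decompressor);
      }
    }
    return buf;
  }
}

Returning the decompressor unconditionally in finally (guarded by a null check, since
obtaining it may itself fail) is what the patch adds over the old code, which only
returned the decompressor on the success path.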