You are viewing a plain-text version of this content; the canonical (hyperlinked) version is available in the original mailing-list archive.
Posted to commits@hbase.apache.org by st...@apache.org on 2009/09/02 01:26:44 UTC

svn commit: r810310 - /hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java

Author: stack
Date: Tue Sep  1 23:26:44 2009
New Revision: 810310

URL: http://svn.apache.org/viewvc?rev=810310&view=rev
Log:
HBASE-1784 Missing rows after medium intensity insert

Modified:
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=810310&r1=810309&r2=810310&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java Tue Sep  1 23:26:44 2009
@@ -967,9 +967,8 @@
     private ByteBuffer decompress(final long offset, final int compressedSize,
       final int decompressedSize) 
     throws IOException {
-      
       Decompressor decompressor = null;
-      
+      ByteBuffer buf = null;
       try {
         decompressor = this.compressAlgo.getDecompressor();
         // My guess is that the bounded range fis is needed to stop the 
@@ -979,7 +978,7 @@
         InputStream is = this.compressAlgo.createDecompressionStream(
           new BoundedRangeFileInputStream(this.istream, offset, compressedSize),
           decompressor, 0);
-        ByteBuffer buf = ByteBuffer.allocate(decompressedSize);
+        buf = ByteBuffer.allocate(decompressedSize);
         IOUtils.readFully(is, buf.array(), 0, buf.capacity());
         is.close();        
       } finally {