Posted to commits@hbase.apache.org by en...@apache.org on 2013/08/29 23:23:39 UTC

svn commit: r1518831 - /hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java

Author: enis
Date: Thu Aug 29 21:23:38 2013
New Revision: 1518831

URL: http://svn.apache.org/r1518831
Log:
HBASE-9380 StoreFile.Reader is not being closed on memstore flush

Modified:
    hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java

Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java?rev=1518831&r1=1518830&r2=1518831&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java Thu Aug 29 21:23:38 2013
@@ -48,12 +48,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CompoundConfiguration;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -273,6 +273,7 @@ public class HStore implements Store {
     return ttl;
   }
 
+  @Override
   public String getColumnFamilyName() {
     return this.family.getNameAsString();
   }
@@ -308,6 +309,7 @@ public class HStore implements Store {
     return this.compactionCheckMultiplier;
   }
 
+  @Override
   public long getBlockingFileCount() {
     return blockingFileCount;
   }
@@ -344,6 +346,7 @@ public class HStore implements Store {
     return closeCheckInterval;
   }
 
+  @Override
   public HColumnDescriptor getFamily() {
     return this.family;
   }
@@ -419,6 +422,7 @@ public class HStore implements Store {
     for (final StoreFileInfo storeFileInfo: files) {
       // open each store file in parallel
       completionService.submit(new Callable<StoreFile>() {
+        @Override
         public StoreFile call() throws IOException {
           StoreFile storeFile = createStoreFileAndReader(storeFileInfo.getPath());
           return storeFile;
@@ -632,6 +636,7 @@ public class HStore implements Store {
           new ExecutorCompletionService<Void>(storeFileCloserThreadPool);
         for (final StoreFile f : result) {
           completionService.submit(new Callable<Void>() {
+            @Override
             public Void call() throws IOException {
               f.closeReader(true);
               return null;
@@ -779,6 +784,7 @@ public class HStore implements Store {
    * @param isCompaction whether we are creating a new file in a compaction
    * @return Writer for a new StoreFile in the tmp dir.
    */
+  @Override
   public StoreFile.Writer createWriterInTmp(long maxKeyCount,
     Compression.Algorithm compression, boolean isCompaction, boolean includeMVCCReadpoint)
   throws IOException {
@@ -944,6 +950,7 @@ public class HStore implements Store {
    * @throws IOException
    * @return Storefile we compacted into or null if we failed or opted out early.
    */
+  @Override
   public List<StoreFile> compact(CompactionContext compaction) throws IOException {
     assert compaction != null && compaction.hasSelection();
     CompactionRequest cr = compaction.getRequest();
@@ -1086,6 +1093,7 @@ public class HStore implements Store {
    * See HBASE-2331.
    * @param compaction
    */
+  @Override
   public void completeCompactionMarker(CompactionDescriptor compaction)
       throws IOException {
     LOG.debug("Completing compaction from the WAL marker");
@@ -1307,6 +1315,7 @@ public class HStore implements Store {
     return compaction;
   }
 
+  @Override
   public void cancelRequestedCompaction(CompactionContext compaction) {
     finishCompactionRequest(compaction.getRequest());
   }
@@ -1332,7 +1341,7 @@ public class HStore implements Store {
       throws IOException {
     StoreFile storeFile = null;
     try {
-      createStoreFileAndReader(path, NoOpDataBlockEncoder.INSTANCE);
+      storeFile = createStoreFileAndReader(path, NoOpDataBlockEncoder.INSTANCE);
     } catch (IOException e) {
       LOG.error("Failed to open store file : " + path
           + ", keeping it in tmp location", e);
@@ -1562,6 +1571,7 @@ public class HStore implements Store {
     return foundCandidate;
   }
 
+  @Override
   public boolean canSplit() {
     this.lock.readLock().lock();
     try {
@@ -1605,6 +1615,7 @@ public class HStore implements Store {
     return storeSize;
   }
 
+  @Override
   public void triggerMajorCompaction() {
     this.forceMajor = true;
   }
@@ -1783,6 +1794,7 @@ public class HStore implements Store {
     }
   }
 
+  @Override
   public StoreFlushContext createFlushContext(long cacheFlushId) {
     return new StoreFlusherImpl(cacheFlushId);
   }
@@ -1874,6 +1886,7 @@ public class HStore implements Store {
     return DEEP_OVERHEAD + this.memstore.heapSize();
   }
 
+  @Override
   public KeyValue.KVComparator getComparator() {
     return comparator;
   }