You are viewing a plain-text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2009/04/23 00:55:58 UTC
svn commit: r767704 - in /hadoop/hbase/trunk: CHANGES.txt
src/java/org/apache/hadoop/hbase/master/BaseScanner.java
src/java/org/apache/hadoop/hbase/regionserver/Store.java
Author: stack
Date: Wed Apr 22 22:55:58 2009
New Revision: 767704
URL: http://svn.apache.org/viewvc?rev=767704&view=rev
Log:
HBASE-1338 lost use of compaction.dir; we were compacting into live store subdirectory
Modified:
hadoop/hbase/trunk/CHANGES.txt
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java
Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=767704&r1=767703&r2=767704&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Wed Apr 22 22:55:58 2009
@@ -81,6 +81,8 @@
HBASE-1332 regionserver carrying .META. starts sucking all cpu, drives load
up - infinite loop? (Ryan Rawson via Stack)
HBASE-1334 .META. region running into hfile errors (Ryan Rawson via Stack)
+ HBASE-1338 lost use of compaction.dir; we were compacting into live store
+ subdirectory
IMPROVEMENTS
HBASE-1089 Add count of regions on filesystem to master UI; add percentage
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java?rev=767704&r1=767703&r2=767704&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java Wed Apr 22 22:55:58 2009
@@ -39,7 +39,6 @@
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.io.RowResult;
import org.apache.hadoop.hbase.ipc.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.HLog;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java?rev=767704&r1=767703&r2=767704&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java Wed Apr 22 22:55:58 2009
@@ -511,7 +511,16 @@
* @throws IOException
*/
HFile.Writer getWriter() throws IOException {
- return StoreFile.getWriter(this.fs, this.homedir, this.blocksize,
+ return getWriter(this.homedir);
+ }
+
+ /*
+ * @return Writer for this store.
+ * @param basedir Directory to put writer in.
+ * @throws IOException
+ */
+ private HFile.Writer getWriter(final Path basedir) throws IOException {
+ return StoreFile.getWriter(this.fs, basedir, this.blocksize,
this.compression, this.comparator.getRawComparator(), this.bloomfilter);
}
@@ -617,8 +626,8 @@
(forceSplit || (filesToCompact.size() < compactionThreshold))) {
return checkSplit(forceSplit);
}
- if (!fs.exists(compactionDir) && !fs.mkdirs(compactionDir)) {
- LOG.warn("Mkdir on " + compactionDir.toString() + " failed");
+ if (!fs.exists(this.compactionDir) && !fs.mkdirs(this.compactionDir)) {
+ LOG.warn("Mkdir on " + this.compactionDir.toString() + " failed");
return checkSplit(forceSplit);
}
@@ -671,7 +680,7 @@
}
// Step through them, writing to the brand-new file
- HFile.Writer writer = getWriter();
+ HFile.Writer writer = getWriter(this.compactionDir);
if (LOG.isDebugEnabled()) {
LOG.debug("Started compaction of " + filesToCompact.size() + " file(s)" +
(references? ", hasReferences=true,": " ") + " into " +