You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2010/05/14 21:34:47 UTC
svn commit: r944420 - in /hadoop/hbase/branches/0.20: CHANGES.txt
src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
Author: stack
Date: Fri May 14 19:34:46 2010
New Revision: 944420
URL: http://svn.apache.org/viewvc?rev=944420&view=rev
Log:
HBASE-2503 PriorityQueue isn't thread safe, KeyValueHeap uses it that way
Modified:
hadoop/hbase/branches/0.20/CHANGES.txt
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
Modified: hadoop/hbase/branches/0.20/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/CHANGES.txt?rev=944420&r1=944419&r2=944420&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.20/CHANGES.txt Fri May 14 19:34:46 2010
@@ -4,6 +4,7 @@ Release 0.20.5
HBASE-2545 Unresponsive region server, potential deadlock
(Todd Lipcon via Stack)
HBASE-2521 no license headers in 5 files
+ HBASE-2503 PriorityQueue isn't thread safe, KeyValueHeap uses it that way
Release 0.20.4 - Mon May 3 16:16:02 PDT 2010
INCOMPATIBLE CHANGES
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=944420&r1=944419&r2=944420&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java Fri May 14 19:34:46 2010
@@ -33,6 +33,7 @@ package org.apache.hadoop.hbase.regionse
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.NotServingRegionException;
+ import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
@@ -1818,11 +1819,13 @@ public class HRegion implements HConstan
private Scan theScan = null;
private int isScan;
private List<KeyValueScanner> extraScanners = null;
+ private boolean filterClosed = false;
RegionScanner(Scan scan, List<KeyValueScanner> additionalScanners) {
//DebugPrint.println("HRegionScanner.<init>");
this.filter = scan.getFilter();
+ // Doesn't need to be volatile, always accessed under a sync'ed method
this.oldFilter = scan.getOldFilter();
if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) {
this.stopRow = null;
@@ -1869,7 +1872,13 @@ public class HRegion implements HConstan
ReadWriteConsistencyControl.resetThreadReadPoint(rwcc);
}
- public boolean next(List<KeyValue> outResults) throws IOException {
+ public synchronized boolean next(List<KeyValue> outResults)
+ throws IOException {
+ if (this.filterClosed) {
+ throw new UnknownScannerException("Scanner was closed (timed out?) " +
+ "after we renewed it. Could be caused by a very slow scanner " +
+ "or a lengthy garbage collection");
+ }
if (closing.get() || closed.get()) {
close();
throw new NotServingRegionException(regionInfo.getRegionNameAsString() +
@@ -1899,7 +1908,7 @@ public class HRegion implements HConstan
/*
* @return True if a filter rules the scanner is over, done.
*/
- boolean isFilterDone() {
+ synchronized boolean isFilterDone() {
return
(this.filter != null && this.filter.filterAllRemaining()) ||
(this.oldFilter != null && oldFilter.filterAllRemaining());
@@ -1974,10 +1983,11 @@ public class HRegion implements HConstan
(oldFilter != null && this.oldFilter.filterRowKey(row, 0, row.length));
}
- public void close() {
+ public synchronized void close() {
if (storeHeap != null) {
storeHeap.close();
storeHeap = null;
+ this.filterClosed = true;
}
}
@@ -1985,7 +1995,7 @@ public class HRegion implements HConstan
*
* @param scanner to be closed
*/
- public void close(KeyValueScanner scanner) {
+ public synchronized void close(KeyValueScanner scanner) {
try {
scanner.close();
} catch(NullPointerException npe) {}
Modified: hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java?rev=944420&r1=944419&r2=944420&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java (original)
+++ hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java Fri May 14 19:34:46 2010
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -212,6 +213,34 @@ public class TestScanner extends HBaseTe
}
}
+ /**
+ * Test that closing a scanner while a client is using it doesn't throw
+ * NPEs but instead an UnknownScannerException. HBASE-2503
+ * @throws Exception
+ */
+ public void testRaceBetweenClientAndTimeout() throws Exception {
+ try {
+ this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
+ addContent(this.r, HConstants.CATALOG_FAMILY);
+ Scan scan = new Scan();
+ InternalScanner s = r.getScanner(scan);
+ List<KeyValue> results = new ArrayList<KeyValue>();
+ try {
+ s.next(results);
+ s.close();
+ s.next(results);
+ fail("We don't want anything more, we should be failing");
+ } catch (UnknownScannerException ex) {
+ // ok!
+ return;
+ }
+ } finally {
+ this.r.close();
+ this.r.getLog().closeAndDelete();
+ shutdownDfs(this.cluster);
+ }
+ }
+
/** The test!
* @throws IOException
*/