Posted to commits@hbase.apache.org by ji...@apache.org on 2008/11/02 09:36:38 UTC

svn commit: r709852 - in /hadoop/hbase/trunk: CHANGES.txt src/java/org/apache/hadoop/hbase/client/HTable.java src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java

Author: jimk
Date: Sun Nov  2 01:36:37 2008
New Revision: 709852

URL: http://svn.apache.org/viewvc?rev=709852&view=rev
Log:
HBASE-81    When a scanner lease times out, throw a more "user friendly" exception
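
For context, the user-visible effect of this change: a scan that sits idle longer
than the region server lease period ("hbase.regionserver.lease.period", 60 seconds
by default) now surfaces a ScannerTimeoutException instead of a bare
UnknownScannerException. A minimal caller-side sketch, assuming the 0.19-era
Scanner/RowResult client API (the table handle, column name and per-row work
below are illustrative assumptions, not part of this patch):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Scanner;
    import org.apache.hadoop.hbase.client.ScannerTimeoutException;
    import org.apache.hadoop.hbase.io.RowResult;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScanExample {
      static void scan(HTable table) throws IOException {
        // Assumed overload: getScanner(byte [][] columns) scans the whole table.
        Scanner scanner = table.getScanner(new byte [][] { Bytes.toBytes("info:") });
        try {
          RowResult row;
          while ((row = scanner.next()) != null) {
            // Slow per-row work here can outlive the server-side scanner lease.
            System.out.println(Bytes.toString(row.getRow()));
          }
        } catch (ScannerTimeoutException e) {
          // The lease expired between next() calls; the server has discarded
          // the scanner id, so the scan must be reopened, not retried as-is.
          System.err.println("scanner lease expired: " + e.getMessage());
        } finally {
          scanner.close();
        }
      }
    }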

Added:
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=709852&r1=709851&r2=709852&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Sun Nov  2 01:36:37 2008
@@ -92,7 +92,8 @@
                0.19.0 RC
    HBASE-785   Remove InfoServer, use HADOOP-3824 StatusHttpServer 
                instead (requires hadoop 0.19)
-    
+   HBASE-81    When a scanner lease times out, throw a more "user friendly" exception
+       
   NEW FEATURES
    HBASE-875   Use MurmurHash instead of JenkinsHash [in bloomfilters]
                (Andrzej Bialecki via Stack)

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=709852&r1=709851&r2=709852&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Sun Nov  2 01:36:37 2008
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
@@ -53,12 +54,13 @@
 public class HTable {
   private final HConnection connection;
   private final byte [] tableName;
-  private HBaseConfiguration configuration;
+  protected final int scannerTimeout;
+  private volatile HBaseConfiguration configuration;
   private ArrayList<BatchUpdate> writeBuffer;
   private long writeBufferSize;
   private boolean autoFlush;
   private long currentWriteBufferSize;
-  private int scannerCaching;
+  protected int scannerCaching;
 
   /**
    * Creates an object to access a HBase table
@@ -104,8 +106,10 @@
   public HTable(HBaseConfiguration conf, final byte [] tableName)
   throws IOException {
     this.connection = HConnectionManager.getConnection(conf);
-    this.configuration = conf;
     this.tableName = tableName;
+    this.scannerTimeout =
+      conf.getInt("hbase.regionserver.lease.period", 60 * 1000);
+    this.configuration = conf;
     this.connection.locateRegion(tableName, HConstants.EMPTY_START_ROW);
     this.writeBuffer = new ArrayList<BatchUpdate>();
     this.writeBufferSize = 
@@ -1037,7 +1041,8 @@
    
   /**
    * Release held resources
-   *
+   * 
+   * @throws IOException
   */
   public void close() throws IOException{
     flushCommits();
@@ -1154,8 +1159,10 @@
     private HRegionInfo currentRegion = null;
     private ScannerCallable callable = null;
     protected RowFilterInterface filter;
-    final private LinkedList<RowResult> cache = new LinkedList<RowResult>();
-    final private int scannerCaching = HTable.this.scannerCaching;
+    private final LinkedList<RowResult> cache = new LinkedList<RowResult>();
+    @SuppressWarnings("hiding")
+    private final int scannerCaching = HTable.this.scannerCaching;
+    private long lastNext;
 
     protected ClientScanner(final byte[][] columns, final byte [] startRow,
         final long timestamp, final RowFilterInterface filter) {
@@ -1175,6 +1182,7 @@
       if (filter != null) {
         filter.validate(columns);
       }
+      this.lastNext = System.currentTimeMillis();
     }
 
     //TODO: change visibility to protected
@@ -1280,7 +1288,19 @@
         // with a countdown in nextScanner
         callable.setCaching(this.scannerCaching);
         do {
-          values = getConnection().getRegionServerWithRetries(callable);
+          try {
+            values = getConnection().getRegionServerWithRetries(callable);
+          } catch (IOException e) {
+            if (e instanceof UnknownScannerException &&
+                lastNext + scannerTimeout < System.currentTimeMillis()) {
+              
+              ScannerTimeoutException ex = new ScannerTimeoutException();
+              ex.initCause(e);
+              throw ex;
+            }
+            throw e;
+          }
+          lastNext = System.currentTimeMillis();
           if (values != null && values.length > 0) {
             for (RowResult rs : values) {
               cache.add(rs);

Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java?rev=709852&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java Sun Nov  2 01:36:37 2008
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+
+/**
+ * Thrown when a scanner has timed out. 
+ */
+@SuppressWarnings("serial")
+public class ScannerTimeoutException extends DoNotRetryIOException {
+  /** default constructor */
+  ScannerTimeoutException() {
+    super();
+  }
+
+  /** @param s */
+  ScannerTimeoutException(String s) {
+    super(s);
+  }
+}
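
Note on the design: because ScannerTimeoutException extends DoNotRetryIOException,
the client's retry wrapper (getRegionServerWithRetries in HTable) will not keep
retrying a scanner id the server has already discarded; recovery is left to the
caller, who has to open a new scanner. One possible recovery pattern, sketched
under the assumption that the 0.19-era getScanner(byte [][] columns, byte []
startRow) overload is available (resuming at the last row seen is the caller's
choice, not something this patch adds):

    import java.io.IOException;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Scanner;
    import org.apache.hadoop.hbase.client.ScannerTimeoutException;
    import org.apache.hadoop.hbase.io.RowResult;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ResumingScanExample {
      static void scan(HTable table) throws IOException {
        byte [][] columns = new byte [][] { Bytes.toBytes("info:") };
        byte [] startRow = HConstants.EMPTY_START_ROW;
        boolean done = false;
        while (!done) {
          Scanner scanner = table.getScanner(columns, startRow);
          try {
            RowResult row;
            while ((row = scanner.next()) != null) {
              startRow = row.getRow();   // remember progress for a possible reopen
              // ... per-row work ...
            }
            done = true;
          } catch (ScannerTimeoutException e) {
            // Lease expired; loop around and reopen at the last row seen.
            // That row comes back again, so the per-row work must tolerate a
            // repeat (or the caller can advance startRow past it).
          } finally {
            try {
              scanner.close();
            } catch (IOException ioe) {
              // Best effort: the server may already have dropped this scanner.
            }
          }
        }
      }
    }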