Posted to commits@hbase.apache.org by st...@apache.org on 2009/04/30 00:51:29 UTC

svn commit: r769981 - in /hadoop/hbase/trunk: ./ lib/ lib/jetty-ext/ lib/jsp-2.1/ lib/native/Linux-amd64-64/ lib/native/Linux-i386-32/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/io/hfile/ src/java/org/apache/hadoop/hbase/...

Author: stack
Date: Wed Apr 29 22:51:27 2009
New Revision: 769981

URL: http://svn.apache.org/viewvc?rev=769981&view=rev
Log:
HBASE-1348 Move 0.20.0 targeted TRUNK to 0.20.0 hadoop

Added:
    hadoop/hbase/trunk/lib/commons-el-from-jetty-5.1.4.jar   (with props)
    hadoop/hbase/trunk/lib/hadoop-0.20.0-core.jar   (with props)
    hadoop/hbase/trunk/lib/hadoop-0.20.0-test.jar   (with props)
    hadoop/hbase/trunk/lib/jasper-compiler-5.5.12.jar   (with props)
    hadoop/hbase/trunk/lib/jasper-runtime-5.5.12.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-6.1.14.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-util-6.1.14.jar   (with props)
    hadoop/hbase/trunk/lib/jsp-2.1/
    hadoop/hbase/trunk/lib/jsp-2.1/jsp-2.1.jar   (with props)
    hadoop/hbase/trunk/lib/jsp-2.1/jsp-api-2.1.jar   (with props)
    hadoop/hbase/trunk/lib/servlet-api-2.5-6.1.14.jar   (with props)
Removed:
    hadoop/hbase/trunk/lib/hadoop-0.19.0-core.jar
    hadoop/hbase/trunk/lib/hadoop-0.19.0-test.jar
    hadoop/hbase/trunk/lib/jetty-5.1.4.jar
    hadoop/hbase/trunk/lib/jetty-ext/
    hadoop/hbase/trunk/lib/servlet-api.jar
Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.a
    hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so
    hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so.1
    hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so.1.0.0
    hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.a
    hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so
    hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so.1
    hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so.1.0.0
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/Compression.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/InfoServer.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestMergeMeta.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Wed Apr 29 22:51:27 2009
@@ -12,6 +12,8 @@
    HBASE-1289  Remove "hbase.fully.distributed" option and update docs
                (Nitay Joffe via Stack)
    HBASE-1234  Change HBase StoreKey format
+   HBASE-1348  Move 0.20.0 targeted TRUNK to 0.20.0 hadoop
+               (Ryan Rawson and Stack)
 
   BUG FIXES
    HBASE-1140  "ant clean test" fails (Nitay Joffe via Stack)

Added: hadoop/hbase/trunk/lib/commons-el-from-jetty-5.1.4.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/commons-el-from-jetty-5.1.4.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/commons-el-from-jetty-5.1.4.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/hadoop-0.20.0-core.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/hadoop-0.20.0-core.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/hadoop-0.20.0-core.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/hadoop-0.20.0-test.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/hadoop-0.20.0-test.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/hadoop-0.20.0-test.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jasper-compiler-5.5.12.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jasper-compiler-5.5.12.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jasper-compiler-5.5.12.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jasper-runtime-5.5.12.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jasper-runtime-5.5.12.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jasper-runtime-5.5.12.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-6.1.14.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-6.1.14.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-6.1.14.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-util-6.1.14.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-util-6.1.14.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-util-6.1.14.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jsp-2.1/jsp-2.1.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jsp-2.1/jsp-2.1.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jsp-2.1/jsp-2.1.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jsp-2.1/jsp-api-2.1.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jsp-2.1/jsp-api-2.1.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jsp-2.1/jsp-api-2.1.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.a
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.a?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so.1
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so.1?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so.1.0.0
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-amd64-64/libhadoop.so.1.0.0?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.a
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.a?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so.1
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so.1?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Modified: hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so.1.0.0
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/native/Linux-i386-32/libhadoop.so.1.0.0?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
Binary files - no diff available.

Added: hadoop/hbase/trunk/lib/servlet-api-2.5-6.1.14.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/servlet-api-2.5-6.1.14.jar?rev=769981&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/servlet-api-2.5-6.1.14.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Wed Apr 29 22:51:27 2009
@@ -123,6 +123,7 @@
     private final long pause;
     private final int numRetries;
     private final int maxRPCAttempts;
+    private final long rpcTimeout;
 
     private final Object masterLock = new Object();
     private volatile boolean closed;
@@ -173,6 +174,7 @@
       this.pause = conf.getLong("hbase.client.pause", 2 * 1000);
       this.numRetries = conf.getInt("hbase.client.retries.number", 10);
       this.maxRPCAttempts = conf.getInt("hbase.client.rpc.maxattempts", 1);
+      this.rpcTimeout = conf.getLong("hbase.regionserver.lease.period", 60000);
       
       this.master = null;
       this.masterChecked = false;
@@ -775,7 +777,7 @@
             server = (HRegionInterface)HBaseRPC.waitForProxy(
                 serverInterfaceClass, HBaseRPCProtocolVersion.versionID,
                 regionServer.getInetSocketAddress(), this.conf, 
-                this.maxRPCAttempts);
+                this.maxRPCAttempts, this.rpcTimeout);
           } catch (RemoteException e) {
             throw RemoteExceptionHandler.decodeRemoteException(e);
           }

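The hunk above gives the client connection a bounded RPC timeout taken from hbase.regionserver.lease.period and threads it into HBaseRPC.waitForProxy(). A minimal sketch of reading that value, assuming only a stock HBaseConfiguration; the class and helper names are illustrative, while the key and default come from the hunk:

    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class RpcTimeoutExample {
      // The client now bounds waitForProxy() by the regionserver lease period,
      // so raising the lease period also lengthens how long clients will wait.
      static long readRpcTimeout(HBaseConfiguration conf) {
        return conf.getLong("hbase.regionserver.lease.period", 60000);
      }

      public static void main(String[] args) {
        HBaseConfiguration conf = new HBaseConfiguration();
        // e.g. an hbase-site.xml entry could raise the period to 120000 ms
        System.out.println("client rpc timeout (ms): " + readRpcTimeout(conf));
      }
    }
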
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Wed Apr 29 22:51:27 2009
@@ -1644,7 +1644,7 @@
         return false;
       }
       // Let the filter see current row.
-      this.filter.filterRowKey(endKey);
+      this.filter.filterRowKey(endKey, 0, endKey.length);
       return this.filter.filterAllRemaining();
     }
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/Compression.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/Compression.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/Compression.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/Compression.java Wed Apr 29 22:51:27 2009
@@ -33,7 +33,6 @@
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.io.compress.LzoCodec;
 
 /**
  * Compression related stuff.
@@ -73,61 +72,20 @@
    */
   public static enum Algorithm {
     LZO("lzo") {
-      private LzoCodec codec;
 
       @Override
       CompressionCodec getCodec() {
-        if (codec == null) {
-          Configuration conf = new Configuration();
-          conf.setBoolean("hadoop.native.lib", true);
-          codec = new LzoCodec();
-          codec.setConf(conf);
-        }
-
-        return codec;
+        throw new UnsupportedOperationException("LZO compression is disabled for now");
       }
-
       @Override
-      public synchronized InputStream createDecompressionStream(
-          InputStream downStream, Decompressor decompressor,
-          int downStreamBufferSize) throws IOException {
-        InputStream bis1 = null;
-        if (downStreamBufferSize > 0) {
-          bis1 = new BufferedInputStream(downStream, downStreamBufferSize);
-        }
-        else {
-          bis1 = downStream;
-        }
-        codec.getConf()
-            .setInt("io.compression.codec.lzo.buffersize", 64 * 1024);
-        CompressionInputStream cis =
-            codec.createInputStream(bis1, decompressor);
-        BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE);
-        return bis2;
+      public InputStream createDecompressionStream(InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException {
+        throw new UnsupportedOperationException("LZO compression is disabled for now");
       }
-
       @Override
-      public synchronized OutputStream createCompressionStream(
-          OutputStream downStream, Compressor compressor,
-          int downStreamBufferSize) throws IOException {
-        OutputStream bos1 = null;
-        if (downStreamBufferSize > 0) {
-          bos1 = new BufferedOutputStream(downStream, downStreamBufferSize);
-        }
-        else {
-          bos1 = downStream;
-        }
-        codec.getConf()
-            .setInt("io.compression.codec.lzo.buffersize", 64 * 1024);
-        CompressionOutputStream cos =
-            codec.createOutputStream(bos1, compressor);
-        BufferedOutputStream bos2 =
-            new BufferedOutputStream(new FinishOnFlushCompressionStream(cos),
-                DATA_OBUF_SIZE);
-        return bos2;
+      public OutputStream createCompressionStream(OutputStream downStream, Compressor compressor, int downStreamBufferSize) throws IOException {
+        throw new UnsupportedOperationException("LZO compression is disabled for now");
       }
     },
-
     GZ("gz") {
       private GzipCodec codec;
 

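With LZO support stubbed out above, Algorithm.LZO now throws UnsupportedOperationException from getCodec() and both stream factories, so writers have to ask for "gz" or "none". A minimal sketch of steering around the disabled codec; the HFile.Writer constructor and compression names appear elsewhere in this commit, while the helper and the 64 KB block size are illustrative:

    import java.io.IOException;

    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.hbase.io.hfile.HFile;

    public class CompressionChoice {
      // Pick a compression name the current build can actually satisfy.
      static String chooseCompression(boolean wantCompression) {
        return wantCompression ? "gz" : "none";
      }

      static HFile.Writer openWriter(FSDataOutputStream out) throws IOException {
        // Null comparator, as in the TestSeekTo hunk later in this commit.
        return new HFile.Writer(out, 64 * 1024, chooseCompression(true), null);
      }
    }
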
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/hfile/HFile.java Wed Apr 29 22:51:27 2009
@@ -1342,10 +1342,10 @@
     private BlockIndex() {
       this(null);
     }
-  
+
+
     /**
-     * Constructor
-     * @param trailer File tail structure with index stats.
+     * @param c comparator used to compare keys.
      */
     BlockIndex(final RawComparator<byte []>c) {
       this.comparator = c;

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseClient.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseClient.java Wed Apr 29 22:51:27 2009
@@ -302,7 +302,7 @@
             this.socket = socketFactory.createSocket();
             this.socket.setTcpNoDelay(tcpNoDelay);
             // connection time out is 20s
-            this.socket.connect(remoteId.getAddress(), 20000);
+            NetUtils.connect(this.socket, remoteId.getAddress(), 20000);
             this.socket.setSoTimeout(pingInterval);
             break;
           } catch (SocketTimeoutException toe) {
@@ -862,4 +862,4 @@
       return address.hashCode() ^ System.identityHashCode(ticket);
     }
   }  
-}
\ No newline at end of file
+}

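The connect call above now goes through NetUtils.connect() rather than Socket.connect(), presumably because Hadoop's helper enforces the 20s timeout for channel-backed sockets as well as plain ones. A minimal sketch of the same call, assuming a resolved address and the default socket factory (both illustrative here):

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.Socket;

    import javax.net.SocketFactory;

    import org.apache.hadoop.net.NetUtils;

    public class ConnectSketch {
      static Socket open(InetSocketAddress addr) throws IOException {
        Socket socket = SocketFactory.getDefault().createSocket();
        socket.setTcpNoDelay(true);
        // 20s connection timeout, mirroring the constant in the hunk above.
        NetUtils.connect(socket, addr, 20000);
        return socket;
      }
    }
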
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java Wed Apr 29 22:51:27 2009
@@ -45,7 +45,6 @@
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.VersionedProtocol;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -399,15 +398,19 @@
                                                long clientVersion,
                                                InetSocketAddress addr,
                                                Configuration conf,
-                                               int maxAttempts
+                                               int maxAttempts,
+                                               long timeout
                                                ) throws IOException {
     // HBase does limited number of reconnects which is different from hadoop.
+    long startTime = System.currentTimeMillis();
+    IOException ioe;
     int reconnectAttempts = 0;
     while (true) {
       try {
         return getProxy(protocol, clientVersion, addr, conf);
       } catch(ConnectException se) {  // namenode has not been started
         LOG.info("Server at " + addr + " not available yet, Zzzzz...");
+        ioe = se;
         if (maxAttempts >= 0 && ++reconnectAttempts >= maxAttempts) {
           LOG.info("Server at " + addr + " could not be reached after " +
                   reconnectAttempts + " tries, giving up.");
@@ -417,7 +420,14 @@
       }
       } catch(SocketTimeoutException te) {  // namenode is busy
         LOG.info("Problem connecting to server: " + addr);
+        ioe = te;
       }
+      // check if timed out
+      if (System.currentTimeMillis()-timeout >= startTime) {
+        throw ioe;
+      }
+
+      // wait for retry
       try {
         Thread.sleep(1000);
       } catch (InterruptedException ie) {
@@ -639,18 +649,9 @@
           rpcMetrics.rpcQueueTime.inc(qTime);
           rpcMetrics.rpcProcessingTime.inc(processingTime);
         }
-
-	MetricsTimeVaryingRate m = rpcMetrics.metricsList.get(call.getMethodName());
-
-	if (m != null) {
-		m.inc(processingTime);
-	}
-	else {
-		rpcMetrics.metricsList.put(call.getMethodName(), new MetricsTimeVaryingRate(call.getMethodName()));
-		m = rpcMetrics.metricsList.get(call.getMethodName());
-		m.inc(processingTime);
-	}
-
+        rpcMetrics.rpcQueueTime.inc(qTime);
+        rpcMetrics.rpcProcessingTime.inc(processingTime);
+        rpcMetrics.inc(call.getMethodName(), processingTime);
         if (verbose) log("Return: "+value);
 
         return new HbaseObjectWritable(method.getReturnType(), value);

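waitForProxy() now takes both a reconnect cap and an overall timeout: refused connections count against maxAttempts, and the last failure is rethrown once the elapsed time reaches the timeout. A minimal sketch of that retry discipline in isolation; the Attempt interface and method names are illustrative, not part of the patch:

    import java.io.IOException;
    import java.net.ConnectException;
    import java.net.SocketTimeoutException;

    public class BoundedRetry {
      interface Attempt<T> { T call() throws IOException; }

      static <T> T retry(Attempt<T> attempt, int maxAttempts, long timeoutMs)
      throws IOException, InterruptedException {
        long start = System.currentTimeMillis();
        int reconnects = 0;
        IOException last = null;
        while (true) {
          try {
            return attempt.call();
          } catch (ConnectException e) {        // server not started yet
            last = e;
            if (maxAttempts >= 0 && ++reconnects >= maxAttempts) {
              throw e;
            }
          } catch (SocketTimeoutException e) {  // server busy
            last = e;
          }
          // Same arithmetic as the hunk: once elapsed time reaches the timeout,
          // surface the last connection failure instead of sleeping again.
          if (System.currentTimeMillis() - start >= timeoutMs) {
            throw last;
          }
          Thread.sleep(1000);                   // wait before retrying
        }
      }
    }
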
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java Wed Apr 29 22:51:27 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
 import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.metrics.util.MetricsRegistry;
 
 /**
  * 
@@ -65,14 +66,29 @@
    *  - they can be set directly by calling their set/inc methods
    *  -they can also be read directly - e.g. JMX does this.
    */
-  
-  public MetricsTimeVaryingRate rpcQueueTime = new MetricsTimeVaryingRate("RpcQueueTime");
-  public MetricsTimeVaryingRate rpcProcessingTime = new MetricsTimeVaryingRate("RpcProcessingTime");
+  public MetricsRegistry registry = new MetricsRegistry();
 
-  public Map <String, MetricsTimeVaryingRate> metricsList = Collections.synchronizedMap(new HashMap<String, MetricsTimeVaryingRate>());
+  public MetricsTimeVaryingRate rpcQueueTime = new MetricsTimeVaryingRate("RpcQueueTime", registry);
+  public MetricsTimeVaryingRate rpcProcessingTime = new MetricsTimeVaryingRate("RpcProcessingTime", registry);
+
+  //public Map <String, MetricsTimeVaryingRate> metricsList = Collections.synchronizedMap(new HashMap<String, MetricsTimeVaryingRate>());
+
+  private MetricsTimeVaryingRate get(String key) {
+    return (MetricsTimeVaryingRate) registry.get(key);
+  }
+  private MetricsTimeVaryingRate create(String key) {
+    MetricsTimeVaryingRate newMetric = new MetricsTimeVaryingRate(key, this.registry);
+    return newMetric;
+  }
+
+  public synchronized void inc(String name, int amt) {
+    MetricsTimeVaryingRate m = get(name);
+    if (m == null) {
+      m = create(name);
+    }
+    m.inc(amt);
+  }
 
-  
-  
   /**
    * Push the metrics to the monitoring subsystem on doUpdate() call.
    * @param context
@@ -81,18 +97,14 @@
     rpcQueueTime.pushMetric(metricsRecord);
     rpcProcessingTime.pushMetric(metricsRecord);
 
-    synchronized (metricsList) {
-	// Iterate through the rpcMetrics hashmap to propogate the different rpc metrics.
-	Set<String> keys = metricsList.keySet();
-
-	Iterator<String> keyIter = keys.iterator();
-
-	while (keyIter.hasNext()) {
-		Object key = keyIter.next();
-		MetricsTimeVaryingRate value = metricsList.get(key);
+    synchronized (registry) {
+      // Iterate through the registry to propogate the different rpc metrics.
+
+      for (String metricName : registry.getKeyList() ) {
+        MetricsTimeVaryingRate value = (MetricsTimeVaryingRate) registry.get(metricName);
 
-		value.pushMetric(metricsRecord);
-	}
+        value.pushMetric(metricsRecord);
+      }
     }
     metricsRecord.update();
   }

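The per-method RPC timings move from a hand-rolled synchronized map into a MetricsRegistry, and callers (see the HBaseRPC hunk above) now just call rpcMetrics.inc(methodName, processingTime). A minimal sketch of that lookup-or-create pattern using only the registry calls visible in the hunk; the class name is illustrative:

    import org.apache.hadoop.metrics.util.MetricsRegistry;
    import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;

    public class PerMethodTimings {
      private final MetricsRegistry registry = new MetricsRegistry();

      // Record one observation for a named RPC method, creating the metric on first use.
      public synchronized void inc(String methodName, int processingTimeMs) {
        MetricsTimeVaryingRate m = (MetricsTimeVaryingRate) registry.get(methodName);
        if (m == null) {
          // Registering against the registry is what makes the metric visible to doUpdates().
          m = new MetricsTimeVaryingRate(methodName, registry);
        }
        m.inc(processingTimeMs);
      }
    }
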
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseServer.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseServer.java Wed Apr 29 22:51:27 2009
@@ -297,6 +297,8 @@
     public void run() {
       LOG.info(getName() + ": starting");
       SERVER.set(HBaseServer.this);
+      long lastPurgeTime = 0;   // last check for old calls.
+
       while (running) {
         SelectionKey key = null;
         try {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java Wed Apr 29 22:51:27 2009
@@ -410,7 +410,7 @@
       LOG.info("Stopping infoServer");
       try {
         this.infoServer.stop();
-      } catch (InterruptedException ex) {
+      } catch (Exception ex) {
         ex.printStackTrace();
       }
     }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java Wed Apr 29 22:51:27 2009
@@ -125,6 +125,9 @@
     String serverName = HServerInfo.getServerName(info);
     if (serversToServerInfo.containsKey(serverName) ||
         deadServers.contains(serverName)) {
+      LOG.debug("Server start was rejected: " + serverInfo);
+      LOG.debug("serversToServerInfo.containsKey: " + serversToServerInfo.containsKey(serverName));
+      LOG.debug("deadServers.contains: " + deadServers.contains(serverName));
       throw new Leases.LeaseStillHeldException(serverName);
     }
     Watcher watcher = new ServerExpirer(serverName, info.getServerAddress());

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java Wed Apr 29 22:51:27 2009
@@ -25,6 +25,7 @@
 import org.apache.hadoop.metrics.Updater;
 import org.apache.hadoop.metrics.jvm.JvmMetrics;
 import org.apache.hadoop.metrics.util.MetricsIntValue;
+import org.apache.hadoop.metrics.util.MetricsRegistry;
 
 
 /** 
@@ -37,12 +38,12 @@
 public class MasterMetrics implements Updater {
   private final Log LOG = LogFactory.getLog(this.getClass());
   private final MetricsRecord metricsRecord;
-  
+  private final MetricsRegistry registry = new MetricsRegistry();
   /*
    * Count of requests to the cluster since last call to metrics update
    */
   private final MetricsIntValue cluster_requests =
-    new MetricsIntValue("cluster_requests");
+    new MetricsIntValue("cluster_requests", registry);
 
   public MasterMetrics() {
     MetricsContext context = MetricsUtil.getContext("hbase");
@@ -90,7 +91,7 @@
    */
   public void incrementRequests(final int inc) {
     synchronized(this.cluster_requests) {
-      this.cluster_requests.inc(inc);
+      this.cluster_requests.set(this.cluster_requests.get() + inc);
     }
   }
 }
\ No newline at end of file

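cluster_requests is now registered with a MetricsRegistry, and incrementRequests() switches from inc() to an explicit read-modify-write, which suggests the hadoop 0.20 MetricsIntValue no longer exposes an inc() method. A minimal sketch of the same counter in isolation; class and method names are illustrative:

    import org.apache.hadoop.metrics.util.MetricsIntValue;
    import org.apache.hadoop.metrics.util.MetricsRegistry;

    public class ClusterRequestCounter {
      private final MetricsRegistry registry = new MetricsRegistry();
      private final MetricsIntValue clusterRequests =
          new MetricsIntValue("cluster_requests", registry);

      // Equivalent of incrementRequests() in the hunk: read-modify-write under a lock.
      public void incrementRequests(int inc) {
        synchronized (clusterRequests) {
          clusterRequests.set(clusterRequests.get() + inc);
        }
      }
    }
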
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Wed Apr 29 22:51:27 2009
@@ -220,6 +220,8 @@
   // A sleeper that sleeps for msgInterval.
   private final Sleeper sleeper;
 
+  private final long rpcTimeout;
+
   /**
    * Starts a HRegionServer at the default location
    * @param conf
@@ -316,6 +318,7 @@
     for(int i = 0; i < nbBlocks; i++)  {
       reservedSpace.add(new byte[DEFAULT_SIZE_RESERVATION_BLOCK]);
     }
+    this.rpcTimeout = conf.getLong("hbase.regionserver.lease.period", 60000);
   }
 
   /**
@@ -523,8 +526,8 @@
       LOG.info("Stopping infoServer");
       try {
         this.infoServer.stop();
-      } catch (InterruptedException ex) {
-        ex.printStackTrace();
+      } catch (Exception e) {
+        e.printStackTrace();
       }
     }
 
@@ -1181,7 +1184,7 @@
         master = (HMasterRegionInterface)HBaseRPC.waitForProxy(
             HMasterRegionInterface.class, HBaseRPCProtocolVersion.versionID,
             masterAddress.getInetSocketAddress(),
-            this.conf, -1);
+            this.conf, -1, this.rpcTimeout);
       } catch (IOException e) {
         LOG.warn("Unable to connect to master. Retrying. Error was:", e);
         sleeper.sleep();

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java Wed Apr 29 22:51:27 2009
@@ -30,6 +30,9 @@
 import org.apache.hadoop.metrics.Updater;
 import org.apache.hadoop.metrics.jvm.JvmMetrics;
 import org.apache.hadoop.metrics.util.MetricsIntValue;
+import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.metrics.util.MetricsLongValue;
+import org.apache.hadoop.metrics.util.MetricsRegistry;
 
 /** 
  * This class is for maintaining the various regionserver statistics
@@ -43,38 +46,43 @@
   private final MetricsRecord metricsRecord;
   private long lastUpdate = System.currentTimeMillis();
   private static final int MB = 1024*1024;
+  private MetricsRegistry registry = new MetricsRegistry();
+    
+  public final MetricsTimeVaryingRate atomicIncrementTime =
+    new MetricsTimeVaryingRate("atomicIncrementTime", registry);
   
   /**
    * Count of regions carried by this regionserver
    */
-  public final MetricsIntValue regions = new MetricsIntValue("hbase_regions");
-  
+  public final MetricsIntValue regions =
+    new MetricsIntValue("regions", registry);
+
   /*
    * Count of requests to the regionservers since last call to metrics update
    */
-  private final MetricsRate requests = new MetricsRate("hbase_requests");
+  private final MetricsRate requests = new MetricsRate("requests");
 
   /**
    * Count of stores open on the regionserver.
    */
-  public final MetricsIntValue stores = new MetricsIntValue("hbase_stores");
+  public final MetricsIntValue stores = new MetricsIntValue("stores", registry);
 
   /**
    * Count of storefiles open on the regionserver.
    */
-  public final MetricsIntValue storefiles = new MetricsIntValue("hbase_storefiles");
+  public final MetricsIntValue storefiles = new MetricsIntValue("storefiles", registry);
 
   /**
    * Sum of all the storefile index sizes in this regionserver in MB
    */
   public final MetricsIntValue storefileIndexSizeMB =
-    new MetricsIntValue("hbase_storefileIndexSizeMB");
+    new MetricsIntValue("storefileIndexSizeMB", registry);
 
   /**
    * Sum of all the memcache sizes in this regionserver in MB
    */
   public final MetricsIntValue memcacheSizeMB =
-    new MetricsIntValue("hbase_memcacheSizeMB");
+    new MetricsIntValue("memcacheSizeMB", registry);
 
   public RegionServerMetrics() {
     MetricsContext context = MetricsUtil.getContext("hbase");
@@ -134,8 +142,7 @@
     if (seconds == 0) {
       seconds = 1;
     }
-    sb = Strings.appendKeyValue(sb, "request",
-      Float.valueOf(this.requests.getPreviousIntervalValue()));
+    sb = Strings.appendKeyValue(sb, "request", Float.valueOf(getRequests()));
     sb = Strings.appendKeyValue(sb, "regions",
       Integer.valueOf(this.regions.get()));
     sb = Strings.appendKeyValue(sb, "stores",
@@ -156,4 +163,4 @@
       Long.valueOf(memory.getMax()/MB));
     return sb.toString();
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java Wed Apr 29 22:51:27 2009
@@ -19,15 +19,6 @@
  */
 package org.apache.hadoop.hbase.rest;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -38,8 +29,20 @@
 import org.apache.hadoop.hbase.rest.serializer.RestSerializerFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.InfoServer;
-import org.mortbay.http.NCSARequestLog;
-import org.mortbay.http.SocketListener;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.NCSARequestLog;
+import org.mortbay.jetty.bio.SocketConnector;
+import org.mortbay.jetty.handler.RequestLogHandler;
+import org.mortbay.jetty.webapp.WebAppContext;
+import org.mortbay.thread.QueuedThreadPool;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
 
 /**
  * Servlet implementation class for hbase REST interface. Presumes container
@@ -466,15 +469,30 @@
       printUsageAndExit();
     }
     org.mortbay.jetty.Server webServer = new org.mortbay.jetty.Server();
-    SocketListener listener = new SocketListener();
-    listener.setPort(port);
-    listener.setHost(bindAddress);
-    listener.setMaxThreads(numThreads);
-    webServer.addListener(listener);
+
+    Connector connector = new SocketConnector();
+    connector.setPort(port);
+    connector.setHost(bindAddress);
+
+    QueuedThreadPool pool = new QueuedThreadPool();
+    pool.setMaxThreads(numThreads);
+
+    webServer.addConnector(connector);
+    webServer.setThreadPool(pool);
+
+    WebAppContext wac = new WebAppContext();
+    wac.setContextPath("/");
+    wac.setWar(InfoServer.getWebAppDir("rest"));
+
     NCSARequestLog ncsa = new NCSARequestLog();
     ncsa.setLogLatency(true);
-    webServer.setRequestLog(ncsa);
-    webServer.addWebApplication("/", InfoServer.getWebAppDir("rest"));
+
+    RequestLogHandler rlh = new RequestLogHandler();
+    rlh.setRequestLog(ncsa);
+    rlh.setHandler(wac);
+
+    webServer.addHandler(rlh);
+
     webServer.start();
   }
 

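The REST front end moves from Jetty 5's SocketListener/addWebApplication() API to Jetty 6: a connector plus thread pool on the server, with the webapp wrapped in a RequestLogHandler. A minimal standalone sketch of that wiring, condensed from the hunk; the port, bind address, thread count and war path are illustrative values (the patch resolves the war via InfoServer.getWebAppDir("rest")):

    import org.mortbay.jetty.Connector;
    import org.mortbay.jetty.NCSARequestLog;
    import org.mortbay.jetty.Server;
    import org.mortbay.jetty.bio.SocketConnector;
    import org.mortbay.jetty.handler.RequestLogHandler;
    import org.mortbay.jetty.webapp.WebAppContext;
    import org.mortbay.thread.QueuedThreadPool;

    public class Jetty6RestSketch {
      public static void main(String[] args) throws Exception {
        Server webServer = new Server();

        // Jetty 6 splits the old SocketListener into a connector plus a thread pool.
        Connector connector = new SocketConnector();
        connector.setPort(60050);
        connector.setHost("0.0.0.0");
        webServer.addConnector(connector);

        QueuedThreadPool pool = new QueuedThreadPool();
        pool.setMaxThreads(10);
        webServer.setThreadPool(pool);

        // Mount the webapp at "/" and wrap it so every request hits the NCSA log.
        WebAppContext wac = new WebAppContext();
        wac.setContextPath("/");
        wac.setWar("webapps/rest");

        NCSARequestLog ncsa = new NCSARequestLog();
        ncsa.setLogLatency(true);

        RequestLogHandler rlh = new RequestLogHandler();
        rlh.setRequestLog(ncsa);
        rlh.setHandler(wac);
        webServer.addHandler(rlh);

        webServer.start();
        webServer.join();
      }
    }
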
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/InfoServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/InfoServer.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/InfoServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/InfoServer.java Wed Apr 29 22:51:27 2009
@@ -19,10 +19,14 @@
 
 import java.io.IOException;
 import java.net.URL;
+import java.util.Map;
 
 import org.apache.hadoop.http.HttpServer;
-import org.mortbay.http.HttpContext;
-import org.mortbay.http.handler.ResourceHandler;
+import org.mortbay.jetty.handler.ContextHandlerCollection;
+import org.mortbay.jetty.handler.HandlerCollection;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.DefaultServlet;
+import org.mortbay.jetty.webapp.WebAppContext;
 
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal
@@ -46,23 +50,46 @@
   public InfoServer(String name, String bindAddress, int port, boolean findPort)
   throws IOException {
     super(name, bindAddress, port, findPort);
-    
-    // Set up the context for "/logs/" if "hbase.log.dir" property is defined. 
+
+    HandlerCollection handlers =
+        new ContextHandlerCollection();
+
+    if (name.equals("master")) {
+      // Put up the rest webapp.
+      WebAppContext wac = new WebAppContext();
+      wac.setContextPath("/api");
+      wac.setWar(getWebAppDir("rest"));
+
+      handlers.addHandler(wac);
+    }
+    webServer.addHandler(handlers);
+  }
+
+  protected void addDefaultApps(ContextHandlerCollection parent, String appDir)
+  throws IOException {
+    super.addDefaultApps(parent, appDir);
+    // Must be same as up in hadoop.
+    final String logsContextPath = "/logs";
+    // Now, put my logs in place of hadoops... disable old one first.
+    Context oldLogsContext = null;
+    for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
+      if (e.getKey().getContextPath().equals(logsContextPath)) {
+        oldLogsContext = e.getKey();
+        break;
+      }
+    }
+    defaultContexts.put(oldLogsContext, Boolean.FALSE);
+    // Now do my logs.
+    // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
     String logDir = System.getProperty("hbase.log.dir");
     if (logDir != null) {
-      HttpContext logContext = new HttpContext();
-      logContext.setContextPath("/logs/*");
+      Context logContext = new Context(parent, "/logs");
       logContext.setResourceBase(logDir);
-      logContext.addHandler(new ResourceHandler());
-      webServer.addContext(logContext);
-    }
-    
-    if (name.equals("master")) {
-      // Put up the rest webapp.
-      webServer.addWebApplication("/api", getWebAppDir("rest"));
+      logContext.addServlet(DefaultServlet.class, "/");
+      defaultContexts.put(logContext, true);
     }
   }
-  
+
   /**
    * Get the pathname to the <code>path</code> files.
    * @return the pathname as a URL

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestMergeMeta.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestMergeMeta.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestMergeMeta.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestMergeMeta.java Wed Apr 29 22:51:27 2009
@@ -41,4 +41,4 @@
     assertNotNull(dfsCluster);
     HMerge.merge(conf, dfsCluster.getFileSystem(), HConstants.META_TABLE_NAME);
   }
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java Wed Apr 29 22:51:27 2009
@@ -23,18 +23,12 @@
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,24 +42,14 @@
  * Remove after tfile is committed and use the tfile version of this class
  * instead.</p>
  */
-public class TestHFile extends TestCase {
+public class TestHFile extends HBaseTestCase {
   static final Log LOG = LogFactory.getLog(TestHFile.class);
   
   private static String ROOT_DIR =
     System.getProperty("test.build.data", "/tmp/TestHFile");
-  private FileSystem fs;
-  private Configuration conf;
   private final int minBlockSize = 512;
   private static String localFormatter = "%010d";
 
-  @Override
-  public void setUp() {
-    conf = new HBaseConfiguration();
-    RawLocalFileSystem rawLFS = new RawLocalFileSystem();
-    rawLFS.setConf(conf);
-    fs = new LocalFileSystem(rawLFS);
-  }
-
   // write some records into the tfile
   // write them twice
   private int writeSomeRecords(Writer writer, int start, int n)
@@ -233,7 +217,7 @@
    * Make sure the orginals for our compression libs doesn't change on us.
    */
   public void testCompressionOrdinance() {
-    assertTrue(Compression.Algorithm.LZO.ordinal() == 0);
+    //assertTrue(Compression.Algorithm.LZO.ordinal() == 0);
     assertTrue(Compression.Algorithm.GZ.ordinal() == 1);
     assertTrue(Compression.Algorithm.NONE.ordinal() == 2);
   }

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java Wed Apr 29 22:51:27 2009
@@ -34,7 +34,6 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.io.compress.LzoCodec;
 
 /**
  *  Set of long-running tests to measure performance of HFile.
@@ -171,9 +170,7 @@
         writer.close();
     } else if ("SequenceFile".equals(fileType)){
         CompressionCodec codec = null;
-        if ("lzo".equals(codecName))
-          codec = new LzoCodec();
-        else if ("gz".equals(codecName))
+        if ("gz".equals(codecName))
           codec = new GzipCodec();
         else if (!"none".equals(codecName))
           throw new IOException("Codec not supported.");

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java?rev=769981&r1=769980&r2=769981&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java Wed Apr 29 22:51:27 2009
@@ -21,42 +21,19 @@
 
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * Test {@link HFileScanner#seekTo(byte[])} and its variants.
  */
-public class TestSeekTo extends TestCase {
-  private static String ROOT_DIR =
-    System.getProperty("test.build.data", "/tmp/TestHFile");
-
-  private HBaseConfiguration conf;
-  private LocalFileSystem fs;
-
-  @Override
-  public void setUp() {
-    conf = new HBaseConfiguration();
-    RawLocalFileSystem rawLFS = new RawLocalFileSystem();
-    rawLFS.setConf(conf);
-    fs = new LocalFileSystem(rawLFS);
-  }
-  private FSDataOutputStream createFSOutput(Path name) throws IOException {
-    if (fs.exists(name)) fs.delete(name, true);
-    FSDataOutputStream fout = fs.create(name);
-    return fout;
-  }
+public class TestSeekTo extends HBaseTestCase {
 
   Path makeNewFile() throws IOException {
-    Path ncTFile = new Path(ROOT_DIR, "basic.hfile");
-    FSDataOutputStream fout = createFSOutput(ncTFile);
+    Path ncTFile = new Path(this.testDir, "basic.hfile");
+    FSDataOutputStream fout = this.fs.create(ncTFile);
     HFile.Writer writer = new HFile.Writer(fout, 40, "none", null);
     // 4 bytes * 3 * 2 for each key/value +
     // 3 for keys, 15 for values = 42 (woot)