Posted to commits@hbase.apache.org by br...@apache.org on 2008/02/24 01:19:44 UTC
svn commit: r630550 [1/7] - in /hadoop/hbase/trunk: bin/ conf/
src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/
src/java/org/apache/hadoop/hbase/filter/
src/java/org/apache/hadoop/hbase/generated/regionserver/ src/java/org/apa...
Author: bryanduxbury
Date: Sat Feb 23 16:19:34 2008
New Revision: 630550
URL: http://svn.apache.org/viewvc?rev=630550&view=rev
Log:
419 Move RegionServer and related classes into regionserver package
Added:
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CacheFlushListener.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HInternalScannerInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreSize.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogRollListener.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/RegionUnavailableListener.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteAll.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteFamily.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestLogRolling.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestRegionServerExit.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestTimestamp.java
Removed:
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/CacheFlushListener.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HAbstractScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HInternalScannerInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HLog.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HLogEdit.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HLogKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegion.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStore.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreFile.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LogRollListener.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/RegionUnavailableListener.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/OOMERegionServer.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestCompaction.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestDeleteAll.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestDeleteFamily.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestGet.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestGet2.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestHLog.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestHMemcache.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestHRegion.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestHStoreFile.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestLogRolling.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestRegionServerExit.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestScanner.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestSplit.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTimestamp.java
Modified:
hadoop/hbase/trunk/bin/hbase
hadoop/hbase/trunk/conf/hbase-default.xml
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HScannerInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnection.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/AddColumn.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionStatusChange.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RowMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MultiRegionTable.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestCompare.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestInfoServers.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestScannerAPI.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestHTable.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
hadoop/hbase/trunk/src/webapps/regionserver/regionserver.jsp
Modified: hadoop/hbase/trunk/bin/hbase
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/bin/hbase?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/bin/hbase (original)
+++ hadoop/hbase/trunk/bin/hbase Sat Feb 23 16:19:34 2008
@@ -162,7 +162,7 @@
elif [ "$COMMAND" = "master" ] ; then
CLASS='org.apache.hadoop.hbase.master.HMaster'
elif [ "$COMMAND" = "regionserver" ] ; then
- CLASS='org.apache.hadoop.hbase.HRegionServer'
+ CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
elif [ "$COMMAND" = "rest" ] ; then
CLASS='org.apache.hadoop.hbase.rest.Dispatcher'
elif [ "$COMMAND" = "thrift" ] ; then
Modified: hadoop/hbase/trunk/conf/hbase-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-default.xml?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-default.xml (original)
+++ hadoop/hbase/trunk/conf/hbase-default.xml Sat Feb 23 16:19:34 2008
@@ -72,7 +72,7 @@
</property>
<property>
<name>hbase.regionserver.class</name>
- <value>org.apache.hadoop.hbase.HRegionInterface</value>
+ <value>org.apache.hadoop.hbase.regionserver.HRegionInterface</value>
<description>An interface that is assignable to HRegionInterface. Used in HClient for
opening proxy to remote region server.
</description>
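Note that deployments overriding hbase.regionserver.class in hbase-site.xml now need the relocated class name shown above. For illustration only (not part of this commit), a minimal Java sketch of reading the resulting value; the class name is hypothetical, and it assumes just the stock no-argument HBaseConfiguration constructor, which loads hbase-default.xml and hbase-site.xml:

import org.apache.hadoop.hbase.HBaseConfiguration;

/** Hypothetical check that prints the configured region server interface. */
public class ShowRegionServerClass {
  public static void main(String[] args) {
    HBaseConfiguration conf = new HBaseConfiguration();
    // Falls back to the new default if the property is not overridden.
    String cls = conf.get("hbase.regionserver.class",
        "org.apache.hadoop.hbase.regionserver.HRegionInterface");
    System.out.println("hbase.regionserver.class = " + cls);
  }
}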
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java Sat Feb 23 16:19:34 2008
@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
/**
* HConstants holds a bunch of HBase-related constants
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMerge.java Sat Feb 23 16:19:34 2008
@@ -37,6 +37,11 @@
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
+
+import org.apache.hadoop.hbase.regionserver.HLog;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
+
/**
* A non-instantiable class that has a static method capable of compacting
* a table by merging adjacent regions that have grown too small.
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HScannerInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HScannerInterface.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HScannerInterface.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HScannerInterface.java Sat Feb 23 16:19:34 2008
@@ -26,6 +26,7 @@
import java.util.SortedMap;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
/**
* HScannerInterface iterates through a set of rows. It's implemented by
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java Sat Feb 23 16:19:34 2008
@@ -32,6 +32,8 @@
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+
/**
* This class creates a single process HBase cluster. One thread is created for
* a master and one per region server.
@@ -159,7 +161,7 @@
while (regionServerThread.isAlive()) {
try {
LOG.info("Waiting on " +
- regionServerThread.getRegionServer().serverInfo.toString());
+ regionServerThread.getRegionServer().getServerInfo().toString());
regionServerThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java Sat Feb 23 16:19:34 2008
@@ -41,10 +41,11 @@
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HStoreKey;
+
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
/**
* Provides administrative functions for HBase
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnection.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnection.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnection.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnection.java Sat Feb 23 16:19:34 2008
@@ -28,7 +28,8 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HServerAddress;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+
/**
*
*/
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Sat Feb 23 16:19:34 2008
@@ -42,19 +42,18 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.NoServerForRegionException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
+
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
/**
* A non-instantiable class that manages connections to multiple tables in
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Sat Feb 23 16:19:34 2008
@@ -47,14 +47,13 @@
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HRegionInterface;
-import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.HRegionInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
/**
* Used to communicate with a single HBase table
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java Sat Feb 23 16:19:34 2008
@@ -35,7 +35,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hbase.HLogEdit;
+import org.apache.hadoop.hbase.regionserver.HLogEdit;
/**
* Implementation of RowFilterInterface that can filter by rowkey regular
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java Sat Feb 23 16:19:34 2008
@@ -6,8 +6,8 @@
import java.util.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.hbase.HRegionServer;
-import org.apache.hadoop.hbase.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HRegionInfo;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java Sat Feb 23 16:19:34 2008
@@ -31,7 +31,7 @@
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Shell;
import org.apache.hadoop.hbase.filter.RowFilterInterface;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Sat Feb 23 16:19:34 2008
@@ -30,10 +30,11 @@
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
+
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
/**
* A Writable Map.
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java Sat Feb 23 16:19:34 2008
@@ -25,7 +25,7 @@
import java.util.Map;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java Sat Feb 23 16:19:34 2008
@@ -21,7 +21,7 @@
import java.io.IOException;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java Sat Feb 23 16:19:34 2008
@@ -38,7 +38,7 @@
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.client.HTable;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java Sat Feb 23 16:19:34 2008
@@ -22,7 +22,7 @@
import java.io.IOException;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/AddColumn.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/AddColumn.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/AddColumn.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/AddColumn.java Sat Feb 23 16:19:34 2008
@@ -23,7 +23,7 @@
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.io.Text;
/** Instantiated to add a column family to a table */
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java Sat Feb 23 16:19:34 2008
@@ -32,8 +32,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Chore;
-import org.apache.hadoop.hbase.HRegion;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.io.Text;
@@ -44,10 +42,14 @@
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HStoreFile;
-import org.apache.hadoop.hbase.HStore;
-import org.apache.hadoop.hbase.HLog;
import org.apache.hadoop.hbase.io.BatchUpdate;
+
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
+import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.HLog;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+
/**
* Base HRegion scanner class. Holds utility common to <code>ROOT</code> and
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java Sat Feb 23 16:19:34 2008
@@ -25,7 +25,7 @@
import java.util.Map;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java Sat Feb 23 16:19:34 2008
@@ -22,7 +22,7 @@
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.util.Writables;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java Sat Feb 23 16:19:34 2008
@@ -23,9 +23,9 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HStoreFile;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
/** Instantiated to remove a column family from a table */
class DeleteColumn extends ColumnOperation {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java Sat Feb 23 16:19:34 2008
@@ -62,23 +62,25 @@
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.Leases;
import org.apache.hadoop.hbase.HServerAddress;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerLoad;
-import org.apache.hadoop.hbase.HRegion;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.HMsg;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.LeaseListener;
+
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
/**
* HMaster is the "master server" for HBase.
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java Sat Feb 23 16:19:34 2008
@@ -22,7 +22,7 @@
import java.util.Map;
import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.io.Text;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java Sat Feb 23 16:19:34 2008
@@ -21,7 +21,7 @@
import java.io.IOException;
-import org.apache.hadoop.hbase.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java Sat Feb 23 16:19:34 2008
@@ -25,7 +25,7 @@
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionStatusChange.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionStatusChange.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionStatusChange.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionStatusChange.java Sat Feb 23 16:19:34 2008
@@ -22,7 +22,7 @@
import java.io.IOException;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.io.Text;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java Sat Feb 23 16:19:34 2008
@@ -31,13 +31,14 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
-import org.apache.hadoop.hbase.HRegion;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.hbase.HLog;
+
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HLog;
/**
* Instantiated when a server's lease has expired, meaning it has crashed.
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionManager.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionManager.java Sat Feb 23 16:19:34 2008
@@ -45,8 +45,8 @@
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HServerLoad;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegion;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.HMsg;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.io.BatchUpdate;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RowMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RowMap.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RowMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RowMap.java Sat Feb 23 16:19:34 2008
@@ -29,7 +29,7 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
/*
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java Sat Feb 23 16:19:34 2008
@@ -23,12 +23,13 @@
import java.util.HashSet;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HRegion;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.io.Text;
+
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
/**
* Instantiated to delete a table
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java?rev=630550&r1=630549&r2=630550&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java Sat Feb 23 16:19:34 2008
@@ -28,7 +28,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionInterface;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CacheFlushListener.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CacheFlushListener.java?rev=630550&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CacheFlushListener.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CacheFlushListener.java Sat Feb 23 16:19:34 2008
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver;
+
+/**
+ * Implementors of this interface want to be notified when an HRegion
+ * determines that a cache flush is needed. A CacheFlushListener (or null)
+ * must be passed to the HRegion constructor.
+ */
+public interface CacheFlushListener {
+
+ /**
+ * Tell the listener the cache needs to be flushed.
+ *
+ * @param region the HRegion requesting the cache flush
+ */
+ void flushRequested(HRegion region);
+}
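For illustration only (not part of this commit), a minimal sketch of an implementor of the interface above; the class name is hypothetical, and nothing beyond the flushRequested(HRegion) contract shown here is assumed:

package org.apache.hadoop.hbase.regionserver;

/** Hypothetical listener that simply counts flush requests. */
public class CountingCacheFlushListener implements CacheFlushListener {
  private int flushesRequested = 0;

  /** Called by an HRegion whose cache needs to be flushed. */
  public synchronized void flushRequested(HRegion region) {
    flushesRequested++;
  }

  /** @return the number of flush requests seen so far */
  public synchronized int getFlushesRequested() {
    return flushesRequested;
  }
}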
Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java?rev=630550&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java Sat Feb 23 16:19:34 2008
@@ -0,0 +1,280 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.Vector;
+import java.util.Map.Entry;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Abstract base class that implements the HScannerInterface.
+ * Used by the concrete HMemcacheScanner and HStoreScanners
+ */
+public abstract class HAbstractScanner implements HInternalScannerInterface {
+ final Log LOG = LogFactory.getLog(this.getClass().getName());
+
+ // Pattern to determine if a column key is a regex
+ static Pattern isRegexPattern =
+ Pattern.compile("^.*[\\\\+|^&*$\\[\\]\\}{)(]+.*$");
+
+ /** The kind of match we are doing on a column: */
+ private static enum MATCH_TYPE {
+ /** Just check the column family name */
+ FAMILY_ONLY,
+ /** Column family + matches regex */
+ REGEX,
+ /** Literal matching */
+ SIMPLE
+ }
+
+ /**
+ * This class provides column matching functions that are more sophisticated
+ * than a simple string compare. There are three types of matching:
+ * <ol>
+ * <li>Match on the column family name only</li>
+ * <li>Match on the column family + column key regex</li>
+ * <li>Simple match: compare column family + column key literally</li>
+ * </ol>
+ */
+ private static class ColumnMatcher {
+ private boolean wildCardmatch;
+ private MATCH_TYPE matchType;
+ private Text family;
+ private Pattern columnMatcher;
+ private Text col;
+
+ ColumnMatcher(final Text col) throws IOException {
+ Text qualifier = HStoreKey.extractQualifier(col);
+ try {
+ if(qualifier == null || qualifier.getLength() == 0) {
+ this.matchType = MATCH_TYPE.FAMILY_ONLY;
+ this.family = HStoreKey.extractFamily(col).toText();
+ this.wildCardmatch = true;
+ } else if(isRegexPattern.matcher(qualifier.toString()).matches()) {
+ this.matchType = MATCH_TYPE.REGEX;
+ this.columnMatcher = Pattern.compile(col.toString());
+ this.wildCardmatch = true;
+ } else {
+ this.matchType = MATCH_TYPE.SIMPLE;
+ this.col = col;
+ this.wildCardmatch = false;
+ }
+ } catch(Exception e) {
+ throw new IOException("Column: " + col + ": " + e.getMessage());
+ }
+ }
+
+ /** Matching method */
+ boolean matches(Text c) throws IOException {
+ if(this.matchType == MATCH_TYPE.SIMPLE) {
+ return c.equals(this.col);
+ } else if(this.matchType == MATCH_TYPE.FAMILY_ONLY) {
+ return HStoreKey.extractFamily(c).equals(this.family);
+ } else if(this.matchType == MATCH_TYPE.REGEX) {
+ return this.columnMatcher.matcher(c.toString()).matches();
+ } else {
+ throw new IOException("Invalid match type: " + this.matchType);
+ }
+ }
+
+ boolean isWildCardMatch() {
+ return this.wildCardmatch;
+ }
+ }
+
+ protected TreeMap<Text, Vector<ColumnMatcher>> okCols; // Holds matchers for each column family
+
+ protected boolean scannerClosed = false; // True when scanning is done
+
+ // Keys retrieved from the sources
+ protected HStoreKey keys[];
+ // Values that correspond to those keys
+ protected byte [][] vals;
+
+ protected long timestamp; // The timestamp to match entries against
+ private boolean wildcardMatch;
+ private boolean multipleMatchers;
+
+ /** Constructor for abstract base class */
+ HAbstractScanner(long timestamp, Text[] targetCols) throws IOException {
+ this.timestamp = timestamp;
+ this.wildcardMatch = false;
+ this.multipleMatchers = false;
+ this.okCols = new TreeMap<Text, Vector<ColumnMatcher>>();
+ for(int i = 0; i < targetCols.length; i++) {
+ Text family = HStoreKey.extractFamily(targetCols[i]).toText();
+ Vector<ColumnMatcher> matchers = okCols.get(family);
+ if(matchers == null) {
+ matchers = new Vector<ColumnMatcher>();
+ }
+ ColumnMatcher matcher = new ColumnMatcher(targetCols[i]);
+ if (matcher.isWildCardMatch()) {
+ this.wildcardMatch = true;
+ }
+ matchers.add(matcher);
+ if (matchers.size() > 1) {
+ this.multipleMatchers = true;
+ }
+ okCols.put(family, matchers);
+ }
+ }
+
+ /**
+ * For a particular column i, find all the matchers defined for the column.
+ * Compare the column family and column key using the matchers. The first one
+ * that matches returns true. If no matchers are successful, return false.
+ *
+ * @param i index into the keys array
+ * @return true - if any of the matchers for the column match the column family
+ * and the column key.
+ *
+ * @throws IOException
+ */
+ boolean columnMatch(int i) throws IOException {
+ Text column = keys[i].getColumn();
+ Vector<ColumnMatcher> matchers =
+ okCols.get(HStoreKey.extractFamily(column));
+ if(matchers == null) {
+ return false;
+ }
+ for(int m = 0; m < matchers.size(); m++) {
+ if(matchers.get(m).matches(column)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * If the user didn't want to start scanning at the first row, this method
+ * seeks to the requested row.
+ */
+ abstract boolean findFirstRow(int i, Text firstRow) throws IOException;
+
+ /** The concrete implementations provide a mechanism to find the next set of values */
+ abstract boolean getNext(int i) throws IOException;
+
+ /** Mechanism used by concrete implementation to shut down a particular scanner */
+ abstract void closeSubScanner(int i);
+
+ /** {@inheritDoc} */
+ public boolean isWildcardScanner() {
+ return this.wildcardMatch;
+ }
+
+ /** {@inheritDoc} */
+ public boolean isMultipleMatchScanner() {
+ return this.multipleMatchers;
+ }
+
+ /**
+ * Get the next set of values for this scanner.
+ *
+ * @param key The key that matched
+ * @param results All the results for <code>key</code>
+ * @return true if a match was found
+ * @throws IOException
+ *
+ * @see org.apache.hadoop.hbase.HScannerInterface#next(org.apache.hadoop.hbase.HStoreKey, java.util.SortedMap)
+ */
+ public boolean next(HStoreKey key, SortedMap<Text, byte []> results)
+ throws IOException {
+ if (scannerClosed) {
+ return false;
+ }
+ // Find the next row label (and timestamp)
+ Text chosenRow = null;
+ long chosenTimestamp = -1;
+ for(int i = 0; i < keys.length; i++) {
+ if((keys[i] != null)
+ && (columnMatch(i))
+ && (keys[i].getTimestamp() <= this.timestamp)
+ && ((chosenRow == null)
+ || (keys[i].getRow().compareTo(chosenRow) < 0)
+ || ((keys[i].getRow().compareTo(chosenRow) == 0)
+ && (keys[i].getTimestamp() > chosenTimestamp)))) {
+ chosenRow = new Text(keys[i].getRow());
+ chosenTimestamp = keys[i].getTimestamp();
+ }
+ }
+
+ // Grab all the values that match this row/timestamp
+ boolean insertedItem = false;
+ if(chosenRow != null) {
+ key.setRow(chosenRow);
+ key.setVersion(chosenTimestamp);
+ key.setColumn(new Text(""));
+
+ for(int i = 0; i < keys.length; i++) {
+ // Fetch the data
+ while((keys[i] != null)
+ && (keys[i].getRow().compareTo(chosenRow) == 0)) {
+
+ // If we are doing a wild card match or there are multiple matchers
+ // per column, we need to scan all the older versions of this row
+ // to pick up the rest of the family members
+
+ if(!wildcardMatch
+ && !multipleMatchers
+ && (keys[i].getTimestamp() != chosenTimestamp)) {
+ break;
+ }
+
+ if(columnMatch(i)) {
+ // We only want the first result for any specific family member
+ if(!results.containsKey(keys[i].getColumn())) {
+ results.put(new Text(keys[i].getColumn()), vals[i]);
+ insertedItem = true;
+ }
+ }
+
+ if(!getNext(i)) {
+ closeSubScanner(i);
+ }
+ }
+
+ // Advance the current scanner beyond the chosen row, to
+ // a valid timestamp, so we're ready next time.
+
+ while((keys[i] != null)
+ && ((keys[i].getRow().compareTo(chosenRow) <= 0)
+ || (keys[i].getTimestamp() > this.timestamp)
+ || (! columnMatch(i)))) {
+ getNext(i);
+ }
+ }
+ }
+ return insertedItem;
+ }
+
+ /** {@inheritDoc} */
+ public Iterator<Entry<HStoreKey, SortedMap<Text, byte[]>>> iterator() {
+ throw new UnsupportedOperationException("Unimplemented serverside. " +
+ "next(HStoreKey, StortedMap(...) is more efficient");
+ }
+}
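For illustration only (not part of this commit), a self-contained sketch that mirrors how the ColumnMatcher above picks FAMILY_ONLY, REGEX or SIMPLE matching, reusing the same regex-detection pattern; the class and method names are hypothetical, and the qualifier is passed as a plain String instead of being extracted with HStoreKey:

import java.util.regex.Pattern;

public class MatchTypeDemo {
  // Same pattern literal as HAbstractScanner.isRegexPattern above.
  static final Pattern IS_REGEX =
      Pattern.compile("^.*[\\\\+|^&*$\\[\\]\\}{)(]+.*$");

  static String matchType(String qualifier) {
    if (qualifier == null || qualifier.length() == 0) {
      return "FAMILY_ONLY";   // e.g. "info:" -> match the whole family
    } else if (IS_REGEX.matcher(qualifier).matches()) {
      return "REGEX";         // e.g. "info:na*" -> qualifier has a metacharacter
    } else {
      return "SIMPLE";        // e.g. "info:name" -> literal compare
    }
  }

  public static void main(String[] args) {
    System.out.println(matchType(""));     // FAMILY_ONLY
    System.out.println(matchType("na*"));  // REGEX
    System.out.println(matchType("name")); // SIMPLE
  }
}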
Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HInternalScannerInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HInternalScannerInterface.java?rev=630550&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HInternalScannerInterface.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HInternalScannerInterface.java Sat Feb 23 16:19:34 2008
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.hbase.HScannerInterface;
+
+/**
+ * Internally, we need to be able to determine if the scanner is doing wildcard
+ * column matches (when only a column family is specified or if a column regex
+ * is specified) or if multiple members of the same column family were
+ * specified. If so, we need to ignore the timestamp to ensure that we get all
+ * the family members, as they may have been last updated at different times.
+ */
+public interface HInternalScannerInterface extends HScannerInterface {
+ /** @return true if the scanner is matching a column family or regex */
+ public boolean isWildcardScanner();
+
+ /** @return true if the scanner is matching multiple column family members */
+ public boolean isMultipleMatchScanner();
+}
\ No newline at end of file
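For illustration only (not part of this commit), a minimal sketch of how a caller might drive an HInternalScannerInterface, using only the next(HStoreKey, SortedMap) contract shown in HAbstractScanner above; the helper class name is hypothetical, and HStoreKey's no-argument constructor is assumed:

package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.hadoop.io.Text;

/** Hypothetical helper that drains a scanner and counts the rows returned. */
public class ScannerDrain {
  public static int countRows(HInternalScannerInterface scanner)
      throws IOException {
    int rows = 0;
    HStoreKey key = new HStoreKey();
    SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
    // next() fills 'key' with the chosen row/timestamp and 'results' with
    // column -> value pairs for that row; it returns false when exhausted.
    while (scanner.next(key, results)) {
      rows++;
      results.clear();
    }
    return rows;
  }
}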
Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java?rev=630550&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java Sat Feb 23 16:19:34 2008
@@ -0,0 +1,635 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.SequenceFile.Reader;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+
+/**
+ * HLog stores all the edits to the HStore.
+ *
+ * It performs logfile-rolling, so external callers are not aware that the
+ * underlying file is being rolled.
+ *
+ * <p>
+ * A single HLog is used by several HRegions simultaneously.
+ *
+ * <p>
+ * Each HRegion is identified by a unique name. HRegions do not need to
+ * declare themselves before using the HLog; they simply pass their name in
+ * the <code>append</code> or <code>completeCacheFlush</code> calls.
+ *
+ * <p>
+ * An HLog consists of multiple on-disk files, which have a chronological order.
+ * As data is flushed to other (better) on-disk structures, the log becomes
+ * obsolete. We can destroy all the log messages for a given HRegion-id up to
+ * the most-recent CACHEFLUSH message from that HRegion.
+ *
+ * <p>
+ * It's only practical to delete entire files. Thus, we delete an entire on-disk
+ * file F when all of the messages in F have a log-sequence-id that's older
+ * (smaller) than the most-recent CACHEFLUSH message for every HRegion that has
+ * a message in F.
+ *
+ * <p>
+ * Synchronized methods can never execute in parallel. However, between the
+ * start of a cache flush and the completion point, appends are allowed but log
+ * rolling is not. To prevent log rolling taking place during this period, a
+ * separate reentrant lock is used.
+ *
+ * <p>
+ * TODO: Vuk Ercegovac also pointed out that keeping HBase HRegion edit logs in
+ * HDFS is currently flawed. HBase writes edits to logs and to a memcache. The
+ * 'atomic' write to the log is meant to serve as insurance against abnormal
+ * RegionServer exit: on startup, the log is rerun to reconstruct an HRegion's
+ * last wholesome state. But files in HDFS do not 'exist' until they are cleanly
+ * closed -- something that will not happen if RegionServer exits without
+ * running its 'close'.
+ */
+public class HLog implements HConstants {
+ private static final Log LOG = LogFactory.getLog(HLog.class);
+ private static final String HLOG_DATFILE = "hlog.dat.";
+ static final Text METACOLUMN = new Text("METACOLUMN:");
+ static final Text METAROW = new Text("METAROW");
+ final FileSystem fs;
+ final Path dir;
+ final Configuration conf;
+ final LogRollListener listener;
+ final long threadWakeFrequency;
+ private final int maxlogentries;
+
+ /*
+ * Current log file.
+ */
+ SequenceFile.Writer writer;
+
+ /*
+ * Map of all log files but the current one.
+ */
+ final SortedMap<Long, Path> outputfiles =
+ Collections.synchronizedSortedMap(new TreeMap<Long, Path>());
+
+ /*
+ * Map of region to last sequence/edit id.
+ */
+ final Map<Text, Long> lastSeqWritten = new ConcurrentHashMap<Text, Long>();
+
+ volatile boolean closed = false;
+
+ private final Integer sequenceLock = new Integer(0);
+ volatile long logSeqNum = 0;
+
+ volatile long filenum = 0;
+ volatile long old_filenum = -1;
+
+ volatile int numEntries = 0;
+
+ // This lock prevents starting a log roll during a cache flush.
+ // synchronized is insufficient because a cache flush spans two method calls.
+ private final Lock cacheFlushLock = new ReentrantLock();
+
+ // We synchronize on updateLock to prevent updates and to prevent a log roll
+ // during an update
+ private final Integer updateLock = new Integer(0);
+
+ /**
+ * Create an edit log at the given <code>dir</code> location.
+ *
+ * You should never have to load an existing log. If there is a log at
+ * startup, it should have already been processed and deleted by the time the
+ * HLog object is started up.
+ *
+ * @param fs filesystem to write the log to
+ * @param dir directory where the log files will be created
+ * @param conf configuration to use
+ * @param listener listener to notify when a log roll is needed
+ * @throws IOException
+ */
+ public HLog(final FileSystem fs, final Path dir, final Configuration conf,
+ final LogRollListener listener) throws IOException {
+ this.fs = fs;
+ this.dir = dir;
+ this.conf = conf;
+ this.listener = listener;
+ this.threadWakeFrequency = conf.getLong(THREAD_WAKE_FREQUENCY, 10 * 1000);
+ this.maxlogentries =
+ conf.getInt("hbase.regionserver.maxlogentries", 30 * 1000);
+ if (fs.exists(dir)) {
+ throw new IOException("Target HLog directory already exists: " + dir);
+ }
+ fs.mkdirs(dir);
+ rollWriter();
+ }
+
+ /**
+ * Get the compression type for the hlog files.
+ * @param c Configuration to use.
+ * @return the kind of compression to use
+ */
+ private static CompressionType getCompressionType(final Configuration c) {
+ String name = c.get("hbase.io.seqfile.compression.type");
+ return name == null? CompressionType.NONE: CompressionType.valueOf(name);
+ }
+
+ /**
+ * Called by HRegionServer when it opens a new region to ensure that log
+ * sequence numbers are always greater than the latest sequence number of the
+ * region being brought on-line.
+ *
+ * @param newvalue We'll set log edit/sequence number to this value if it
+ * is greater than the current value.
+ */
+ void setSequenceNumber(long newvalue) {
+ synchronized (sequenceLock) {
+ if (newvalue > logSeqNum) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("changing sequence number from " + logSeqNum + " to " +
+ newvalue);
+ }
+ logSeqNum = newvalue;
+ }
+ }
+ }
+
+ /**
+ * Roll the log writer. That is, start writing log messages to a new file.
+ *
+ * Because a log cannot be rolled during a cache flush, and a cache flush
+ * spans two method calls, a special lock needs to be obtained so that a cache
+ * flush cannot start when the log is being rolled and the log cannot be
+ * rolled during a cache flush.
+ *
+ * <p>Note that this method cannot be synchronized. If it were, the following
+ * deadlock could occur: startCacheFlush runs and obtains the cacheFlushLock;
+ * this method then starts, takes the monitor on <code>this</code>, and blocks
+ * waiting for the cacheFlushLock; completeCacheFlush is then called, waits
+ * for the monitor on <code>this</code>, and so never releases the
+ * cacheFlushLock.
+ *
+ * @throws IOException
+ */
+ void rollWriter() throws IOException {
+ this.cacheFlushLock.lock();
+ try {
+ if (closed) {
+ return;
+ }
+ synchronized (updateLock) {
+ if (this.writer != null) {
+ // Close the current writer, get a new one.
+ this.writer.close();
+ Path p = computeFilename(old_filenum);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Closing current log writer " + p.toString() +
+ " to get a new one");
+ }
+ if (filenum > 0) {
+ synchronized (this.sequenceLock) {
+ this.outputfiles.put(Long.valueOf(this.logSeqNum - 1), p);
+ }
+ }
+ }
+ old_filenum = filenum;
+ filenum = System.currentTimeMillis();
+ Path newPath = computeFilename(filenum);
+ this.writer = SequenceFile.createWriter(this.fs, this.conf, newPath,
+ HLogKey.class, HLogEdit.class, getCompressionType(this.conf));
+ LOG.info("new log writer created at " + newPath);
+
+ // Can we delete any of the old log files?
+ if (this.outputfiles.size() > 0) {
+ if (this.lastSeqWritten.size() <= 0) {
+ LOG.debug("Last sequence written is empty. Deleting all old hlogs");
+ // If so, then no new writes have come in since all regions were
+ // flushed (and removed from the lastSeqWritten map). Means can
+ // remove all but currently open log file.
+ for (Map.Entry<Long, Path> e : this.outputfiles.entrySet()) {
+ deleteLogFile(e.getValue(), e.getKey());
+ }
+ this.outputfiles.clear();
+ } else {
+ // Get oldest edit/sequence id. If logs are older than this id,
+ // then safe to remove.
+ Long oldestOutstandingSeqNum =
+ Collections.min(this.lastSeqWritten.values());
+ // Get the set of all log files whose final ID is older than or
+ // equal to the oldest pending region operation
+ TreeSet<Long> sequenceNumbers =
+ new TreeSet<Long>(this.outputfiles.headMap(
+ (Long.valueOf(oldestOutstandingSeqNum.longValue() + 1L))).keySet());
+ // Now remove old log files (if any)
+ if (LOG.isDebugEnabled()) {
+ // Find region associated with oldest key -- helps debugging.
+ Text oldestRegion = null;
+ for (Map.Entry<Text, Long> e: this.lastSeqWritten.entrySet()) {
+ if (e.getValue().longValue() == oldestOutstandingSeqNum.longValue()) {
+ oldestRegion = e.getKey();
+ break;
+ }
+ }
+ if (LOG.isDebugEnabled() && sequenceNumbers.size() > 0) {
+ LOG.debug("Found " + sequenceNumbers.size() +
+ " logs to remove " +
+ "using oldest outstanding seqnum of " +
+ oldestOutstandingSeqNum + " from region " + oldestRegion);
+ }
+ }
+ if (sequenceNumbers.size() > 0) {
+ for (Long seq : sequenceNumbers) {
+ deleteLogFile(this.outputfiles.remove(seq), seq);
+ }
+ }
+ }
+ }
+ this.numEntries = 0;
+ }
+ } finally {
+ this.cacheFlushLock.unlock();
+ }
+ }
+
+ private void deleteLogFile(final Path p, final Long seqno) throws IOException {
+ LOG.info("removing old log file " + p.toString() +
+ " whose highest sequence/edit id is " + seqno);
+ this.fs.delete(p);
+ }
+
+ /**
+ * This is a convenience method that computes a new filename with a given
+ * file-number.
+ */
+ Path computeFilename(final long fn) {
+ return new Path(dir, HLOG_DATFILE + new Long(fn).toString());
+ }
+
+ /**
+ * Shut down the log and delete the log directory
+ *
+ * @throws IOException
+ */
+ public void closeAndDelete() throws IOException {
+ close();
+ fs.delete(dir);
+ }
+
+ /**
+ * Shut down the log.
+ *
+ * @throws IOException
+ */
+ void close() throws IOException {
+ cacheFlushLock.lock();
+ try {
+ synchronized (updateLock) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("closing log writer in " + this.dir.toString());
+ }
+ this.writer.close();
+ this.closed = true;
+ }
+ } finally {
+ cacheFlushLock.unlock();
+ }
+ }
+
+ /**
+ * Append a set of edits to the log. Log edits are keyed by regionName,
+ * rowname, and log-sequence-id.
+ *
+ * Later, if we sort by these keys, we obtain all the relevant edits for a
+ * given key-range of the HRegion (TODO). Any edits that do not have a
+ * matching {@link HConstants#COMPLETE_CACHEFLUSH} message can be discarded.
+ *
+ * <p>
+ * Logs cannot be restarted once closed, or once the HLog process dies. Each
+ * time the HLog starts, it must create a new log. This means that other
+ * systems should process the log appropriately upon each startup (and prior
+ * to initializing HLog).
+ *
+ * Synchronizing on updateLock prevents appends during the completion of a
+ * cache flush or for the duration of a log roll.
+ *
+ * @param regionName name of the region the edits belong to
+ * @param tableName name of the table the region belongs to
+ * @param edits edits to append, keyed by HStoreKey
+ * @throws IOException
+ */
+ void append(Text regionName, Text tableName,
+ TreeMap<HStoreKey, byte[]> edits) throws IOException {
+
+ if (closed) {
+ throw new IOException("Cannot append; log is closed");
+ }
+ synchronized (updateLock) {
+ long seqNum[] = obtainSeqNum(edits.size());
+ // The 'lastSeqWritten' map holds the sequence number of the oldest
+ // write for each region. When the cache is flushed, the entry for the
+ // region being flushed is removed if the sequence number of the flush
+ // is greater than or equal to the value in lastSeqWritten.
+ if (!this.lastSeqWritten.containsKey(regionName)) {
+ this.lastSeqWritten.put(regionName, Long.valueOf(seqNum[0]));
+ }
+ int counter = 0;
+ for (Map.Entry<HStoreKey, byte[]> es : edits.entrySet()) {
+ HStoreKey key = es.getKey();
+ HLogKey logKey =
+ new HLogKey(regionName, tableName, key.getRow(), seqNum[counter++]);
+ HLogEdit logEdit =
+ new HLogEdit(key.getColumn(), es.getValue(), key.getTimestamp());
+ this.writer.append(logKey, logEdit);
+ this.numEntries++;
+ }
+ }
+ if (this.numEntries > this.maxlogentries) {
+ if (listener != null) {
+ listener.logRollRequested();
+ }
+ }
+ }
+
+ /** @return How many items have been added to the log */
+ int getNumEntries() {
+ return numEntries;
+ }
+
+ /**
+ * Obtain a log sequence number.
+ * @return the next log sequence number
+ */
+ private long obtainSeqNum() {
+ long value;
+ synchronized (sequenceLock) {
+ value = logSeqNum++;
+ }
+ return value;
+ }
+
+ /** @return the number of log files in use */
+ int getNumLogFiles() {
+ return outputfiles.size();
+ }
+
+ /**
+ * Obtain a specified number of sequence numbers
+ *
+ * @param num number of sequence numbers to obtain
+ * @return array of sequence numbers
+ */
+ private long[] obtainSeqNum(int num) {
+ long[] results = new long[num];
+ synchronized (this.sequenceLock) {
+ for (int i = 0; i < num; i++) {
+ results[i] = this.logSeqNum++;
+ }
+ }
+ return results;
+ }
+
+ /**
+ * By acquiring a log sequence ID, we can allow log messages to continue while
+ * we flush the cache.
+ *
+ * Acquire a lock so that we do not roll the log between the start and
+ * completion of a cache-flush. Otherwise the log-seq-id for the flush will
+ * not appear in the correct logfile.
+ *
+ * @return sequence ID to pass to {@link #completeCacheFlush(Text, Text, long)}
+ * @see #completeCacheFlush(Text, Text, long)
+ * @see #abortCacheFlush()
+ */
+ long startCacheFlush() {
+ this.cacheFlushLock.lock();
+ return obtainSeqNum();
+ }
+
+ /**
+ * Complete the cache flush
+ *
+ * Protected by cacheFlushLock
+ *
+ * @param regionName name of the region that was flushed
+ * @param tableName name of the table the region belongs to
+ * @param logSeqId sequence ID obtained from {@link #startCacheFlush()}
+ * @throws IOException
+ */
+ void completeCacheFlush(final Text regionName, final Text tableName,
+ final long logSeqId) throws IOException {
+
+ try {
+ if (this.closed) {
+ return;
+ }
+ synchronized (updateLock) {
+ this.writer.append(new HLogKey(regionName, tableName, HLog.METAROW, logSeqId),
+ new HLogEdit(HLog.METACOLUMN, HLogEdit.completeCacheFlush.get(),
+ System.currentTimeMillis()));
+ this.numEntries++;
+ Long seq = this.lastSeqWritten.get(regionName);
+ if (seq != null && logSeqId >= seq.longValue()) {
+ this.lastSeqWritten.remove(regionName);
+ }
+ }
+ } finally {
+ this.cacheFlushLock.unlock();
+ }
+ }
+
+ /**
+ * Abort a cache flush.
+ * Call this if the flush fails. Note that the only recovery for an aborted
+ * flush currently is a restart of the regionserver, so that the snapshot
+ * content dropped by the failure gets restored to the memcache.
+ */
+ void abortCacheFlush() {
+ this.cacheFlushLock.unlock();
+ }
+
+ /**
+ * Split up a bunch of log files, that are no longer being written to, into
+ * new files, one per region. Delete the old log files when finished.
+ *
+ * @param rootDir qualified root directory of the HBase instance
+ * @param srcDir Directory of log files to split: e.g.
+ * <code>${ROOTDIR}/log_HOST_PORT</code>
+ * @param fs FileSystem
+ * @param conf Configuration to use
+ * @throws IOException
+ */
+ public static void splitLog(Path rootDir, Path srcDir, FileSystem fs,
+ Configuration conf) throws IOException {
+ Path logfiles[] = fs.listPaths(new Path[] { srcDir });
+ LOG.info("splitting " + logfiles.length + " log(s) in " +
+ srcDir.toString());
+ Map<Text, SequenceFile.Writer> logWriters =
+ new HashMap<Text, SequenceFile.Writer>();
+ try {
+ for (int i = 0; i < logfiles.length; i++) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Splitting " + i + " of " + logfiles.length + ": " +
+ logfiles[i]);
+ }
+ // Check for empty file.
+ if (fs.getFileStatus(logfiles[i]).getLen() <= 0) {
+ LOG.info("Skipping " + logfiles[i].toString() +
+ " because zero length");
+ continue;
+ }
+ HLogKey key = new HLogKey();
+ HLogEdit val = new HLogEdit();
+ SequenceFile.Reader in = new SequenceFile.Reader(fs, logfiles[i], conf);
+ try {
+ int count = 0;
+ for (; in.next(key, val); count++) {
+ Text tableName = key.getTablename();
+ Text regionName = key.getRegionName();
+ SequenceFile.Writer w = logWriters.get(regionName);
+ if (w == null) {
+ Path logfile = new Path(
+ HRegion.getRegionDir(
+ HTableDescriptor.getTableDir(rootDir, tableName),
+ HRegionInfo.encodeRegionName(regionName)
+ ),
+ HREGION_OLDLOGFILE_NAME
+ );
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Creating new log file writer for path " + logfile +
+ "; map content " + logWriters.toString());
+ }
+ w = SequenceFile.createWriter(fs, conf, logfile, HLogKey.class,
+ HLogEdit.class, getCompressionType(conf));
+ // Use copy of regionName; regionName object is reused inside in
+ // HStoreKey.getRegionName so its content changes as we iterate.
+ logWriters.put(new Text(regionName), w);
+ }
+ if (count % 10000 == 0 && count > 0 && LOG.isDebugEnabled()) {
+ LOG.debug("Applied " + count + " edits");
+ }
+ w.append(key, val);
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Applied " + count + " total edits");
+ }
+ } finally {
+ in.close();
+ }
+ }
+ } finally {
+ for (SequenceFile.Writer w : logWriters.values()) {
+ w.close();
+ }
+ }
+
+ if (fs.exists(srcDir)) {
+ if (!fs.delete(srcDir)) {
+ LOG.error("Cannot delete: " + srcDir);
+ if (!FileUtil.fullyDelete(new File(srcDir.toString()))) {
+ throw new IOException("Cannot delete: " + srcDir);
+ }
+ }
+ }
+ LOG.info("log file splitting completed for " + srcDir.toString());
+ }
+
+ private static void usage() {
+ System.err.println("Usage: java org.apache.hbase.HLog" +
+ " {--dump <logfile>... | --split <logdir>...}");
+ }
+
+ /**
+ * Pass one or more log file names and it will either dump out a text version
+ * on <code>stdout</code> or split the specified log files.
+ *
+ * @param args
+ * @throws IOException
+ */
+ public static void main(String[] args) throws IOException {
+ if (args.length < 2) {
+ usage();
+ System.exit(-1);
+ }
+ boolean dump = true;
+ if (args[0].compareTo("--dump") != 0) {
+ if (args[0].compareTo("--split") == 0) {
+ dump = false;
+
+ } else {
+ usage();
+ System.exit(-1);
+ }
+ }
+ Configuration conf = new HBaseConfiguration();
+ FileSystem fs = FileSystem.get(conf);
+ Path baseDir = new Path(conf.get(HBASE_DIR));
+
+ for (int i = 1; i < args.length; i++) {
+ Path logPath = new Path(args[i]);
+ if (!fs.exists(logPath)) {
+ throw new FileNotFoundException(args[i] + " does not exist");
+ }
+ if (dump) {
+ if (!fs.isFile(logPath)) {
+ throw new IOException(args[i] + " is not a file");
+ }
+ Reader log = new SequenceFile.Reader(fs, logPath, conf);
+ try {
+ HLogKey key = new HLogKey();
+ HLogEdit val = new HLogEdit();
+ while (log.next(key, val)) {
+ System.out.println(key.toString() + " " + val.toString());
+ }
+ } finally {
+ log.close();
+ }
+ } else {
+ if (!fs.getFileStatus(logPath).isDir()) {
+ throw new IOException(args[i] + " is not a directory");
+ }
+ splitLog(baseDir, logPath, fs, conf);
+ }
+ }
+ }
+}
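
For illustration (not part of the commit), a minimal sketch of the calling
sequence for the append and cache-flush protocol described in the class
comment above. It assumes a class placed in the same package, since append,
startCacheFlush, completeCacheFlush and abortCacheFlush are package-private;
the flush body and the variable names are placeholders:

    package org.apache.hadoop.hbase.regionserver;

    import java.io.IOException;
    import java.util.TreeMap;
    import org.apache.hadoop.io.Text;

    // Sketch only: shows the order of HLog calls, nothing more.
    class HLogUsageSketch {
      static void writeAndFlush(HLog log, Text regionName, Text tableName,
          TreeMap<HStoreKey, byte[]> edits) throws IOException {
        // Edits are appended under the region's name; no registration needed.
        log.append(regionName, tableName, edits);

        // Cache-flush protocol: reserve a sequence id (this also blocks log
        // rolling), flush, then record the flush, or abort it on failure.
        long seqId = log.startCacheFlush();
        try {
          // ... write the memcache snapshot out to an HStoreFile here ...
          log.completeCacheFlush(regionName, tableName, seqId);
        } catch (IOException e) {
          log.abortCacheFlush();   // releases the cacheFlushLock
          throw e;
        }
      }
    }
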
Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java?rev=630550&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java Sat Feb 23 16:19:34 2008
@@ -0,0 +1,140 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.*;
+
+import java.io.*;
+
+import org.apache.hadoop.hbase.HConstants;
+
+/**
+ * A log value.
+ *
+ * These aren't sortable; you need to sort by the matching HLogKey.
+ * The table and row are already identified in HLogKey.
+ * This just indicates the column and value.
+ */
+public class HLogEdit implements Writable, HConstants {
+
+ /** Value stored for a deleted item */
+ public static ImmutableBytesWritable deleteBytes = null;
+
+ /** Value written to HLog on a complete cache flush */
+ public static ImmutableBytesWritable completeCacheFlush = null;
+
+ static {
+ try {
+ deleteBytes =
+ new ImmutableBytesWritable("HBASE::DELETEVAL".getBytes(UTF8_ENCODING));
+
+ completeCacheFlush =
+ new ImmutableBytesWritable("HBASE::CACHEFLUSH".getBytes(UTF8_ENCODING));
+
+ } catch (UnsupportedEncodingException e) {
+ assert(false);
+ }
+ }
+
+ /**
+ * @param value value to test
+ * @return True if the passed value matches {@link #deleteBytes}, i.e. the
+ * entry marks a deleted item.
+ */
+ public static boolean isDeleted(final byte [] value) {
+ return (value == null)? false: deleteBytes.compareTo(value) == 0;
+ }
+
+ private Text column = new Text();
+ private byte [] val;
+ private long timestamp;
+ private static final int MAX_VALUE_LEN = 128;
+
+ /**
+ * Default constructor used by Writable
+ */
+ public HLogEdit() {
+ super();
+ }
+
+ /**
+ * Construct a fully initialized HLogEdit
+ * @param column column name
+ * @param bval value
+ * @param timestamp timestamp for modification
+ */
+ public HLogEdit(Text column, byte [] bval, long timestamp) {
+ this.column.set(column);
+ this.val = bval;
+ this.timestamp = timestamp;
+ }
+
+ /** @return the column */
+ public Text getColumn() {
+ return this.column;
+ }
+
+ /** @return the value */
+ public byte [] getVal() {
+ return this.val;
+ }
+
+ /** @return the timestamp */
+ public long getTimestamp() {
+ return this.timestamp;
+ }
+
+ /**
+ * @return The column name, timestamp, and at most the first 128 bytes of
+ * the value, rendered as a String.
+ */
+ @Override
+ public String toString() {
+ String value = "";
+ try {
+ value = (this.val.length > MAX_VALUE_LEN)?
+ new String(this.val, 0, MAX_VALUE_LEN, HConstants.UTF8_ENCODING) +
+ "...":
+ new String(getVal(), HConstants.UTF8_ENCODING);
+ } catch (UnsupportedEncodingException e) {
+ throw new RuntimeException("UTF8 encoding not present?", e);
+ }
+ return "(" + getColumn().toString() + "/" + getTimestamp() + "/" +
+ value + ")";
+ }
+
+ // Writable
+
+ /** {@inheritDoc} */
+ public void write(DataOutput out) throws IOException {
+ this.column.write(out);
+ out.writeInt(this.val.length);
+ out.write(this.val);
+ out.writeLong(timestamp);
+ }
+
+ /** {@inheritDoc} */
+ public void readFields(DataInput in) throws IOException {
+ this.column.readFields(in);
+ this.val = new byte[in.readInt()];
+ in.readFully(this.val);
+ this.timestamp = in.readLong();
+ }
+}
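
For illustration (not part of the commit), a sketch of round-tripping an
HLogEdit through its Writable methods, the kind of check a unit test might
perform; the column name and value are sample data:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.hbase.regionserver.HLogEdit;

    // Sketch only: serialize an edit and read it back.
    class HLogEditRoundTrip {
      public static void main(String[] args) throws IOException {
        HLogEdit edit = new HLogEdit(new Text("info:status"),
            "value".getBytes("UTF-8"), System.currentTimeMillis());

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        edit.write(new DataOutputStream(bytes));

        HLogEdit copy = new HLogEdit();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy);   // e.g. (info:status/<timestamp>/value)
      }
    }
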
Added: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java?rev=630550&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java (added)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java Sat Feb 23 16:19:34 2008
@@ -0,0 +1,159 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.io.*;
+
+import java.io.*;
+
+/**
+ * A Key for an entry in the change log.
+ *
+ * The log intermingles edits to many tables and rows, so each log entry
+ * identifies the appropriate table and row. Within a table and row, entries
+ * are further ordered by log sequence number.
+ */
+public class HLogKey implements WritableComparable {
+ Text regionName = new Text();
+ Text tablename = new Text();
+ Text row = new Text();
+ long logSeqNum = 0L;
+
+ /** Create an empty key useful when deserializing */
+ public HLogKey() {
+ super();
+ }
+
+ /**
+ * Create the log key.
+ * The tablename is kept mainly for debugging purposes; a region is always
+ * contained within a single table.
+ *
+ * @param regionName - name of region
+ * @param tablename - name of table
+ * @param row - row key
+ * @param logSeqNum - log sequence number
+ */
+ public HLogKey(Text regionName, Text tablename, Text row, long logSeqNum) {
+ // TODO: Is this copy of the instances necessary? They are expensive.
+ this.regionName.set(regionName);
+ this.tablename.set(tablename);
+ this.row.set(row);
+ this.logSeqNum = logSeqNum;
+ }
+
+ //////////////////////////////////////////////////////////////////////////////
+ // A bunch of accessors
+ //////////////////////////////////////////////////////////////////////////////
+
+ Text getRegionName() {
+ return regionName;
+ }
+
+ Text getTablename() {
+ return tablename;
+ }
+
+ Text getRow() {
+ return row;
+ }
+
+ long getLogSeqNum() {
+ return logSeqNum;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String toString() {
+ return tablename + "/" + regionName + "/" + row + "/" + logSeqNum;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object obj) {
+ return compareTo(obj) == 0;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ int result = this.regionName.hashCode();
+ result ^= this.row.hashCode();
+ result ^= this.logSeqNum;
+ return result;
+ }
+
+ //
+ // Comparable
+ //
+
+ /**
+ * {@inheritDoc}
+ */
+ public int compareTo(Object o) {
+ HLogKey other = (HLogKey) o;
+ int result = this.regionName.compareTo(other.regionName);
+
+ if(result == 0) {
+ result = this.row.compareTo(other.row);
+
+ if(result == 0) {
+
+ if (this.logSeqNum < other.logSeqNum) {
+ result = -1;
+
+ } else if (this.logSeqNum > other.logSeqNum) {
+ result = 1;
+ }
+ }
+ }
+ return result;
+ }
+
+ //
+ // Writable
+ //
+
+ /**
+ * {@inheritDoc}
+ */
+ public void write(DataOutput out) throws IOException {
+ this.regionName.write(out);
+ this.tablename.write(out);
+ this.row.write(out);
+ out.writeLong(logSeqNum);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void readFields(DataInput in) throws IOException {
+ this.regionName.readFields(in);
+ this.tablename.readFields(in);
+ this.row.readFields(in);
+ this.logSeqNum = in.readLong();
+ }
+}
\ No newline at end of file
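
For illustration (not part of the commit), a sketch of the ordering that
HLogKey.compareTo establishes: region name first, then row, then log sequence
number. The names below are sample values only:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.hbase.regionserver.HLogKey;

    // Sketch only: demonstrates the sort order of log keys.
    class HLogKeyOrderingSketch {
      public static void main(String[] args) {
        Text region = new Text("mytable,,1203809974000");
        Text table = new Text("mytable");
        HLogKey earlier = new HLogKey(region, table, new Text("row1"), 7L);
        HLogKey later = new HLogKey(region, table, new Text("row1"), 8L);
        HLogKey otherRow = new HLogKey(region, table, new Text("row2"), 1L);

        System.out.println(earlier.compareTo(later) < 0);   // true: same row, lower seq
        System.out.println(later.compareTo(otherRow) < 0);  // true: "row1" sorts before "row2"
      }
    }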