Posted to commits@hbase.apache.org by st...@apache.org on 2008/02/09 02:54:57 UTC

svn commit: r620059 - in /hadoop/hbase/branches/0.1: ./ conf/ src/java/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/util/ src/test/ src/test/org/apache/hadoop/hbase/ src/test/org/apache/hadoop/hbase/mapred/ src/test/org/apache/ha...

Author: stack
Date: Fri Feb  8 17:54:55 2008
New Revision: 620059

URL: http://svn.apache.org/viewvc?rev=620059&view=rev
Log:
HBASE-426  hbase can't find remote filesystem
Backport
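
In short, this backport removes the unqualified DEFAULT_HBASE_DIR fallback: hbase.rootdir must now be a fully-qualified filesystem URI (for example hdfs://localhost:9000/hbase, or a file:/// path for local mode), and the code qualifies that path against the FileSystem before using it. A minimal sketch of the pattern, assuming a plain Hadoop Configuration and an illustrative class name rather than the exact committed code:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RootDirSketch {
  /**
   * Resolve hbase.rootdir the way the patched code does: no default value,
   * and the path is qualified against the filesystem so its scheme
   * (hdfs://, file://, ...) is preserved.
   */
  public static Path qualifiedRootDir(Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    // "hbase.rootdir" is HConstants.HBASE_DIR in the patch; it must be set,
    // e.g. to hdfs://localhost:9000/hbase in hbase-site.xml.
    Path rootdir = fs.makeQualified(new Path(conf.get("hbase.rootdir")));
    // The patched HMaster writes the qualified form back into the
    // configuration so later readers see the same fully-qualified value.
    conf.set("hbase.rootdir", rootdir.toString());
    return rootdir;
  }
}

The test changes below do the complementary setup: each MiniDFSCluster-based test sets hbase.rootdir to the mini DFS home directory before calling super.setUp(), so the tests run against HDFS instead of falling back to the local filesystem.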

Modified:
    hadoop/hbase/branches/0.1/CHANGES.txt
    hadoop/hbase/branches/0.1/conf/hbase-default.xml
    hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConnectionManager.java
    hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConstants.java
    hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HLog.java
    hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMaster.java
    hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMerge.java
    hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/util/Migrate.java
    hadoop/hbase/branches/0.1/src/java/overview.html
    hadoop/hbase/branches/0.1/src/test/hbase-site.xml
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestCompaction.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteAll.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteFamily.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet2.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHLog.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHRegion.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHStoreFile.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestLogRolling.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestMergeMeta.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestScanner.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestSplit.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestTimestamp.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
    hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/util/TestMigrate.java

Modified: hadoop/hbase/branches/0.1/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/CHANGES.txt?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.1/CHANGES.txt Fri Feb  8 17:54:55 2008
@@ -24,6 +24,7 @@
                server death
    HBASE-425   Fix doc. so it accomodates new hbase untethered context
    HBase-421   TestRegionServerExit broken
+   HBASE-426   hbase can't find remote filesystem
 
   IMPROVEMENTS
    HADOOP-2555 Refactor the HTable#get and HTable#getRow methods to avoid

Modified: hadoop/hbase/branches/0.1/conf/hbase-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/conf/hbase-default.xml?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/conf/hbase-default.xml (original)
+++ hadoop/hbase/branches/0.1/conf/hbase-default.xml Fri Feb  8 17:54:55 2008
@@ -31,6 +31,13 @@
     </description>
   </property>
   <property>
+    <name>hbase.rootdir</name>
+    <value>file:///tmp/hbase-${user.home}/hbase</value>
+     <description>The directory shared by region servers.
+      Should be fully-qualified to include the filesystem to use.
+     </description>
+  </property>
+  <property>
     <name>hbase.master.info.port</name>
     <value>60010</value>
     <description>The port for the hbase master web UI
@@ -67,12 +74,6 @@
     <value>org.apache.hadoop.hbase.HRegionInterface</value>
     <description>An interface that is assignable to HRegionInterface.  Used in HClient for
     opening proxy to remote region server.
-    </description>
-  </property>
-  <property>
-    <name>hbase.rootdir</name>
-    <value>${hadoop.tmp.dir}/hbase</value>
-    <description>The directory shared by region servers.
     </description>
   </property>
   <property>

Modified: hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConnectionManager.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConnectionManager.java (original)
+++ hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConnectionManager.java Fri Feb  8 17:54:55 2008
@@ -69,7 +69,7 @@
   public static HConnection getConnection(HBaseConfiguration conf) {
     TableServers connection;
     synchronized (HBASE_INSTANCES) {
-      String instanceName = conf.get(HBASE_DIR, DEFAULT_HBASE_DIR);
+      String instanceName = conf.get(HBASE_DIR);
 
       connection = HBASE_INSTANCES.get(instanceName);
 
@@ -88,7 +88,7 @@
   public static void deleteConnection(HBaseConfiguration conf) {
     synchronized (HBASE_INSTANCES) {
       TableServers instance =
-        HBASE_INSTANCES.remove(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR));
+        HBASE_INSTANCES.remove(conf.get(HBASE_DIR));
       if (instance != null) {
         instance.closeAll();
       }

Modified: hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConstants.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HConstants.java Fri Feb  8 17:54:55 2008
@@ -83,9 +83,6 @@
   /** Parameter name for HBase instance root directory */
   static final String HBASE_DIR = "hbase.rootdir";
   
-  /** Default HBase instance root directory */
-  static final String DEFAULT_HBASE_DIR = "/hbase";
-  
   /** Used to construct the name of the log directory for a region server */
   static final String HREGION_LOGDIR_NAME = "log";
 

Modified: hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HLog.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HLog.java (original)
+++ hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HLog.java Fri Feb  8 17:54:55 2008
@@ -596,7 +596,7 @@
     }
     Configuration conf = new HBaseConfiguration();
     FileSystem fs = FileSystem.get(conf);
-    Path baseDir = new Path(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR));
+    Path baseDir = new Path(conf.get(HBASE_DIR));
 
     for (int i = 1; i < args.length; i++) {
       Path logPath = new Path(args[i]);

Modified: hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMaster.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMaster.java Fri Feb  8 17:54:55 2008
@@ -848,28 +848,28 @@
    * @throws IOException
    */
   public HMaster(HBaseConfiguration conf) throws IOException {
-    this(new Path(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR)),
+    this(new Path(conf.get(HBASE_DIR)),
         new HServerAddress(conf.get(MASTER_ADDRESS, DEFAULT_MASTER_ADDRESS)),
         conf);
   }
 
   /** 
    * Build the HMaster
-   * @param rootdir base directory of this HBase instance
+   * @param rd base directory of this HBase instance.  Must be fully
+   * qualified so includes filesystem to use.
    * @param address server address and port number
    * @param conf configuration
    * 
    * @throws IOException
    */
-  public HMaster(Path rootdir, HServerAddress address, HBaseConfiguration conf)
-    throws IOException {
+  public HMaster(Path rd, HServerAddress address, HBaseConfiguration conf)
+  throws IOException {
     
     this.conf = conf;
     this.fs = FileSystem.get(conf);
-    this.rootdir = fs.makeQualified(rootdir);
+    this.rootdir = this.fs.makeQualified(rd);
     this.conf.set(HConstants.HBASE_DIR, this.rootdir.toString());
     this.rand = new Random();
-    
     Path rootRegionDir =
       HRegion.getRegionDir(rootdir, HRegionInfo.rootRegionInfo);
     LOG.info("Root region dir: " + rootRegionDir.toString());

Modified: hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMerge.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMerge.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMerge.java (original)
+++ hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/HMerge.java Fri Feb  8 17:54:55 2008
@@ -99,7 +99,7 @@
         conf.getLong("hbase.hregion.max.filesize", DEFAULT_MAX_FILE_SIZE);
 
       this.tabledir = new Path(
-          fs.makeQualified(new Path(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR))),
+          fs.makeQualified(new Path(conf.get(HBASE_DIR))),
           tableName.toString()
       );
       Path logdir = new Path(tabledir, "merge_" + System.currentTimeMillis() +
@@ -319,7 +319,7 @@
       super(conf, fs, META_TABLE_NAME);
 
       Path rootTableDir = HTableDescriptor.getTableDir(
-          fs.makeQualified(new Path(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR))),
+          fs.makeQualified(new Path(conf.get(HBASE_DIR))),
           ROOT_TABLE_NAME);
 
       // Scan root region to find all the meta regions

Modified: hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/util/Migrate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/util/Migrate.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/util/Migrate.java (original)
+++ hadoop/hbase/branches/0.1/src/java/org/apache/hadoop/hbase/util/Migrate.java Fri Feb  8 17:54:55 2008
@@ -148,8 +148,8 @@
 
       LOG.info("Starting upgrade" + (readOnly ? " check" : ""));
 
-      Path rootdir = fs.makeQualified(new Path(           // get HBase root dir
-          conf.get(HConstants.HBASE_DIR, HConstants.DEFAULT_HBASE_DIR)));
+      Path rootdir =
+        fs.makeQualified(new Path(this.conf.get(HConstants.HBASE_DIR)));
 
       if (!fs.exists(rootdir)) {
         throw new FileNotFoundException("HBase root directory " +

Modified: hadoop/hbase/branches/0.1/src/java/overview.html
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/java/overview.html?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/java/overview.html (original)
+++ hadoop/hbase/branches/0.1/src/java/overview.html Fri Feb  8 17:54:55 2008
@@ -39,12 +39,12 @@
 </p>
 <p>
 <ul>
-<li><code>${HBASE_HOME}</code>: The Hbase root.
+<li><code>${HBASE_HOME}</code>: The HBase root.
 </li>
 </ul>
 </p>
 <p>Edit <code>${HBASE_HOME}/conf/hbase-env.sh</code>.  In this file you can
-set the heapsize for Hbase, etc.  At a minimum, set
+set the heapsize for HBase, etc.  At a minimum, set
 <code>JAVA_HOME</code> to the root of your Java installation.
 <p>
 If you are running a standalone operation, proceed to <a href=#runandconfirm>Running 
@@ -54,11 +54,11 @@
 <h2><a name="distributed" >Distributed Operation</a></h2>
 <p>Distributed mode requires an instance of the Hadoop Distributed File System (DFS).
 See the Hadoop <a href="http://lucene.apache.org/hadoop/api/overview-summary.html#overview_description">
-requirements and instructions</a> for running a distributed operation.  Configuring Hbase for a
+requirements and instructions</a> for running a distributed operation.  Configuring HBase for a
 distributed operation requires modification of the following two files:
 <code>${HBASE_HOME}/conf/hbase-site.xml</code> and <code>${HBASE_HOME}/conf/regionservers</code>.  
 The former needs to be pointed at the running Hadoop DFS instance.  The latter file lists
-all members of the Hbase cluster.
+all members of the HBase cluster.
 </p>
 <p>
 <code>hbase-site.xml</code> allows the user to override the properties defined in 
@@ -67,21 +67,22 @@
 <code>hbase.rootdir</code> properties should be redefined 
 in <code>hbase-site.xml</code> to define the <code>host:port</code> pair on which to run the 
 HMaster (<a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseArchitecture">read about the 
-Hbase master, regionservers, etc</a>) and to point hbase the Hadoop filesystem to use:
+Hbase master, regionservers, etc</a>) and to point hbase at the Hadoop filesystem to use. For
+example:
 </p>
 <pre>
 &lt;configuration&gt;
 
   &lt;property&gt;
     &lt;name&gt;hbase.master&lt;/name&gt;
-    &lt;value&gt;[YOUR_HOST]:[PORT]&lt;/value&gt;
-    &lt;description&gt;The host and port that the Hbase master runs at.
+    &lt;value&gt;http://localhost:60000&lt;/value&gt;
+    &lt;description&gt;The host and port that the HBase master runs at.
     &lt;/description&gt;
   &lt;/property&gt;
 
   &lt;property&gt;
     &lt;name&gt;hbase.rootdir&lt;/name&gt;
-    &lt;value&gt;FULLY_QUALIFIED_PATH&lt;/value&gt;
+    &lt;value&gt;hdfs://localhost:9000/hbase&lt;/value&gt;
     &lt;description&gt;The directory shared by region servers.
     &lt;/description&gt;
   &lt;/property&gt;
@@ -98,41 +99,41 @@
 <p>If you are running in standalone, non-distributed mode, hbase by default uses
 the local filesystem.</p>
 <p>If you are running a distributed cluster you will need to start the Hadoop DFS daemons 
-before starting Hbase and stop the daemons after Hbase has shut down.  Start and 
+before starting HBase and stop the daemons after HBase has shut down.  Start and 
 stop the Hadoop DFS daemons as per the Hadoop 
-<a href="http://lucene.apache.org/hadoop/api/overview-summary.html">instructions</a>. Hbase
+<a href="http://lucene.apache.org/hadoop/api/overview-summary.html">instructions</a>. HBase
 does not normally use the mapreduce daemons.  These do not need to be started.</p>
 
-<p>Start Hbase with the following command:
+<p>Start HBase with the following command:
 </p>
 <pre>
 ${HBASE_HOME}/bin/start-hbase.sh
 </pre>
 <p>
-Once Hbase has started, enter <code>${HBASE_HOME}/bin/hbase shell</code> to obtain a 
-shell against Hbase from which you can execute HQL commands (HQL is a severe subset of SQL).  
-In the Hbase shell, type 
-<code>help;</code> to see a list of supported HQL commands.  Note that all commands in the Hbase 
+Once HBase has started, enter <code>${HBASE_HOME}/bin/hbase shell</code> to obtain a 
+shell against HBase from which you can execute HQL commands (HQL is a severe subset of SQL).  
+In the HBase shell, type 
+<code>help;</code> to see a list of supported HQL commands.  Note that all commands in the HBase 
 shell must end with <code>;</code>.  Test your installation by creating, viewing, and dropping 
 a table, as per the help instructions.  Be patient with the <code>create</code> and 
 <code>drop</code> operations as they may each take 10 seconds or more.  To stop hbase, exit the 
-Hbase shell and enter:
+HBase shell and enter:
 </p>
 <pre>
 ${HBASE_HOME}/bin/stop-hbase.sh
 </pre>
 <p>
-If you are running a distributed operation, be sure to wait until Hbase has shut down completely 
+If you are running a distributed operation, be sure to wait until HBase has shut down completely 
 before stopping the Hadoop daemons.
 </p>
 <p>
 The default location for logs is <code>${HBASE_HOME}/logs</code>.
 </p>
-<p>Hbase also puts up a UI listing vital attributes.  By default its deployed on the master host
+<p>HBase also puts up a UI listing vital attributes.  By default its deployed on the master host
 at port 60010.</p>
 
 <h2><a name="upgrading" >Upgrading</a></h2>
-<p>After installing the new Hbase, before starting your cluster, run the
+<p>After installing the new HBase, before starting your cluster, run the
 <code>${HBASE_DIR}/bin/hbase migrate</code> migration script. It will make any
 adjustments to the filesystem data under <code>hbase.rootdir</code> necessary to run
 the hbase version (It does not change your install unless you explicitly ask it to).
@@ -140,8 +141,8 @@
 
 <h2><a name="related" >Related Documentation</a></h2>
 <ul>
-  <li><a href="http://wiki.apache.org/lucene-hadoop/Hbase">Hbase Home Page</a>
-  <li><a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseArchitecture">Hbase Architecture</a>
+  <li><a href="http://wiki.apache.org/lucene-hadoop/Hbase">HBase Home Page</a>
+  <li><a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseArchitecture">HBase Architecture</a>
 </ul>
 
 </body>

Modified: hadoop/hbase/branches/0.1/src/test/hbase-site.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/hbase-site.xml?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/hbase-site.xml (original)
+++ hadoop/hbase/branches/0.1/src/test/hbase-site.xml Fri Feb  8 17:54:55 2008
@@ -23,13 +23,6 @@
 -->
 <configuration>
   <property>
-    <name>fs.default.name</name>
-    <value></value>
-    <description>Use hdfs as file system by default. Modify this to run on
-    local file system.
-    </description>
-  </property>
-  <property>
     <name>hbase.regionserver.msginterval</name>
     <value>1000</value>
     <description>Interval between messages from the RegionServer to HMaster
@@ -103,11 +96,6 @@
     Amount of time to wait since the last time a region was flushed before
     invoking an optional cache flush. Default 60,000.
     </description>
-  </property>
-  <property>
-  	<name>hbase.rootdir</name>
-  	<value>/hbase</value>
-  	<description>location of HBase instance in dfs</description>
   </property>
   <property>
     <name>hbase.hregion.max.filesize</name>

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java Fri Feb  8 17:54:55 2008
@@ -68,6 +68,9 @@
   public void setUp() throws Exception {
     conf.setLong("hbase.hregion.max.filesize", 64L * 1024L * 1024L);
     dfsCluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.dfsCluster.getFileSystem().getHomeDirectory().toString());
     
     // Note: we must call super.setUp after starting the mini cluster or
     // we will end up with a local file system

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Fri Feb  8 17:54:55 2008
@@ -113,9 +113,8 @@
           fs.delete(testDir);
         }
       } else {
-        this.testDir = fs.makeQualified(
-            new Path(conf.get(HConstants.HBASE_DIR, HConstants.DEFAULT_HBASE_DIR))
-        );
+        this.testDir =
+          this.fs.makeQualified(new Path(conf.get(HConstants.HBASE_DIR)));
       }
     } catch (Exception e) {
       LOG.fatal("error during setup", e);
@@ -147,10 +146,10 @@
   protected HRegion createNewHRegion(HTableDescriptor desc, Text startKey,
       Text endKey) throws IOException {
     
-    FileSystem fs = FileSystem.get(conf);
-    Path rootdir = fs.makeQualified(
-        new Path(conf.get(HConstants.HBASE_DIR, HConstants.DEFAULT_HBASE_DIR)));
-    fs.mkdirs(rootdir);
+    FileSystem filesystem = FileSystem.get(conf);
+    Path rootdir = filesystem.makeQualified(
+        new Path(conf.get(HConstants.HBASE_DIR)));
+    filesystem.mkdirs(rootdir);
     
     return HRegion.createHRegion(new HRegionInfo(desc, startKey, endKey),
         rootdir, conf);

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java Fri Feb  8 17:54:55 2008
@@ -138,8 +138,9 @@
 
   private void init(final int nRegionNodes) throws IOException {
     try {
-      this.parentdir = new Path(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR));
-      fs.mkdirs(parentdir);
+      this.parentdir = this.fs.getHomeDirectory();
+      this.conf.set(HConstants.HBASE_DIR, this.parentdir.toString());
+      this.fs.mkdirs(parentdir);
       FSUtils.setVersion(fs, parentdir);
       this.hbaseCluster = new LocalHBaseCluster(this.conf, nRegionNodes);
       this.hbaseCluster.startup();

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestCompaction.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestCompaction.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestCompaction.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestCompaction.java Fri Feb  8 17:54:55 2008
@@ -59,6 +59,9 @@
   @Override
   public void setUp() throws Exception {
     this.cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+    // Make the hbase rootdir match the minidfs we just span up
+    this.conf.set(HConstants.HBASE_DIR,
+      this.cluster.getFileSystem().getHomeDirectory().toString());
     super.setUp();
     HTableDescriptor htd = createTableDescriptor(getName());
     this.r = createNewHRegion(htd, null, null);

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteAll.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteAll.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteAll.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteAll.java Fri Feb  8 17:54:55 2008
@@ -38,6 +38,9 @@
     super.setUp();
     try {
       this.miniHdfs = new MiniDFSCluster(this.conf, 1, true, null);
+      // Set the hbase.rootdir to be the home directory in mini dfs.
+      this.conf.set(HConstants.HBASE_DIR,
+        this.miniHdfs.getFileSystem().getHomeDirectory().toString());
     } catch (Exception e) {
       LOG.fatal("error starting MiniDFSCluster", e);
       throw e;

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteFamily.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteFamily.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteFamily.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestDeleteFamily.java Fri Feb  8 17:54:55 2008
@@ -38,6 +38,9 @@
   protected void setUp() throws Exception {
     super.setUp();
     this.miniHdfs = new MiniDFSCluster(this.conf, 1, true, null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.miniHdfs.getFileSystem().getHomeDirectory().toString());
   }
   
   /**

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet.java Fri Feb  8 17:54:55 2008
@@ -78,6 +78,9 @@
       // Initialization
       
       cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+      // Set the hbase.rootdir to be the home directory in mini dfs.
+      this.conf.set(HConstants.HBASE_DIR,
+        cluster.getFileSystem().getHomeDirectory().toString());
       
       HTableDescriptor desc = new HTableDescriptor("test");
       desc.addFamily(new HColumnDescriptor(CONTENTS.toString()));

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet2.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet2.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet2.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestGet2.java Fri Feb  8 17:54:55 2008
@@ -40,6 +40,9 @@
   protected void setUp() throws Exception {
     super.setUp();
     this.miniHdfs = new MiniDFSCluster(this.conf, 1, true, null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.miniHdfs.getFileSystem().getHomeDirectory().toString());
   }
   
   /**

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHLog.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHLog.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHLog.java Fri Feb  8 17:54:55 2008
@@ -37,6 +37,9 @@
   @Override
   public void setUp() throws Exception {
     cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.cluster.getFileSystem().getHomeDirectory().toString());
     super.setUp();
     this.dir = new Path("/hbase", getName());
     if (fs.exists(dir)) {

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHRegion.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHRegion.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHRegion.java Fri Feb  8 17:54:55 2008
@@ -96,6 +96,9 @@
   private void setup() throws IOException {
 
     cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.cluster.getFileSystem().getHomeDirectory().toString());
 
     desc = new HTableDescriptor("test");
     desc.addFamily(new HColumnDescriptor("contents:"));

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHStoreFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHStoreFile.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHStoreFile.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestHStoreFile.java Fri Feb  8 17:54:55 2008
@@ -45,6 +45,9 @@
   public void setUp() throws Exception {
     try {
       this.cluster = new MiniDFSCluster(this.conf, 2, true, (String[])null);
+      // Set the hbase.rootdir to be the home directory in mini dfs.
+      this.conf.set(HConstants.HBASE_DIR,
+        this.cluster.getFileSystem().getHomeDirectory().toString());
       this.dir = new Path(DIR, getName());
     } catch (IOException e) {
       StaticTestEnvironment.shutdownDfs(cluster);

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestLogRolling.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestLogRolling.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestLogRolling.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestLogRolling.java Fri Feb  8 17:54:55 2008
@@ -95,6 +95,9 @@
     try {
       super.setUp();
       dfs = new MiniDFSCluster(conf, 2, true, (String[]) null);
+      // Set the hbase.rootdir to be the home directory in mini dfs.
+      this.conf.set(HConstants.HBASE_DIR,
+        this.dfs.getFileSystem().getHomeDirectory().toString());
     } catch (Exception e) {
       StaticTestEnvironment.shutdownDfs(dfs);
       LOG.fatal("error during setUp: ", e);

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestMergeMeta.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestMergeMeta.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestMergeMeta.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestMergeMeta.java Fri Feb  8 17:54:55 2008
@@ -39,4 +39,4 @@
     assertNotNull(dfsCluster);
     HMerge.merge(conf, dfsCluster.getFileSystem(), HConstants.META_TABLE_NAME);
   }
-}  
+}

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestScanner.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestScanner.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestScanner.java Fri Feb  8 17:54:55 2008
@@ -56,6 +56,9 @@
   @Override
   public void setUp() throws Exception {
     cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.cluster.getFileSystem().getHomeDirectory().toString());
     super.setUp();
     
   }

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestSplit.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestSplit.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestSplit.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestSplit.java Fri Feb  8 17:54:55 2008
@@ -69,6 +69,9 @@
     HRegion region = null;
     try {
       cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+      // Set the hbase.rootdir to be the home directory in mini dfs.
+      this.conf.set(HConstants.HBASE_DIR,
+        cluster.getFileSystem().getHomeDirectory().toString());
       HTableDescriptor htd = createTableDescriptor(getName());
       region = createNewHRegion(htd, null, null);
       basicSplit(region);

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestTimestamp.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestTimestamp.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestTimestamp.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/TestTimestamp.java Fri Feb  8 17:54:55 2008
@@ -57,6 +57,9 @@
   @Override
   public void setUp() throws Exception {
     this.cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.cluster.getFileSystem().getHomeDirectory().toString());
     super.setUp();
   }
 

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java Fri Feb  8 17:54:55 2008
@@ -101,6 +101,9 @@
     desc.addFamily(new HColumnDescriptor(OUTPUT_COLUMN));
 
     dfsCluster = new MiniDFSCluster(conf, 1, true, (String[]) null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.dfsCluster.getFileSystem().getHomeDirectory().toString());
 
     // Must call super.setUp after mini dfs cluster is started or else
     // filesystem ends up being local

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java Fri Feb  8 17:54:55 2008
@@ -115,6 +115,9 @@
   @Override
   public void setUp() throws Exception {
     dfsCluster = new MiniDFSCluster(conf, 1, true, (String[])null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.dfsCluster.getFileSystem().getHomeDirectory().toString());
 
     // Must call super.setup() after starting mini dfs cluster. Otherwise
     // we get a local file system instead of hdfs

Modified: hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/util/TestMigrate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/util/TestMigrate.java?rev=620059&r1=620058&r2=620059&view=diff
==============================================================================
--- hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/util/TestMigrate.java (original)
+++ hadoop/hbase/branches/0.1/src/test/org/apache/hadoop/hbase/util/TestMigrate.java Fri Feb  8 17:54:55 2008
@@ -75,9 +75,11 @@
     MiniDFSCluster dfsCluster = null;
     try {
       dfsCluster = new MiniDFSCluster(conf, 2, true, (String[])null);
+      // Set the hbase.rootdir to be the home directory in mini dfs.
+      this.conf.set(HConstants.HBASE_DIR,
+        dfsCluster.getFileSystem().getHomeDirectory().toString());
       FileSystem dfs = dfsCluster.getFileSystem();
-      Path root = dfs.makeQualified(new Path(
-          conf.get(HConstants.HBASE_DIR, HConstants.DEFAULT_HBASE_DIR)));
+      Path root = dfs.makeQualified(new Path(conf.get(HConstants.HBASE_DIR)));
       dfs.mkdirs(root);
 
       /*