Posted to commits@hbase.apache.org by st...@apache.org on 2011/10/14 01:12:31 UTC

svn commit: r1183158 [3/3] - in /hbase/trunk: ./ src/main/java/org/apache/hadoop/hbase/ src/main/java/org/apache/hadoop/hbase/catalog/ src/main/java/org/apache/hadoop/hbase/client/ src/main/java/org/apache/hadoop/hbase/ipc/ src/main/java/org/apache/had...

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java Thu Oct 13 23:12:30 2011
@@ -47,6 +47,9 @@ import org.apache.hadoop.hbase.ServerNam
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.executor.ExecutorService;
 import org.apache.hadoop.hbase.io.Reference;
@@ -61,20 +64,27 @@ import org.mockito.Mockito;
 public class TestCatalogJanitor {
   /**
    * Pseudo server for below tests.
+   * Be sure to call stop on the way out, else it could leave some mess around.
    */
   class MockServer implements Server {
+    private final HConnection connection;
     private final Configuration c;
     private final CatalogTracker ct;
 
     MockServer(final HBaseTestingUtility htu)
-    throws NotAllMetaRegionsOnlineException, IOException {
+    throws NotAllMetaRegionsOnlineException, IOException, InterruptedException {
       this.c = htu.getConfiguration();
+      // Mock an HConnection and an HRegionInterface implementation.  Have the
+      // HConnection return the HRI.  Have the HRI return a few mocked up responses
+      // to make our test work.
+      this.connection = HConnectionTestingUtility.getMockedConnection(this.c);
       // Set hbase.rootdir into test dir.
       FileSystem fs = FileSystem.get(this.c);
       Path rootdir = fs.makeQualified(new Path(this.c.get(HConstants.HBASE_DIR)));
       this.c.set(HConstants.HBASE_DIR, rootdir.toString());
       this.ct = Mockito.mock(CatalogTracker.class);
       HRegionInterface hri = Mockito.mock(HRegionInterface.class);
+      Mockito.when(this.ct.getConnection()).thenReturn(this.connection);
       Mockito.when(ct.waitForMetaServerConnectionDefault()).thenReturn(hri);
     }
 
@@ -115,9 +125,13 @@ public class TestCatalogJanitor {
 
     @Override
     public void stop(String why) {
-      //no-op
+      if (this.ct != null) {
+        this.ct.stop();
+      }
+      if (this.connection != null) {
+        HConnectionManager.deleteConnection(this.connection.getConfiguration(), true);
+      }
     }
-    
   }
 
   /**
@@ -267,42 +281,53 @@ public class TestCatalogJanitor {
   }
 
   @Test
-  public void testCleanParent() throws IOException {
+  public void testCleanParent() throws IOException, InterruptedException {
     HBaseTestingUtility htu = new HBaseTestingUtility();
     setRootDirAndCleanIt(htu, "testCleanParent");
     Server server = new MockServer(htu);
-    MasterServices services = new MockMasterServices(server);
-    CatalogJanitor janitor = new CatalogJanitor(server, services);
-    // Create regions.
-    HTableDescriptor htd = createHTableDescriptor();
-    HRegionInfo parent =
-      new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"),
-          Bytes.toBytes("eee"));
-    HRegionInfo splita =
-      new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"),
-          Bytes.toBytes("ccc"));
-    HRegionInfo splitb =
-      new HRegionInfo(htd.getName(), Bytes.toBytes("ccc"),
-          Bytes.toBytes("eee"));
-    // Test that when both daughter regions are in place, that we do not
-    // remove the parent.
-    Result r = createResult(parent, splita, splitb);
-    // Add a reference under splitA directory so we don't clear out the parent.
-    Path rootdir = services.getMasterFileSystem().getRootDir();
-    Path tabledir =
-      HTableDescriptor.getTableDir(rootdir, htd.getName());
-    Path storedir = Store.getStoreHomedir(tabledir, splita.getEncodedName(),
-      htd.getColumnFamilies()[0].getName());
-    Reference ref = new Reference(Bytes.toBytes("ccc"), Reference.Range.top);
-    long now = System.currentTimeMillis();
-    // Reference name has this format: StoreFile#REF_NAME_PARSER
-    Path p = new Path(storedir, Long.toString(now) + "." + parent.getEncodedName());
-    FileSystem fs = services.getMasterFileSystem().getFileSystem();
-    ref.write(fs, p);
-    assertFalse(janitor.cleanParent(parent, r));
-    // Remove the reference file and try again.
-    assertTrue(fs.delete(p, true));
-    assertTrue(janitor.cleanParent(parent, r));
+    try {
+      MasterServices services = new MockMasterServices(server);
+      CatalogJanitor janitor = new CatalogJanitor(server, services);
+      // Create regions.
+      HTableDescriptor htd = new HTableDescriptor("table");
+      htd.addFamily(new HColumnDescriptor("f"));
+      HRegionInfo parent =
+        new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"),
+            Bytes.toBytes("eee"));
+      HRegionInfo splita =
+        new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"),
+            Bytes.toBytes("ccc"));
+      HRegionInfo splitb =
+        new HRegionInfo(htd.getName(), Bytes.toBytes("ccc"),
+            Bytes.toBytes("eee"));
+      // Test that when both daughter regions are in place, we do not
+      // remove the parent.
+      List<KeyValue> kvs = new ArrayList<KeyValue>();
+      kvs.add(new KeyValue(parent.getRegionName(), HConstants.CATALOG_FAMILY,
+          HConstants.SPLITA_QUALIFIER, Writables.getBytes(splita)));
+      kvs.add(new KeyValue(parent.getRegionName(), HConstants.CATALOG_FAMILY,
+          HConstants.SPLITB_QUALIFIER, Writables.getBytes(splitb)));
+      Result r = new Result(kvs);
+      // Add a reference under splitA directory so we don't clear out the parent.
+      Path rootdir = services.getMasterFileSystem().getRootDir();
+      Path tabledir =
+        HTableDescriptor.getTableDir(rootdir, htd.getName());
+      Path storedir = Store.getStoreHomedir(tabledir, splita.getEncodedName(),
+          htd.getColumnFamilies()[0].getName());
+      Reference ref = new Reference(Bytes.toBytes("ccc"), Reference.Range.top);
+      long now = System.currentTimeMillis();
+      // Reference name has this format: StoreFile#REF_NAME_PARSER
+      Path p = new Path(storedir, Long.toString(now) + "." + parent.getEncodedName());
+      FileSystem fs = services.getMasterFileSystem().getFileSystem();
+      Path path = ref.write(fs, p);
+      assertTrue(fs.exists(path));
+      assertFalse(janitor.cleanParent(parent, r));
+      // Remove the reference file and try again.
+      assertTrue(fs.delete(p, true));
+      assertTrue(janitor.cleanParent(parent, r));
+    } finally {
+      server.stop("shutdown");
+    }
   }
 
   /**
@@ -459,4 +484,4 @@ public class TestCatalogJanitor {
     htd.addFamily(new HColumnDescriptor("f"));
     return htd;
   }
-}
\ No newline at end of file
+}

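The MockServer changes above reduce to a small Mockito recipe: get a mocked HConnection from HConnectionTestingUtility, stub the CatalogTracker so callers see that connection plus a mocked HRegionInterface, and delete the cached connection on the way out. Here is a minimal sketch of that pattern, assuming the same trunk test classpath this commit builds against; the class and method names come from the diff above, while everything else (the class name MockedCatalogSketch, the main-method scaffolding) is illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
import org.apache.hadoop.hbase.ipc.HRegionInterface;
import org.mockito.Mockito;

public class MockedCatalogSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Mocked connection, registered under this configuration so it can be
    // looked up (and later deleted) through HConnectionManager.
    HConnection connection = HConnectionTestingUtility.getMockedConnection(conf);
    CatalogTracker ct = Mockito.mock(CatalogTracker.class);
    HRegionInterface meta = Mockito.mock(HRegionInterface.class);
    // Stub the two calls the code under test exercises.
    Mockito.when(ct.getConnection()).thenReturn(connection);
    Mockito.when(ct.waitForMetaServerConnectionDefault()).thenReturn(meta);
    try {
      // ... hand ct to the code under test here ...
    } finally {
      ct.stop();  // a no-op on a mock, but mirrors MockServer.stop()
      // Drop the cached mocked connection so later tests in the same JVM
      // get a real one instead of this stub.
      HConnectionManager.deleteConnection(connection.getConfiguration(), true);
    }
  }
}

The deleteConnection call mirrors the new MockServer.stop(): the mocked connection is cached per configuration, so it has to be dropped or a later test in the same JVM could pick it up.
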
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java Thu Oct 13 23:12:30 2011
@@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.regionser
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
@@ -98,6 +99,9 @@ public class TestDistributedLogSplitting
     LOG.info("Waiting for active/ready master");
     cluster.waitForActiveAndReadyMaster();
     master = cluster.getMaster();
+    while (cluster.getLiveRegionServerThreads().size() < num_rs) {
+      Threads.sleep(1);
+    }
   }
 
   @After
@@ -111,7 +115,7 @@ public class TestDistributedLogSplitting
     final int NUM_REGIONS_TO_CREATE = 40;
     final int NUM_ROWS_PER_REGION = 100;
 
-    startCluster(NUM_RS);
+    startCluster(NUM_RS); // NUM_RS=6.
 
     ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf,
         "distributed log splitting test", null);

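The new loop in setUp() polls until every region server thread has checked in, sleeping a millisecond between checks. As a general pattern it is often worth bounding such a wait with a deadline so a broken cluster fails the test rather than hanging it. A small sketch in plain JDK Java; WaitUtil and Condition are names made up for this example, not HBase utilities.

public final class WaitUtil {
  /** Tiny condition interface so the sketch has no external dependencies. */
  public interface Condition {
    boolean isTrue();
  }

  private WaitUtil() {
  }

  /**
   * Polls the condition every intervalMs until it holds or timeoutMs elapses.
   * Returns true if the condition became true, false on timeout.
   */
  public static boolean await(Condition condition, long timeoutMs, long intervalMs)
  throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (!condition.isTrue()) {
      if (System.currentTimeMillis() >= deadline) {
        return false;
      }
      Thread.sleep(intervalMs);
    }
    return true;
  }
}

A setup method could then fail loudly, e.g. assertTrue("region servers never came up", WaitUtil.await(cond, 60000, 10)) where cond checks that cluster.getLiveRegionServerThreads().size() has reached num_rs.
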
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java Thu Oct 13 23:12:30 2011
@@ -86,21 +86,20 @@ public class TestMaster {
         tableRegions.get(0).getFirst().getEndKey());
 
     // Now trigger a split and stop when the split is in progress
-
-    CountDownLatch aboutToOpen = new CountDownLatch(1);
+    CountDownLatch split = new CountDownLatch(1);
     CountDownLatch proceed = new CountDownLatch(1);
-    RegionOpenListener list = new RegionOpenListener(aboutToOpen, proceed);
+    RegionSplitListener list = new RegionSplitListener(split, proceed);
     cluster.getMaster().executorService.
-      registerListener(EventType.RS_ZK_REGION_OPENED, list);
+      registerListener(EventType.RS_ZK_REGION_SPLIT, list);
 
     LOG.info("Splitting table");
     admin.split(TABLENAME);
     LOG.info("Waiting for split result to be about to open");
-    aboutToOpen.await(60, TimeUnit.SECONDS);
+    split.await(60, TimeUnit.SECONDS);
     try {
       LOG.info("Making sure we can call getTableRegions while opening");
-      tableRegions = MetaReader.getTableRegionsAndLocations(
-          m.getCatalogTracker(), Bytes.toString(TABLENAME));
+      tableRegions = MetaReader.getTableRegionsAndLocations(m.getCatalogTracker(),
+        TABLENAME, false);
 
       LOG.info("Regions: " + Joiner.on(',').join(tableRegions));
       // We have three regions because one is split-in-progress
@@ -118,22 +117,21 @@ public class TestMaster {
     }
   }
 
-  static class RegionOpenListener implements EventHandlerListener {
-    CountDownLatch aboutToOpen, proceed;
+  static class RegionSplitListener implements EventHandlerListener {
+    CountDownLatch split, proceed;
 
-    public RegionOpenListener(CountDownLatch aboutToOpen, CountDownLatch proceed)
-    {
-      this.aboutToOpen = aboutToOpen;
+    public RegionSplitListener(CountDownLatch split, CountDownLatch proceed) {
+      this.split = split;
       this.proceed = proceed;
     }
 
     @Override
     public void afterProcess(EventHandler event) {
-      if (event.getEventType() != EventType.RS_ZK_REGION_OPENED) {
+      if (event.getEventType() != EventType.RS_ZK_REGION_SPLIT) {
         return;
       }
       try {
-        aboutToOpen.countDown();
+        split.countDown();
         proceed.await(60, TimeUnit.SECONDS);
       } catch (InterruptedException ie) {
         throw new RuntimeException(ie);

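The listener rename above keeps the same two-latch handshake, only keyed on RS_ZK_REGION_SPLIT instead of RS_ZK_REGION_OPENED: the handler counts down one latch to announce the event, then blocks on a second latch until the test has finished asserting against the in-progress state. Stripped of the HBase event types, the pattern is plain java.util.concurrent; in this sketch a Thread stands in for the executor service that runs the event handler.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class TwoLatchHandshake {
  public static void main(String[] args) throws Exception {
    final CountDownLatch eventSeen = new CountDownLatch(1);
    final CountDownLatch proceed = new CountDownLatch(1);

    Thread handler = new Thread(new Runnable() {
      public void run() {
        // ... the event of interest is processed here ...
        eventSeen.countDown();           // announce the event to the test
        try {
          proceed.await(60, TimeUnit.SECONDS);  // hold here until the test is done asserting
        } catch (InterruptedException ie) {
          throw new RuntimeException(ie);
        }
      }
    });
    handler.start();

    // Test thread: wait for the event, assert against the in-progress state,
    // then release the handler so it can finish.
    if (!eventSeen.await(60, TimeUnit.SECONDS)) {
      throw new AssertionError("event never observed");
    }
    // ... assertions against the in-progress state go here ...
    proceed.countDown();
    handler.join();
  }
}
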
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java Thu Oct 13 23:12:30 2011
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertEqu
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
@@ -34,11 +33,18 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.executor.RegionTransitionData;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.executor.EventHandler.EventType;
+import org.apache.hadoop.hbase.executor.RegionTransitionData;
 import org.apache.hadoop.hbase.master.AssignmentManager.RegionState;
-import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -49,8 +55,6 @@ import org.apache.hadoop.hbase.util.JVMC
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;
 import org.apache.hadoop.hbase.zookeeper.ZKTable;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.KeeperException.NodeExistsException;
 import org.junit.Test;
 
 public class TestMasterFailover {
@@ -131,7 +135,7 @@ public class TestMasterFailover {
     // Stop the cluster
     TEST_UTIL.shutdownMiniCluster();
   }
-  
+
   @Test
   public void testShouldCheckMasterFailOverWhenMETAIsInOpenedState()
       throws Exception {
@@ -1006,4 +1010,4 @@ public class TestMasterFailover {
   private void log(String string) {
     LOG.info("\n\n" + string + " \n\n");
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java Thu Oct 13 23:12:30 2011
@@ -128,8 +128,10 @@ public class TestMergeTable {
       assertTrue("originalTableRegions=" + originalTableRegions.size() +
         ", postMergeTableRegions=" + postMergeTableRegions.size(),
         postMergeTableRegions.size() < originalTableRegions.size());
+      LOG.info("Done with merge");
     } finally {
       UTIL.shutdownMiniCluster();
+      LOG.info("After cluster shutdown");
     }
   }
 

Modified: hbase/trunk/src/test/ruby/hbase/admin_test.rb
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/ruby/hbase/admin_test.rb?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/ruby/hbase/admin_test.rb (original)
+++ hbase/trunk/src/test/ruby/hbase/admin_test.rb Thu Oct 13 23:12:30 2011
@@ -152,7 +152,7 @@ module Hbase
       assert_equal(['a:', 'b:'], table(@create_test_name).get_all_columns.sort)
      end
 
-    define_test "create hould work with hash column args" do
+    define_test "create should work with hash column args" do
       drop_test_table(@create_test_name)
       admin.create(@create_test_name, { NAME => 'a'}, { NAME => 'b'})
       assert_equal(['a:', 'b:'], table(@create_test_name).get_all_columns.sort)
@@ -160,14 +160,14 @@ module Hbase
 
     #-------------------------------------------------------------------------------
 
-#    define_test "close should work without region server name" do
-#      if admin.exists?(@create_test_name)
-#        admin.disable(@create_test_name)
-#        admin.drop(@create_test_name)
-#      end
-#      admin.create(@create_test_name, 'foo')
-#      admin.close_region(@create_test_name + ',,0')
-#    end
+    define_test "close should work without region server name" do
+      if admin.exists?(@create_test_name)
+        admin.disable(@create_test_name)
+        admin.drop(@create_test_name)
+      end
+      admin.create(@create_test_name, 'foo')
+      admin.close_region(@create_test_name + ',,0', nil)
+    end
 
     #-------------------------------------------------------------------------------
 
@@ -187,13 +187,14 @@ module Hbase
       table(@test_name).put(1, "x:a", 1)
       table(@test_name).put(2, "x:a", 2)
       assert_equal(2, table(@test_name).count)
-      admin.truncate(@test_name)
+      # This is hacky.  Need to get the configuration into the admin instance.
+      admin.truncate(@test_name, $TEST_CLUSTER.getConfiguration)
       assert_equal(0, table(@test_name).count)
     end
 
     define_test "truncate should yield log records" do
       logs = []
-      admin.truncate(@test_name) do |log|
+      admin.truncate(@test_name, $TEST_CLUSTER.getConfiguration) do |log|
         assert_kind_of(String, log)
         logs << log
       end

Modified: hbase/trunk/src/test/ruby/shell/shell_test.rb
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/ruby/shell/shell_test.rb?rev=1183158&r1=1183157&r2=1183158&view=diff
==============================================================================
--- hbase/trunk/src/test/ruby/shell/shell_test.rb (original)
+++ hbase/trunk/src/test/ruby/shell/shell_test.rb Thu Oct 13 23:12:30 2011
@@ -25,7 +25,7 @@ require 'shell/formatter'
 class ShellTest < Test::Unit::TestCase
   def setup
     @formatter = ::Shell::Formatter::Console.new()
-    @hbase = ::Hbase::Hbase.new
+    @hbase = ::Hbase::Hbase.new($TEST_CLUSTER.getConfiguration)
     @shell = Shell::Shell.new(@hbase, @formatter)
   end