Posted to commits@sqoop.apache.org by ja...@apache.org on 2012/11/29 17:30:38 UTC

git commit: SQOOP-725: Enable HBase tests in sqoop 1

Updated Branches:
  refs/heads/trunk dc4a82102 -> 33a7a8141


SQOOP-725: Enable HBase tests in sqoop 1

(Hari Shreedharan via Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/33a7a814
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/33a7a814
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/33a7a814

Branch: refs/heads/trunk
Commit: 33a7a81412d789371a235323ac15f875e1870894
Parents: dc4a821
Author: Jarek Jarcec Cecho <ja...@apache.org>
Authored: Thu Nov 29 08:29:44 2012 -0800
Committer: Jarek Jarcec Cecho <ja...@apache.org>
Committed: Thu Nov 29 08:29:44 2012 -0800

----------------------------------------------------------------------
 build.xml                                          |    2 +-
 ivy/ivysettings.xml                                |    1 +
 .../com/cloudera/sqoop/hbase/HBaseTestCase.java    |  108 +++++++++++----
 .../com/cloudera/sqoop/hbase/TestHBaseImport.java  |   18 ---
 .../cloudera/sqoop/hbase/TestHBaseQueryImport.java |    6 -
 5 files changed, 81 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/33a7a814/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 12ba785..a0b6326 100644
--- a/build.xml
+++ b/build.xml
@@ -77,7 +77,7 @@
       <equals arg1="${hadoopversion}" arg2="200" />
       <then>
         <property name="hadoop.version" value="2.0.0-alpha" />
-        <property name="hbase.version" value="0.92.0" />
+        <property name="hbase.version" value="0.94.2" />
         <property name="zookeeper.version" value="3.4.2" />
         <property name="hadoop.version.full" value="2.0.0-alpha" />
       </then>

http://git-wip-us.apache.org/repos/asf/sqoop/blob/33a7a814/ivy/ivysettings.xml
----------------------------------------------------------------------
diff --git a/ivy/ivysettings.xml b/ivy/ivysettings.xml
index 251131c..c4cc561 100644
--- a/ivy/ivysettings.xml
+++ b/ivy/ivysettings.xml
@@ -66,6 +66,7 @@ under the License.
 
     <chain name="default" dual="true" checkmodified="true"
         changingPattern=".*SNAPSHOT">
+      <resolver ref="fs"/>
       <resolver ref="apache-snapshot"/> 
       <resolver ref="cloudera-releases"/>
       <resolver ref="cloudera-staging"/> 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/33a7a814/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
index 3f5899e..65ff87b 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
@@ -28,13 +28,17 @@ import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.VersionInfo;
 
 import org.junit.After;
 import org.junit.Before;
@@ -42,6 +46,10 @@ import org.junit.Before;
 import com.cloudera.sqoop.testutil.CommonArgs;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
 import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import java.io.File;
+import java.lang.reflect.Method;
+import java.util.UUID;
+import org.apache.commons.io.FileUtils;
 
 /**
  * Utility methods that facilitate HBase import tests.
@@ -105,56 +113,94 @@ public abstract class HBaseTestCase extends ImportJobTestCase {
 
     return args.toArray(new String[0]);
   }
-
+  // Starts a mini hbase cluster in this process.
   // Starts a mini hbase cluster in this process.
   private HBaseTestingUtility hbaseTestUtil;
-
-  private void startMaster() throws Exception {
-    if (null == hbaseTestUtil) {
-      Configuration conf = new Configuration();
-      conf = HBaseConfiguration.addHbaseResources(conf);
-      hbaseTestUtil = new HBaseTestingUtility(conf);
-      hbaseTestUtil.startMiniCluster(1);
-    }
-  }
+  private String workDir = createTempDir().getAbsolutePath();
+  private MiniZooKeeperCluster zookeeperCluster;
+  private MiniHBaseCluster hbaseCluster;
 
   @Override
   @Before
   public void setUp() {
-    if (!isHadoop20()) {
-      return;
-    }
-    HBaseTestCase.recordTestBuildDataProperty();
     try {
-      startMaster();
-    } catch (Exception e) {
-      fail(e.toString());
+      HBaseTestCase.recordTestBuildDataProperty();
+      String hbaseDir = new File(workDir, "hbase").getAbsolutePath();
+      String hbaseRoot = "file://" + hbaseDir;
+      Configuration hbaseConf = HBaseConfiguration.create();
+      hbaseConf.set(HConstants.HBASE_DIR, hbaseRoot);
+      //Hbase 0.90 does not have HConstants.ZOOKEEPER_CLIENT_PORT
+      hbaseConf.setInt("hbase.zookeeper.property.clientPort", 21818);
+      hbaseConf.set(HConstants.ZOOKEEPER_QUORUM, "0.0.0.0");
+      hbaseConf.setInt("hbase.master.info.port", -1);
+      hbaseConf.setInt("hbase.zookeeper.property.maxClientCnxns", 500);
+      String zookeeperDir = new File(workDir, "zk").getAbsolutePath();
+      int zookeeperPort = 21818;
+      zookeeperCluster = new MiniZooKeeperCluster();
+      Method m;
+      Class<?> zkParam[] = {Integer.TYPE};
+      try {
+        m = MiniZooKeeperCluster.class.getDeclaredMethod("setDefaultClientPort",
+                zkParam);
+      } catch (NoSuchMethodException e) {
+        m = MiniZooKeeperCluster.class.getDeclaredMethod("setClientPort",
+                zkParam);
+      }
+      m.invoke(zookeeperCluster, new Object[]{new Integer(zookeeperPort)});
+      zookeeperCluster.startup(new File(zookeeperDir));
+      hbaseCluster = new MiniHBaseCluster(hbaseConf, 1);
+      HMaster master = hbaseCluster.getMaster();
+      Object serverName = master.getServerName();
+
+      String hostAndPort;
+      if (serverName instanceof String) {
+        System.out.println("Server name is string, using HServerAddress.");
+        m = HMaster.class.getDeclaredMethod("getMasterAddress",
+                new Class<?>[]{});
+        Class<?> clazz = Class.forName("org.apache.hadoop.hbase.HServerAddress");
+        /*
+         * Call method to get server address
+         */
+        Object serverAddr = clazz.cast(m.invoke(master, new Object[]{}));
+        //returns the address as hostname:port
+        hostAndPort = serverAddr.toString();
+      } else {
+        System.out.println("ServerName is org.apache.hadoop.hbase.ServerName,"
+                + "using getHostAndPort()");
+        Class<?> clazz = Class.forName("org.apache.hadoop.hbase.ServerName");
+        m = clazz.getDeclaredMethod("getHostAndPort", new Class<?>[]{});
+        hostAndPort = m.invoke(serverName, new Object[]{}).toString();
+      }
+      hbaseConf.set("hbase.master", hostAndPort);
+      hbaseTestUtil = new HBaseTestingUtility(hbaseConf);
+      hbaseTestUtil.setZkCluster(zookeeperCluster);
+      hbaseCluster.startMaster();
+      super.setUp();
+    } catch (Throwable e) {
+      throw new RuntimeException(e);
     }
-    super.setUp();
   }
 
-
   public void shutdown() throws Exception {
     LOG.info("In shutdown() method");
     if (null != hbaseTestUtil) {
       LOG.info("Shutting down HBase cluster");
-      hbaseTestUtil.shutdownMiniCluster();
-      this.hbaseTestUtil = null;
+      hbaseCluster.shutdown();
+      zookeeperCluster.shutdown();
+      hbaseTestUtil = null;
     }
+    FileUtils.deleteDirectory(new File(workDir));
     LOG.info("shutdown() method returning.");
   }
 
   @Override
   @After
   public void tearDown() {
-    if (!isHadoop20()) {
-      return;
-    }
     try {
       shutdown();
     } catch (Exception e) {
       LOG.warn("Error shutting down HBase minicluster: "
-          + StringUtils.stringifyException(e));
+              + StringUtils.stringifyException(e));
     }
     HBaseTestCase.restoreTestBuidlDataProperty();
     super.tearDown();
@@ -180,8 +226,12 @@ public abstract class HBaseTestCase extends ImportJobTestCase {
       table.close();
     }
   }
-
-  protected boolean isHadoop20() {
-    return VersionInfo.getVersion().startsWith("0.20");
+  public static File createTempDir() {
+    File baseDir = new File(System.getProperty("java.io.tmpdir"));
+    File tempDir = new File(baseDir, UUID.randomUUID().toString());
+    if (tempDir.mkdir()) {
+      return tempDir;
+    }
+    throw new IllegalStateException("Failed to create directory");
   }
 }
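
For reference, the setUp() change above resolves the MiniZooKeeperCluster client-port setter by reflection so the same test code runs against more than one HBase release. A minimal sketch of that pattern in isolation (the ZkClientPortCompat helper name is hypothetical; the HBase class and method names are the ones the patch probes):

  import java.lang.reflect.Method;
  import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;

  public final class ZkClientPortCompat {

    private ZkClientPortCompat() {
    }

    /** Sets the mini ZooKeeper client port via whichever setter this HBase version provides. */
    public static void setClientPort(MiniZooKeeperCluster zkCluster, int port) throws Exception {
      Method m;
      try {
        // Some HBase versions expose setDefaultClientPort(int)...
        m = MiniZooKeeperCluster.class.getDeclaredMethod("setDefaultClientPort", Integer.TYPE);
      } catch (NoSuchMethodException e) {
        // ...others only expose setClientPort(int).
        m = MiniZooKeeperCluster.class.getDeclaredMethod("setClientPort", Integer.TYPE);
      }
      m.invoke(zkCluster, Integer.valueOf(port));
    }
  }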

http://git-wip-us.apache.org/repos/asf/sqoop/blob/33a7a814/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java b/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
index 2108e28..d5e15e8 100644
--- a/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
+++ b/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
@@ -29,9 +29,6 @@ public class TestHBaseImport extends HBaseTestCase {
 
   @Test
   public void testBasicUsage() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     // Create the HBase table in Sqoop as we run the job.
     String [] argv = getArgv(true, "BasicUsage", "BasicColFam", true, null);
     String [] types = { "INT", "INT" };
@@ -43,9 +40,6 @@ public class TestHBaseImport extends HBaseTestCase {
 
   @Test
   public void testMissingTableFails() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     // Test that if the table doesn't exist, we fail unless we
     // explicitly create the table.
     String [] argv = getArgv(true, "MissingTable", "MissingFam", false, null);
@@ -62,9 +56,6 @@ public class TestHBaseImport extends HBaseTestCase {
 
   @Test
   public void testOverwriteSucceeds() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     // Test that we can create a table and then import immediately
     // back on top of it without problem.
     String [] argv = getArgv(true, "OverwriteT", "OverwriteF", true, null);
@@ -80,9 +71,6 @@ public class TestHBaseImport extends HBaseTestCase {
 
   @Test
   public void testStrings() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     String [] argv = getArgv(true, "stringT", "stringF", true, null);
     String [] types = { "INT", "VARCHAR(32)" };
     String [] vals = { "0", "'abc'" };
@@ -93,9 +81,6 @@ public class TestHBaseImport extends HBaseTestCase {
 
   @Test
   public void testNulls() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     String [] argv = getArgv(true, "nullT", "nullF", true, null);
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "null" };
@@ -111,9 +96,6 @@ public class TestHBaseImport extends HBaseTestCase {
 
   @Test
   public void testExitFailure() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "43" };
     createTableWithColTypes(types, vals);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/33a7a814/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java b/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
index 705dcae..74eff50 100644
--- a/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
+++ b/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
@@ -29,9 +29,6 @@ public class TestHBaseQueryImport extends HBaseTestCase {
 
   @Test
   public void testImportFromQuery() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "43" };
     createTableWithColTypes(types, vals);
@@ -50,9 +47,6 @@ public class TestHBaseQueryImport extends HBaseTestCase {
 
   @Test
   public void testExitFailure() throws IOException {
-    if (!isHadoop20()) {
-      return;
-    }
     String [] types = { "INT", "INT", "INT" };
     String [] vals = { "0", "42", "43" };
     createTableWithColTypes(types, vals);