Posted to commits@bigtop.apache.org by rv...@apache.org on 2013/05/30 23:44:08 UTC

[2/2] git commit: BIGTOP-949. Add Sqoop tests

BIGTOP-949. Add Sqoop tests


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/b10e974b
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/b10e974b
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/b10e974b

Branch: refs/heads/master
Commit: b10e974b128d9222ea24dafa9488a752f12e85f0
Parents: b709cd5
Author: Anatoli Fomenko <an...@cloudera.com>
Authored: Thu May 30 10:58:13 2013 -0700
Committer: Roman Shaposhnik <rv...@cloudera.com>
Committed: Thu May 30 14:43:35 2013 -0700

----------------------------------------------------------------------
 bigtop-packages/src/common/hadoop/init-hdfs.sh     |   13 +-
 bigtop-tests/test-artifacts/sqoop/pom.xml          |   12 +
 .../bigtop/itest/sqoop/TestSqoopExport.groovy      |  182 ++++++++++---
 .../bigtop/itest/sqoop/TestSqoopImport.groovy      |  215 +++++++++++----
 .../test-execution/smokes/sqoop-smokes/pom.xml     |   90 ------
 bigtop-tests/test-execution/smokes/sqoop/pom.xml   |  111 +++-----
 pom.xml                                            |   10 +
 7 files changed, 366 insertions(+), 267 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/bigtop-packages/src/common/hadoop/init-hdfs.sh
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hadoop/init-hdfs.sh b/bigtop-packages/src/common/hadoop/init-hdfs.sh
index 35559af..43f3d62 100755
--- a/bigtop-packages/src/common/hadoop/init-hdfs.sh
+++ b/bigtop-packages/src/common/hadoop/init-hdfs.sh
@@ -51,7 +51,12 @@ su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown root /user/root'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/hue'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/hue'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown hue /user/hue'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/sqoop'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/sqoop'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown sqoop /user/sqoop'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/oozie'
+su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown -R oozie /user/oozie'
 # Do more setup for oozie
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share/lib'
@@ -59,7 +64,6 @@ su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share/lib/hive'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share/lib/mapreduce-streaming'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share/lib/distcp'
 su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share/lib/pig'
-su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir /user/oozie/share/lib/sqoop'
 # Copy over files from local filesystem to HDFS that oozie might need
 if ls /usr/lib/hive/lib/*.jar &> /dev/null; then
   su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -put /usr/lib/hive/lib/*.jar /user/oozie/share/lib/hive'
@@ -76,10 +80,3 @@ fi
 if ls /usr/lib/pig/{lib/,}*.jar &> /dev/null; then
   su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -put /usr/lib/pig/{lib/,}*.jar /user/oozie/share/lib/pig'
 fi
-
-if ls /usr/lib/sqoop/{lib/,}*.jar &> /dev/null; then
-  su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -put /usr/lib/sqoop/{lib/,}*.jar /user/share/lib/sqoop'
-fi
-
-su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/oozie'
-su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown -R oozie /user/oozie'
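
For reference, a minimal sketch of how the new HDFS layout could be verified from a test, written against the same org.apache.bigtop.itest.shell.Shell helper the smoke tests already use (the checks mirror the mkdir/chmod/chown calls above; the assertion messages are illustrative):

    import org.apache.bigtop.itest.shell.Shell

    Shell sh = new Shell("/bin/bash -s")
    // init-hdfs.sh should have created both user directories
    ["/user/sqoop", "/user/oozie"].each { dir ->
      sh.exec("hadoop fs -test -d $dir")
      assert sh.getRet() == 0 : "$dir was not created"
    }
    // /user/sqoop is expected to show up under /user, owned by sqoop
    sh.exec("hadoop fs -ls /user")
    assert sh.getOut().any { it.contains("/user/sqoop") }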

http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/bigtop-tests/test-artifacts/sqoop/pom.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/pom.xml b/bigtop-tests/test-artifacts/sqoop/pom.xml
index a1d5fa4..a429eeb 100644
--- a/bigtop-tests/test-artifacts/sqoop/pom.xml
+++ b/bigtop-tests/test-artifacts/sqoop/pom.xml
@@ -31,4 +31,16 @@
   <artifactId>sqoop-smoke</artifactId>
   <version>0.6.0-SNAPSHOT</version>
   <name>sqoopsmokes</name>
+
+  <dependencies>
+      <dependency>
+          <groupId>org.apache.sqoop</groupId>
+          <artifactId>sqoop-core</artifactId>
+      </dependency>
+      <dependency>
+          <groupId>org.apache.sqoop</groupId>
+          <artifactId>sqoop-client</artifactId>
+      </dependency>
+  </dependencies>
+
 </project>

http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
index b9c561c..145c1a6 100644
--- a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
+++ b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopExport.groovy
@@ -16,10 +16,19 @@
  * limitations under the License.
  */
 
-package org.apache.itest.sqoop;
+package org.apache.bigtop.itest.sqoop
+
+import org.apache.sqoop.client.SqoopClient
+import org.apache.sqoop.model.MConnection
+import org.apache.sqoop.model.MFormList
+import org.apache.sqoop.model.MJob
+import org.apache.sqoop.model.MPersistableEntity
+import org.apache.sqoop.model.MSubmission
+import org.apache.sqoop.validation.Status;
 
 import static org.junit.Assert.assertEquals
 import static org.junit.Assert.assertNotNull
+import static org.junit.Assert.assertNotSame
 import static org.junit.Assert.assertTrue
 import org.junit.AfterClass
 import org.junit.BeforeClass
@@ -38,34 +47,13 @@ class TestSqoopExport {
   private static final String MYSQL_PASSWORD =
     (mysql_password == null) ? "" : mysql_password;
   private static final String MYSQL_HOST = System.getProperty("mysql.host", "localhost");
-  private static final String HADOOP_HOME =
-    System.getenv('HADOOP_HOME');
-  private static String streaming_home = System.getenv('STREAMING_HOME');
-  private static final String STREAMING_HOME =
-    (streaming_home == null) ? HADOOP_HOME + "/contrib/streaming" :
-        streaming_home;
-  private static final String SQOOP_HOME =
-    System.getenv("SQOOP_HOME");
-  static {
-    assertNotNull("HADOOP_HOME is not set", HADOOP_HOME);
-    assertNotNull("SQOOP_HOME is not set", SQOOP_HOME);
-    assertNotNull("mysql connector jar is required to be present in $SQOOP_HOME/lib",
-      JarContent.getJarName("$SQOOP_HOME/lib", "mysql-connector-java.*.jar"));
-  }
-  private static String sqoop_jar =
-    JarContent.getJarName(SQOOP_HOME, "sqoop-1.*.jar");
-  private static String streaming_jar =
-    JarContent.getJarName(STREAMING_HOME, "hadoop.*streaming.*.jar");
-  static {
-    assertNotNull("Can't find sqoop.jar", sqoop_jar);
-    assertNotNull("Can't find hadoop-streaming.jar", streaming_jar);
-  }
-  private static final String SQOOP_JAR = SQOOP_HOME + "/" + sqoop_jar;
-  private static final String STREAMING_JAR = STREAMING_HOME + "/" + streaming_jar;
+
   private static final String MYSQL_COMMAND =
-    "mysql --user=$MYSQL_USER" +
+    "mysql -h $MYSQL_HOST --user=$MYSQL_USER" +
     (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
   private static final String MYSQL_DBNAME = System.getProperty("mysql.dbname", "mysqltestdb");
+  private static final String SQOOP_CONNECTION_STRING =
+    "jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME";
   private static final String SQOOP_CONNECTION =
     "--connect jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME --username=$MYSQL_USER" +
     (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
@@ -73,7 +61,8 @@ class TestSqoopExport {
     System.out.println("SQOOP_CONNECTION string is " + SQOOP_CONNECTION );
   }
   private static final String DATA_DIR = System.getProperty("data.dir", "mysql-files");
-  private static final String INPUT = System.getProperty("input.dir", "input-dir");
+  private static final String INPUT = System.getProperty("input.dir", "/tmp/input-dir");
+  private static final String SQOOP_SERVER_URL = System.getProperty("sqoop.server.url", "http://localhost:12000/sqoop/");
   private static Shell sh = new Shell("/bin/bash -s");
 
   @BeforeClass
@@ -86,10 +75,25 @@ class TestSqoopExport {
     }
     sh.exec("hadoop fs -mkdir $INPUT");
     assertTrue("Could not create $INPUT directory", sh.getRet() == 0);
+
+    sh.exec("hadoop fs -mkdir $INPUT/testtable");
+    assertTrue("Could not create $INPUT/testtable directory", sh.getRet() == 0);
+    sh.exec("hadoop fs -mkdir $INPUT/t_bool");
+    assertTrue("Could not create $INPUT/t_bool directory", sh.getRet() == 0);
+    sh.exec("hadoop fs -mkdir $INPUT/t_date");
+    assertTrue("Could not create $INPUT/t_date directory", sh.getRet() == 0);
+    sh.exec("hadoop fs -mkdir $INPUT/t_string");
+    assertTrue("Could not create $INPUT/t_string directory", sh.getRet() == 0);
+    sh.exec("hadoop fs -mkdir $INPUT/t_fp");
+    assertTrue("Could not create $INPUT/t_fp directory", sh.getRet() == 0);
+    sh.exec("hadoop fs -mkdir $INPUT/t_int");
+    assertTrue("Could not create $INPUT/t_int directory", sh.getRet() == 0);
+
     // unpack resource
     JarContent.unpackJarContainer(TestSqoopExport.class, '.' , null)
+
     // upload data to HDFS 
-    sh.exec("hadoop fs -put $DATA_DIR/sqoop-testtable.out input-dir/testtable/part-m-00000");
+    sh.exec("hadoop fs -put $DATA_DIR/sqoop-testtable.out $INPUT/testtable/part-m-00000");
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_bool.out $INPUT/t_bool/part-m-00000");
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_date-export.out $INPUT/t_date/part-m-00000");
     sh.exec("hadoop fs -put $DATA_DIR/sqoop-t_string.out $INPUT/t_string/part-m-00000");
@@ -107,27 +111,116 @@ class TestSqoopExport {
     if ('YES'.equals(System.getProperty('delete.testdata','no').toUpperCase())) {
       sh.exec("hadoop fs -test -e $INPUT");
       if (sh.getRet() == 0) {
-       // sh.exec("hadoop fs -rmr -skipTrash $INPUT");
+        sh.exec("hadoop fs -rmr -skipTrash $INPUT");
         assertTrue("Deletion of $INPUT from HDFS failed",
             sh.getRet() == 0);
       }
     }
   }
 
+  protected SqoopClient getClient() {
+    String sqoopServerUrl = "$SQOOP_SERVER_URL".toString();
+    return new SqoopClient(sqoopServerUrl);
+  }
+
+  /**
+   * Fill connection form with the MySQL JDBC driver and connection parameters.
+   *
+   * @param connection MConnection object to fill
+   */
+  protected void fillConnectionForm(MConnection connection) {
+    MFormList forms = connection.getConnectorPart();
+    forms.getStringInput("connection.jdbcDriver").setValue("com.mysql.jdbc.Driver");
+    forms.getStringInput("connection.connectionString").setValue("$SQOOP_CONNECTION_STRING".toString());
+    forms.getStringInput("connection.username").setValue("$MYSQL_USER".toString());
+    forms.getStringInput("connection.password").setValue("$MYSQL_PASSWORD".toString());
+  }
+
+  /**
+   * Fill input form for an export job. The framework part of the
+   * job holds the HDFS directory from which data will be exported
+   * to the database table.
+   *
+   * @param job MJob object to fill
+   * @param inputDir HDFS directory containing the data to export
+   */
+  protected void fillInputForm(MJob job, String inputDir) {
+    MFormList forms = job.getFrameworkPart();
+    forms.getStringInput("input.inputDirectory").setValue(inputDir);
+  }
+
+  /**
+   * Create connection.
+   *
+   * Asserts that creation succeeded and that a persistence id was assigned.
+   *
+   * @param connection MConnection object to create
+   */
+  protected void createConnection(MConnection connection) {
+    assertEquals(Status.FINE, getClient().createConnection(connection));
+    assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, connection.getPersistenceId());
+  }
+
+  /**
+   * Create job.
+   *
+   * Asserts that creation succeeded and that a persistence id was assigned.
+   *
+   * @param job MJob object to create
+   */
+  protected void createJob(MJob job) {
+    assertEquals(Status.FINE, getClient().createJob(job));
+    assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, job.getPersistenceId());
+  }
+
+  protected void runSqoopClientExport(String tableName) {
+    // Connection creation
+    MConnection connection = getClient().newConnection(1L);
+    fillConnectionForm(connection);
+    createConnection(connection);
+
+    // Job creation
+    MJob job = getClient().newJob(connection.getPersistenceId(), MJob.Type.EXPORT);
+
+    // Connector values
+    MFormList forms = job.getConnectorPart();
+    forms.getStringInput("table.schemaName").setValue("mysqltestdb");
+    forms.getStringInput("table.tableName").setValue(tableName);
+    // Framework values
+    fillInputForm(job, "$INPUT".toString() + "/" + tableName);
+    createJob(job);
+
+    MSubmission submission = getClient().startSubmission(job.getPersistenceId());
+    assertTrue(submission.getStatus().isRunning());
+
+    // Wait until the job finishes - this active waiting will be removed
+    // once the Sqoop client API gains blocking support.
+    while (true) {
+      Thread.sleep(5000);
+      submission = getClient().getSubmissionStatus(job.getPersistenceId());
+      if (!submission.getStatus().isRunning())
+        break;
+    }
+  }
+
+
   @Test
   public void testBooleanExport() {
-    sh.exec("sqoop export $SQOOP_CONNECTION --table t_bool --export-dir $INPUT/t_bool");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_bool";
+
+    runSqoopClientExport(tableName);
+
     sh.exec("echo 'use mysqltestdb;select * from t_bool' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_bool.out");
     assertEquals("sqoop export did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_bool-export.out t_bool.out").getRet());
   }
 
-  
   @Test
   public void testIntegerExport() {
-    sh.exec("sqoop export $SQOOP_CONNECTION --table t_int --export-dir $INPUT/t_int");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_int";
+
+    runSqoopClientExport(tableName);
+
     sh.exec("echo 'use mysqltestdb;select * from t_int' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_int.out");
     assertEquals("sqoop export did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_int.out t_int.out").getRet());
@@ -135,8 +228,10 @@ class TestSqoopExport {
 
   @Test
   public void testFixedPointFloatingPointExport() {
-    sh.exec("sqoop export $SQOOP_CONNECTION --table t_fp --export-dir $INPUT/t_fp");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_fp";
+
+    runSqoopClientExport(tableName);
+
     sh.exec("echo 'use mysqltestdb;select * from t_fp' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_fp.out");
     assertEquals("sqoop export did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_fp.out t_fp.out").getRet());
@@ -144,8 +239,10 @@ class TestSqoopExport {
 
   @Test
   public void testDateTimeExport() {
-    sh.exec("sqoop export $SQOOP_CONNECTION --table t_date --export-dir $INPUT/t_date");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_date";
+
+    runSqoopClientExport(tableName);
+
     sh.exec("echo 'use mysqltestdb;select * from t_date' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_date.out");
     assertEquals("sqoop export did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_date.out t_date.out").getRet());
@@ -153,12 +250,13 @@ class TestSqoopExport {
 
   @Test
   public void testStringExport() {
-    sh.exec("sqoop export $SQOOP_CONNECTION --table t_string --export-dir $INPUT/t_string");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_string";
+
+    runSqoopClientExport(tableName);
+
     sh.exec("echo 'use mysqltestdb;select * from t_string' | $MYSQL_COMMAND --skip-column-names | sed 's/\t/,/g' > t_string.out");
     assertEquals("sqoop export did not write expected data",
-        0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
+            0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
   }
 
 }
-
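
The helpers above replace the old command-line "sqoop export" invocations with the Sqoop 2 client API: create a connection, create an export job bound to it, start a submission, and poll until it completes. A condensed, self-contained sketch of that flow follows (the server URL, credentials, schema, table name, and input directory are placeholder assumptions, not values fixed by this commit):

    import org.apache.sqoop.client.SqoopClient
    import org.apache.sqoop.model.MConnection
    import org.apache.sqoop.model.MFormList
    import org.apache.sqoop.model.MJob
    import org.apache.sqoop.model.MSubmission

    // 1. Connection: JDBC driver, connection string, credentials.
    SqoopClient client = new SqoopClient("http://localhost:12000/sqoop/")
    MConnection conn = client.newConnection(1L)
    MFormList connForms = conn.getConnectorPart()
    connForms.getStringInput("connection.jdbcDriver").setValue("com.mysql.jdbc.Driver")
    connForms.getStringInput("connection.connectionString").setValue("jdbc:mysql://localhost/mysqltestdb")
    connForms.getStringInput("connection.username").setValue("root")
    client.createConnection(conn)

    // 2. Export job bound to the connection: source directory in HDFS,
    //    target schema/table in MySQL.
    MJob job = client.newJob(conn.getPersistenceId(), MJob.Type.EXPORT)
    job.getConnectorPart().getStringInput("table.schemaName").setValue("mysqltestdb")
    job.getConnectorPart().getStringInput("table.tableName").setValue("t_int")
    job.getFrameworkPart().getStringInput("input.inputDirectory").setValue("/tmp/input-dir/t_int")
    client.createJob(job)

    // 3. Start the submission and poll until the underlying job stops running.
    MSubmission sub = client.startSubmission(job.getPersistenceId())
    while (sub.getStatus().isRunning()) {
      Thread.sleep(5000)
      sub = client.getSubmissionStatus(job.getPersistenceId())
    }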

http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
index 5845ee2..0aaaed6 100644
--- a/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
+++ b/bigtop-tests/test-artifacts/sqoop/src/main/groovy/org/apache/bigtop/itest/sqoop/TestSqoopImport.groovy
@@ -16,10 +16,16 @@
  * limitations under the License.
  */
 
-package org.apache.itest.sqoop;
+package org.apache.bigtop.itest.sqoop
+
+import org.apache.sqoop.client.SqoopClient
+import org.apache.sqoop.model.MPersistableEntity
+import org.apache.sqoop.validation.Status
+import org.junit.Ignore
 
 import static org.junit.Assert.assertEquals
 import static org.junit.Assert.assertNotNull
+import static org.junit.Assert.assertNotSame
 import static org.junit.Assert.assertTrue
 import org.junit.AfterClass
 import org.junit.BeforeClass
@@ -28,6 +34,13 @@ import org.junit.Test
 import org.apache.bigtop.itest.JarContent
 import org.apache.bigtop.itest.shell.Shell
 
+import org.apache.sqoop.framework.configuration.OutputFormat
+import org.apache.sqoop.framework.configuration.StorageType
+import org.apache.sqoop.model.MConnection
+import org.apache.sqoop.model.MFormList
+import org.apache.sqoop.model.MJob
+import org.apache.sqoop.model.MSubmission;
+
 class TestSqoopImport {
   private static String mysql_user =
     System.getenv("MYSQL_USER");
@@ -38,34 +51,13 @@ class TestSqoopImport {
   private static final String MYSQL_PASSWORD =
     (mysql_password == null) ? "" : mysql_password;
   private static final String MYSQL_HOST = System.getProperty("mysql.host", "localhost");
-  private static final String HADOOP_HOME =
-    System.getenv('HADOOP_HOME');
-  private static String streaming_home = System.getenv('STREAMING_HOME');
-  private static final String STREAMING_HOME =
-    (streaming_home == null) ? HADOOP_HOME + "/contrib/streaming" :
-        streaming_home;
-  private static final String SQOOP_HOME =
-    System.getenv("SQOOP_HOME");
-  static {
-    assertNotNull("HADOOP_HOME is not set", HADOOP_HOME);
-    assertNotNull("SQOOP_HOME is not set", SQOOP_HOME);
-    assertNotNull("mysql connector jar is required to be present in $SQOOP_HOME/lib",
-      JarContent.getJarName("$SQOOP_HOME/lib", "mysql-connector-java.*.jar"));
-  }
-  private static String sqoop_jar =
-    JarContent.getJarName(SQOOP_HOME, "sqoop-1.*.jar");
-  private static String streaming_jar =
-    JarContent.getJarName(STREAMING_HOME, "hadoop.*streaming.*.jar");
-  static {
-    assertNotNull("Can't find sqoop.jar", sqoop_jar);
-    assertNotNull("Can't find hadoop-streaming.jar", streaming_jar);
-  }
-  private static final String SQOOP_JAR = SQOOP_HOME + "/" + sqoop_jar;
-  private static final String STREAMING_JAR = STREAMING_HOME + "/" + streaming_jar;
+
   private static final String MYSQL_COMMAND =
-    "mysql --user=$MYSQL_USER" +
+    "mysql -h $MYSQL_HOST --user=$MYSQL_USER" +
     (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
   private static final String MYSQL_DBNAME = System.getProperty("mysql.dbname", "mysqltestdb");
+  private static final String SQOOP_CONNECTION_STRING =
+    "jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME";
   private static final String SQOOP_CONNECTION =
     "--connect jdbc:mysql://$MYSQL_HOST/$MYSQL_DBNAME --username=$MYSQL_USER" +
     (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
@@ -73,7 +65,8 @@ class TestSqoopImport {
     System.out.println("SQOOP_CONNECTION string is " + SQOOP_CONNECTION );
   }
   private static final String DATA_DIR = System.getProperty("data.dir", "mysql-files");
-  private static final String OUTPUT = System.getProperty("output.dir", "output-dir");
+  private static final String OUTPUT = System.getProperty("output.dir", "/tmp/output-dir");
+  private static final String SQOOP_SERVER_URL = System.getProperty("sqoop.server.url", "http://localhost:12000/sqoop/");
   private static Shell sh = new Shell("/bin/bash -s");
 
   @BeforeClass
@@ -84,10 +77,9 @@ class TestSqoopImport {
       assertTrue("Deletion of previous $OUTPUT from HDFS failed",
           sh.getRet() == 0);
     }
-    sh.exec("hadoop fs -mkdir $OUTPUT");
-    assertTrue("Could not create $OUTPUT directory", sh.getRet() == 0);
     // unpack resource
     JarContent.unpackJarContainer(TestSqoopImport.class, '.' , null)
+
     // create the database
     sh.exec("cat $DATA_DIR/mysql-create-db.sql | $MYSQL_COMMAND");
     //create tables
@@ -108,20 +100,114 @@ class TestSqoopImport {
     }
   }
 
+  protected SqoopClient getClient() {
+    String sqoopServerUrl = "$SQOOP_SERVER_URL".toString();
+    return new SqoopClient(sqoopServerUrl);
+  }
+
+  /**
+   * Fill connection form with the MySQL JDBC driver and connection parameters.
+   *
+   * @param connection MConnection object to fill
+   */
+  protected void fillConnectionForm(MConnection connection) {
+    MFormList forms = connection.getConnectorPart();
+    forms.getStringInput("connection.jdbcDriver").setValue("com.mysql.jdbc.Driver");
+    forms.getStringInput("connection.connectionString").setValue("$SQOOP_CONNECTION_STRING".toString());
+    forms.getStringInput("connection.username").setValue("$MYSQL_USER".toString());
+    forms.getStringInput("connection.password").setValue("$MYSQL_PASSWORD".toString());
+  }
+
+  /**
+   * Fill output form with the given storage type, output format, and directory.
+   *
+   * @param job MJob object to fill
+   * @param storage Storage type that should be set
+   * @param output Output format that should be set
+   * @param outputDir HDFS directory where imported data will be written
+   */
+  protected void fillOutputForm(MJob job, StorageType storage, OutputFormat output, String outputDir) {
+    MFormList forms = job.getFrameworkPart();
+    forms.getEnumInput("output.storageType").setValue(storage);
+    forms.getEnumInput("output.outputFormat").setValue(output);
+    forms.getStringInput("output.outputDirectory").setValue(outputDir);
+  }
+
+  /**
+   * Create connection.
+   *
+   * Asserts that creation succeeded and that a persistence id was assigned.
+   *
+   * @param connection MConnection object to create
+   */
+  protected void createConnection(MConnection connection) {
+    assertEquals(Status.FINE, getClient().createConnection(connection));
+    assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, connection.getPersistenceId());
+  }
+
+  /**
+   * Create job.
+   *
+   * Asserts that creation succeeded and that a persistence id was assigned.
+   *
+   * @param job MJob object to create
+   */
+  protected void createJob(MJob job) {
+    assertEquals(Status.FINE, getClient().createJob(job));
+    assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, job.getPersistenceId());
+  }
+
+  protected void runSqoopClient(String tableName, String partitionColumn) {
+    // Connection creation
+    MConnection connection = getClient().newConnection(1L);
+    fillConnectionForm(connection);
+    createConnection(connection);
+
+    // Job creation
+    MJob job = getClient().newJob(connection.getPersistenceId(), MJob.Type.IMPORT);
+
+    // Connector values
+    MFormList forms = job.getConnectorPart();
+    forms.getStringInput("table.tableName").setValue(tableName);
+    forms.getStringInput("table.partitionColumn").setValue(partitionColumn);
+    // Framework values
+    fillOutputForm(job, StorageType.HDFS, OutputFormat.TEXT_FILE, "$OUTPUT".toString() + "/" + tableName);
+    createJob(job);
+
+    MSubmission submission = getClient().startSubmission(job.getPersistenceId());
+    assertTrue(submission.getStatus().isRunning());
+
+    // Wait until the job finishes - this active waiting will be removed
+    // once the Sqoop client API gains blocking support.
+    while (true) {
+      Thread.sleep(5000);
+      submission = getClient().getSubmissionStatus(job.getPersistenceId());
+      if (!submission.getStatus().isRunning())
+        break;
+    }
+  }
+
+
   @Test
   public void testBooleanImport() {
-    sh.exec("sqoop import $SQOOP_CONNECTION --table t_bool --target-dir $OUTPUT/t_bool");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_bool";
+    String partitionColumn = "pri";
+
+    runSqoopClient(tableName, partitionColumn);
+
     sh.exec("hadoop fs -cat $OUTPUT/t_bool/part-* > t_bool.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_bool.out t_bool.out").getRet());
   }
 
-  
+
   @Test
   public void testIntegerImport() {
-    sh.exec("sqoop import $SQOOP_CONNECTION --table t_int --target-dir $OUTPUT/t_int");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_int";
+    String partitionColumn = "pri";
+
+    runSqoopClient(tableName, partitionColumn);
+
     sh.exec("hadoop fs -cat $OUTPUT/t_int/part-* > t_int.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_int.out t_int.out").getRet());
@@ -129,8 +215,11 @@ class TestSqoopImport {
 
   @Test
   public void testFixedPointFloatingPointImport() {
-    sh.exec("sqoop import $SQOOP_CONNECTION --table t_fp --target-dir $OUTPUT/t_fp");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_fp";
+    String partitionColumn = "pri";
+
+    runSqoopClient(tableName, partitionColumn);
+
     sh.exec("hadoop fs -cat $OUTPUT/t_fp/part-* > t_fp.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_fp.out t_fp.out").getRet());
@@ -138,8 +227,11 @@ class TestSqoopImport {
 
   @Test
   public void testDateTimeImport() {
-    sh.exec("sqoop import $SQOOP_CONNECTION --table t_date --target-dir $OUTPUT/t_date");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_date";
+    String partitionColumn = "pri";
+
+    runSqoopClient(tableName, partitionColumn);
+
     sh.exec("hadoop fs -cat $OUTPUT/t_date/part-* > t_date.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_date.out t_date.out").getRet());
@@ -147,13 +239,17 @@ class TestSqoopImport {
 
   @Test
   public void testStringImport() {
-    sh.exec("sqoop import $SQOOP_CONNECTION --table t_string --target-dir $OUTPUT/t_string");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    String tableName = "t_string";
+    String partitionColumn = "pri";
+
+    runSqoopClient(tableName, partitionColumn);
+
     sh.exec("hadoop fs -cat $OUTPUT/t_string/part-* > t_string.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-t_string.out t_string.out").getRet());
-  }
-  
+    }
+
+  @Ignore("Backward Compatibility")
   @Test
   public void testAppendImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --target-dir $OUTPUT/append");
@@ -165,7 +261,8 @@ class TestSqoopImport {
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-append.out append.out").getRet());
   }
-  
+
+  @Ignore("Backward Compatibility")
   @Test
   public void testColumnsImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --columns id,fname --target-dir $OUTPUT/columns");
@@ -175,60 +272,67 @@ class TestSqoopImport {
         0, sh.exec("diff -u $DATA_DIR/sqoop-columns.out columns.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testDirectImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --direct --target-dir $OUTPUT/direct");
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
-    sh.exec("hadoop fs -cat $OUTPUT/direct/part-* > direct.out");    
+    sh.exec("hadoop fs -cat $OUTPUT/direct/part-* > direct.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out direct.out").getRet());
   }
-  
+
+  @Ignore("Backward Compatibility")
   @Test
   public void testNumMappersImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --num-mappers 1 --target-dir $OUTPUT/num-mappers");
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
-    sh.exec("hadoop fs -cat $OUTPUT/num-mappers/part-*0 > num-mappers.out");    
+    sh.exec("hadoop fs -cat $OUTPUT/num-mappers/part-*0 > num-mappers.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out num-mappers.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testQueryImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --query 'select t1.id as id, t2.fname as fname from testtable as t1 join testtable2 as t2 on (t1.id = t2.id        ) where t1.id < 3 AND \$CONDITIONS' --split-by t1.id --target-dir $OUTPUT/query");
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
-    sh.exec("hadoop fs -cat $OUTPUT/query/part-* > query.out");    
+    sh.exec("hadoop fs -cat $OUTPUT/query/part-* > query.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-query.out query.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testSplityByImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --split-by fname --target-dir $OUTPUT/split-by");
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
-    sh.exec("hadoop fs -cat $OUTPUT/split-by/part-* > split-by.out");    
+    sh.exec("hadoop fs -cat $OUTPUT/split-by/part-* > split-by.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out split-by.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testWarehouseDirImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --warehouse-dir $OUTPUT/warehouse-dir");
     assertTrue("Sqoop job failed!", sh.getRet() == 0);
-    sh.exec("hadoop fs -cat $OUTPUT/warehouse-dir/testtable/part-* > warehouse-dir.out");    
+    sh.exec("hadoop fs -cat $OUTPUT/warehouse-dir/testtable/part-* > warehouse-dir.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-testtable.out warehouse-dir.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testWhereClauseImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testtable --where \"id < 5\" --target-dir $OUTPUT/where-clause");
-    assertTrue("Sqoop job failed!", sh.getRet() == 0);    
-    sh.exec("hadoop fs -cat $OUTPUT/where-clause/part-* > where-clause.out");    
+    assertTrue("Sqoop job failed!", sh.getRet() == 0);
+    sh.exec("hadoop fs -cat $OUTPUT/where-clause/part-* > where-clause.out");
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-where-clause.out where-clause.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testNullStringImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testnullvalues --null-string mynullstring --target-dir $OUTPUT/null-string");
@@ -238,6 +342,7 @@ class TestSqoopImport {
         0, sh.exec("diff -u $DATA_DIR/sqoop-null-string.out null-string.out").getRet());
   }
 
+  @Ignore("Backward Compatibility")
   @Test
   public void testNullNonStringImport() {
     sh.exec("sqoop import $SQOOP_CONNECTION --table testnullvalues --null-non-string 10 --target-dir $OUTPUT/non-null-string");
@@ -246,10 +351,11 @@ class TestSqoopImport {
     assertEquals("sqoop import did not write expected data",
         0, sh.exec("diff -u $DATA_DIR/sqoop-null-non-string.out non-null-string.out").getRet());
   }
-  
-  //database name is hardcoded here 
+
+  //database name is hardcoded here
+  @Ignore("Backward Compatibility")
   @Test
-    public void testImportAllTables() {
+  public void testImportAllTables() {
     String SQOOP_CONNECTION_IMPORT_ALL =
     "--connect jdbc:mysql://$MYSQL_HOST/mysqltestdb2 --username=$MYSQL_USER" +
     (("".equals(MYSQL_PASSWORD)) ? "" : " --password=$MYSQL_PASSWORD");
@@ -261,4 +367,3 @@ class TestSqoopImport {
         0, sh.exec("diff -u $DATA_DIR/sqoop-all-tables.out all-tables.out").getRet());
   }
 }
-
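
Both test classes poll getSubmissionStatus() in an unbounded loop, per the comments above. Until the client API gains blocking support, a bounded wait along the following lines would let a stuck submission fail the test rather than hang the suite (the helper name and timeout are assumptions, not part of this commit):

    import org.apache.sqoop.client.SqoopClient
    import org.apache.sqoop.model.MSubmission

    // Poll until the submission stops running, or fail after maxWaitMs.
    MSubmission waitForSubmission(SqoopClient client, long jobId, long maxWaitMs) {
      long deadline = System.currentTimeMillis() + maxWaitMs
      MSubmission submission = client.getSubmissionStatus(jobId)
      while (submission.getStatus().isRunning()) {
        assert System.currentTimeMillis() < deadline :
            "Sqoop job $jobId did not finish within $maxWaitMs ms"
        Thread.sleep(5000)
        submission = client.getSubmissionStatus(jobId)
      }
      return submission
    }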

http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/bigtop-tests/test-execution/smokes/sqoop-smokes/pom.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-execution/smokes/sqoop-smokes/pom.xml b/bigtop-tests/test-execution/smokes/sqoop-smokes/pom.xml
deleted file mode 100644
index 8d33ed7..0000000
--- a/bigtop-tests/test-execution/smokes/sqoop-smokes/pom.xml
+++ /dev/null
@@ -1,90 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-                  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <parent>
-    <groupId>org.apache.bigtop.itest</groupId>
-    <artifactId>smoke-tests</artifactId>
-    <version>0.6.0-SNAPSHOT</version>
-    <relativePath>../pom.xml</relativePath>
-  </parent>
-
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.bigtop.itest</groupId>
-  <artifactId>sqoop-smoke-execution</artifactId>
-  <version>0.6.0-SNAPSHOT</version>
-  <name>Sqoop smoke test execution</name>
-  
-  <properties>
-    <!--Additional environment variables are required-->
-    <SQOOP_HOME>${env.SQOOP_HOME}</SQOOP_HOME>
-    <!-- Integration tests are special -->
-    <org.apache.maven-dependency-plugin.groupId>org.apache.bigtop.itest</org.apache.maven-dependency-plugin.groupId>
-    <org.apache.maven-dependency-plugin.artifactId>sqoopsmokes</org.apache.maven-dependency-plugin.artifactId>
-    <org.apache.maven-dependency-plugin.version>0.6.0-SNAPSHOT</org.apache.maven-dependency-plugin.version>
-    <org.apache.maven-dependency-plugin.type>jar</org.apache.maven-dependency-plugin.type>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>${org.apache.maven-dependency-plugin.groupId}</groupId>
-      <artifactId>${org.apache.maven-dependency-plugin.artifactId}</artifactId>
-      <version>${org.apache.maven-dependency-plugin.version}</version>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-enforcer-plugin</artifactId>
-        <version>1.0</version>
-        <executions>
-          <execution>
-            <id>enforce-property</id>
-            <goals>
-              <goal>enforce</goal>
-            </goals>
-            <configuration>
-              <rules>
-                <requireProperty>
-                  <property>SQOOP_HOME</property>
-                  <message>SQOOP_HOME env. variable has to be set</message>
-                </requireProperty>
-              </rules>
-              <fail>true</fail>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-failsafe-plugin</artifactId>
-        <version>2.11</version>
-      </plugin>
-    </plugins>
-  </build>
-</project>

http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/bigtop-tests/test-execution/smokes/sqoop/pom.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-execution/smokes/sqoop/pom.xml b/bigtop-tests/test-execution/smokes/sqoop/pom.xml
index c53b31d..7eae008 100644
--- a/bigtop-tests/test-execution/smokes/sqoop/pom.xml
+++ b/bigtop-tests/test-execution/smokes/sqoop/pom.xml
@@ -17,7 +17,7 @@
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+                  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   <parent>
     <groupId>org.apache.bigtop.itest</groupId>
@@ -28,18 +28,19 @@
 
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.bigtop.itest</groupId>
-  <artifactId>sqoop-test-execution</artifactId>
+  <artifactId>sqoop-smoke-execution</artifactId>
   <version>0.6.0-SNAPSHOT</version>
   <name>Sqoop smoke test execution</name>
-
+  
   <properties>
-    <org.codehaus.maven-failsafe-plugin.dbhost>172.29.12.207</org.codehaus.maven-failsafe-plugin.dbhost>
-    <org.codehaus.maven-failsafe-plugin.dbname>db2</org.codehaus.maven-failsafe-plugin.dbname>
-
-    <org.apache.maven-dependency-plugin.groupId>org.apache.sqoop</org.apache.maven-dependency-plugin.groupId>
-    <org.apache.maven-dependency-plugin.artifactId>sqoop</org.apache.maven-dependency-plugin.artifactId>
-    <org.apache.maven-dependency-plugin.version>${sqoop.version}</org.apache.maven-dependency-plugin.version>
-    <org.apache.maven-dependency-plugin.output>${project.build.directory}</org.apache.maven-dependency-plugin.output>
+    <!--Additional environment variables are required-->
+    <SQOOP_URL>${env.SQOOP_URL}</SQOOP_URL>
+    <MYSQL_HOST>${env.MYSQL_HOST}</MYSQL_HOST>
+ 
+    <!-- Integration tests are special -->
+    <org.apache.maven-dependency-plugin.groupId>org.apache.bigtop.itest</org.apache.maven-dependency-plugin.groupId>
+    <org.apache.maven-dependency-plugin.artifactId>sqoop-smoke</org.apache.maven-dependency-plugin.artifactId>
+    <org.apache.maven-dependency-plugin.version>0.6.0-SNAPSHOT</org.apache.maven-dependency-plugin.version>
     <org.apache.maven-dependency-plugin.type>jar</org.apache.maven-dependency-plugin.type>
   </properties>
 
@@ -49,23 +50,6 @@
       <artifactId>${org.apache.maven-dependency-plugin.artifactId}</artifactId>
       <version>${org.apache.maven-dependency-plugin.version}</version>
     </dependency>
-    <dependency>
-      <groupId>${org.apache.maven-dependency-plugin.groupId}</groupId>
-      <artifactId>${org.apache.maven-dependency-plugin.artifactId}</artifactId>
-      <version>${org.apache.maven-dependency-plugin.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
 
   <build>
@@ -74,63 +58,46 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-dependency-plugin</artifactId>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-failsafe-plugin</artifactId>
-        <version>2.11</version>
-        <configuration>
-          <includes>
-            <include>**/TestColumnTypes*</include>
-            <include>**/TestAllTables*</include>
-            <include>**/TestAppendUtils*</include>
-            <include>**/TestExport*</include>
-            <include>**/TestExportUpdate*</include>
-            <include>**/TestBlobRef*</include>
-            <include>**/TestClobRef*</include>
-          </includes>
-          <forkMode>always</forkMode>
-          <argLine>
-            -Dhsql.server.host=hsql://${org.codehaus.maven-failsafe-plugin.dbhost} -Dhsql.database.name=${org.codehaus.maven-failsafe-plugin.dbname}
-          </argLine>
-          <additionalClasspathElements>
-            <additionalClasspathElement>
-              ${HADOOP_CONF_DIR}
-            </additionalClasspathElement>
-            <additionalClasspathElement>
-              <!-- the value of ${lzo.jar} property is set by a groovy
-                   script during pre-integration-test phase below-->
-              ${HADOOP_HOME}/lib/${lzo.jar}
-            </additionalClasspathElement>
-          </additionalClasspathElements>
-        </configuration>
-      </plugin>
 
       <plugin>
-        <groupId>org.codehaus.groovy.maven</groupId>
-        <artifactId>gmaven-plugin</artifactId>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
         <version>1.0</version>
         <executions>
           <execution>
-            <id>find-lzo-jar</id>
-            <phase>pre-integration-test</phase>
+            <id>enforce-property</id>
             <goals>
-              <goal>execute</goal>
+              <goal>enforce</goal>
             </goals>
             <configuration>
-              <source>
-                if (!System.getProperties().keySet().contains('enforcer.skip')) {
-                  project.properties['lzo.jar'] =
-                    org.apache.bigtop.itest.JarContent.getJarName(
-                        "${HADOOP_HOME}/lib", 'hadoop.*lzo.*.jar');
-                  if ( project.properties['lzo.jar'] == null ) {
-                      throw new IOException ("hadoop-lzo.jar isn't found");
-                  }
-                }
-              </source>
+              <rules>
+                <requireProperty>
+                  <property>SQOOP_URL</property>
+                  <message>SQOOP_URL env. variable has to be set</message>
+                </requireProperty>
+                <requireProperty>
+                  <property>MYSQL_HOST</property>
+                  <message>MYSQL_HOST env. variable has to be set</message>
+                </requireProperty>
+              </rules>
+              <fail>true</fail>
             </configuration>
           </execution>
         </executions>
       </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <version>2.11</version>
+        <configuration>
+          <systemPropertyVariables>
+            <sqoop.server.url>${SQOOP_URL}</sqoop.server.url>
+            <mysql.host>${MYSQL_HOST}</mysql.host>
+          </systemPropertyVariables>
+          <forkMode>always</forkMode>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
 </project>
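
The rewritten POM wires the SQOOP_URL and MYSQL_HOST environment variables (required by maven-enforcer-plugin) through to the tests as the sqoop.server.url and mysql.host system properties, so a run would look something like "SQOOP_URL=http://localhost:12000/sqoop/ MYSQL_HOST=localhost mvn verify" (an assumed invocation). On the test side the properties are resolved with localhost defaults, as in this sketch:

    // Mirrors how TestSqoopImport/TestSqoopExport resolve their endpoints:
    // failsafe-injected system properties first, localhost defaults otherwise.
    String sqoopServerUrl = System.getProperty("sqoop.server.url", "http://localhost:12000/sqoop/")
    String mysqlHost = System.getProperty("mysql.host", "localhost")
    println "Sqoop server: $sqoopServerUrl, MySQL host: $mysqlHost"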

http://git-wip-us.apache.org/repos/asf/bigtop/blob/b10e974b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d7470c5..6dcd0b6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -205,6 +205,16 @@
         <artifactId>sqoop</artifactId>
         <version>${sqoop.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.sqoop</groupId>
+        <artifactId>sqoop-core</artifactId>
+        <version>${sqoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sqoop</groupId>
+        <artifactId>sqoop-client</artifactId>
+        <version>${sqoop.version}</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>