Posted to commits@sqoop.apache.org by an...@apache.org on 2017/05/15 08:10:10 UTC

[2/4] sqoop git commit: SQOOP-3174: Add SQLServer manual tests to 3rd party test suite

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java
new file mode 100644
index 0000000..67d8f1b
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerManagerTest.java
@@ -0,0 +1,368 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import com.cloudera.sqoop.ConnFactory;
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.manager.ConnManager;
+import com.cloudera.sqoop.metastore.JobData;
+import com.cloudera.sqoop.testutil.HsqldbTestServer;
+import com.cloudera.sqoop.tool.ImportTool;
+import com.cloudera.sqoop.tool.SqoopTool;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
+
+/**
+ * Test methods of the generic SqlManager implementation.
+ *
+ * This uses JDBC to import data from an SQLServer database to HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerManagerTest or -Dthirdparty=true.
+ *
+ * You need to put the SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerManagerTest {
+
+  public static final Log LOG = LogFactory.getLog(
+    SQLServerManagerTest.class.getName());
+
+  /** the name of a table that doesn't exist. */
+  static final String MISSING_TABLE = "MISSING_TABLE";
+
+  // instance variables populated during setUp, used during tests
+  private HsqldbTestServer testServer;
+  private ConnManager manager;
+
+  @Before
+  public void setUp() {
+    MSSQLTestUtils utils = new MSSQLTestUtils();
+    try {
+      utils.createTableFromSQL(MSSQLTestUtils.CREATE_TALBE_LINEITEM);
+      utils.populateLineItem();
+    } catch (SQLException e) {
+      LOG.error("Setup fail with SQLException: " + StringUtils.stringifyException(e));
+      fail("Setup fail with SQLException: " + e.toString());
+    }
+    Configuration conf = getConf();
+    SqoopOptions opts = getSqoopOptions(conf);
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    opts.setUsername(username);
+    opts.setPassword(password);
+    opts.setConnectString(getConnectString());
+    ConnFactory f = new ConnFactory(conf);
+    try {
+      this.manager = f.getManager(new JobData(opts, new ImportTool()));
+      System.out.println("Manger : " + this.manager);
+    } catch (IOException ioe) {
+      LOG.error("Setup fail with IOException: " + StringUtils.stringifyException(ioe));
+      fail("IOException instantiating manager: "
+          + StringUtils.stringifyException(ioe));
+    }
+  }
+
+  @After
+  public void tearDown() {
+    try {
+
+      MSSQLTestUtils utils = new MSSQLTestUtils();
+      utils.dropTableIfExists("TPCH1M_LINEITEM");
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  @Test
+  public void testListColNames() {
+    String[] colNames = manager.getColumnNames(getTableName());
+    assertNotNull("manager returned no colname list", colNames);
+    assertEquals("Table list should be length 2", 16, colNames.length);
+    String[] knownFields = MSSQLTestUtils.getColumns();
+    for (int i = 0; i < colNames.length; i++) {
+      assertEquals(knownFields[i], colNames[i]);
+    }
+  }
+
+  @Test
+  public void testListColTypes() {
+    Map<String, Integer> types = manager.getColumnTypes(getTableName());
+
+    assertNotNull("manager returned no types map", types);
+    assertEquals("Map should be size=16", 16, types.size());
+    assertEquals(types.get("L_ORDERKEY").intValue(), Types.INTEGER);
+    assertEquals(types.get("L_COMMENT").intValue(), Types.VARCHAR);
+  }
+
+  @Test
+  public void testMissingTableColNames() {
+    // SQL Server returns an empty column list for a missing table, which is
+    // translated into a zero-length array; however, a null return is also
+    // acceptable, so handle that case as well.
+    String[] colNames = manager.getColumnNames(MISSING_TABLE);
+    if (colNames == null) {
+      // Null is a valid answer for a missing table; nothing more to check.
+      return;
+    }
+    assertEquals(0, colNames.length);
+  }
+
+  @Test
+  public void testMissingTableColTypes() {
+    Map<String, Integer> colTypes = manager.getColumnTypes(MISSING_TABLE);
+    assertNull("No column types should be returned for missing table",
+        colTypes);
+  }
+
+  // constants related to testReadTable()
+  static final int EXPECTED_NUM_ROWS = 4;
+  static final int EXPECTED_COL1_SUM = 10;
+  static final int EXPECTED_COL2_SUM = 14;
+
+  @Test
+  public void testReadTable() {
+    ResultSet results = null;
+    try {
+      results = manager.readTable(getTableName(), MSSQLTestUtils
+          .getColumns());
+
+      assertNotNull("ResultSet from readTable() is null!", results);
+
+      ResultSetMetaData metaData = results.getMetaData();
+      assertNotNull("ResultSetMetadata is null in readTable()", metaData);
+
+      // ensure that we get the correct number of columns back
+      assertEquals("Number of returned columns was unexpected!", metaData
+          .getColumnCount(), 16);
+
+      // We should get back EXPECTED_NUM_ROWS rows. Row order is not
+      // guaranteed, so instead of checking individual values we sum up the
+      // first two columns across all rows and compare the totals against
+      // EXPECTED_COL1_SUM and EXPECTED_COL2_SUM; this verifies that every
+      // row made it back from the database.
+      int sumCol1 = 0, sumCol2 = 0, rowCount = 0;
+      while (results.next()) {
+        rowCount++;
+        sumCol1 += results.getInt(1);
+        sumCol2 += results.getInt(2);
+      }
+
+      assertEquals("Expected 4 rows back", EXPECTED_NUM_ROWS, rowCount);
+      assertEquals("Expected left sum of 10", EXPECTED_COL1_SUM, sumCol1);
+      assertEquals("Expected right sum of 14", EXPECTED_COL2_SUM, sumCol2);
+    } catch (SQLException sqlException) {
+      LOG.error(StringUtils.stringifyException(sqlException));
+      fail("SQL Exception: " + sqlException.toString());
+    } finally {
+      if (null != results) {
+        try {
+          results.close();
+        } catch (SQLException sqlE) {
+          LOG.error(StringUtils.stringifyException(sqlE));
+          fail("SQL Exception in ResultSet.close(): "
+              + sqlE.toString());
+        }
+      }
+
+      manager.release();
+    }
+  }
+
+  @Test
+  public void testReadMissingTable() {
+    ResultSet results = null;
+    try {
+      String[] colNames = { "*" };
+      results = manager.readTable(MISSING_TABLE, colNames);
+      assertNull("Expected null resultset from readTable(MISSING_TABLE)",
+          results);
+    } catch (SQLException sqlException) {
+      // This is expected: reading a missing table should fail.
+    } finally {
+      if (null != results) {
+        try {
+          results.close();
+        } catch (SQLException sqlE) {
+          fail("SQL Exception in ResultSet.close(): "
+              + sqlE.toString());
+        }
+      }
+
+      manager.release();
+    }
+  }
+
+  @Test
+  public void testgetPrimaryKeyFromMissingTable() {
+    String primaryKey = manager.getPrimaryKey(MISSING_TABLE);
+    assertNull("Expected null pkey for missing table", primaryKey);
+  }
+
+  @Test
+  public void testgetPrimaryKeyFromTableWithoutKey() {
+    String primaryKey = manager.getPrimaryKey(getTableName());
+    assertNull("Expected null pkey for table without key", primaryKey);
+  }
+
+  // constants for getPrimaryKeyFromTable()
+  static final String TABLE_WITH_KEY = "TABLE_WITH_KEY";
+  static final String KEY_FIELD_NAME = "KEYFIELD";
+
+  @Test
+  public void testgetPrimaryKeyFromTable() {
+    // first, create a table with a primary key
+    Connection conn = null;
+    try {
+      conn = getManager().getConnection();
+      dropTableIfExists(TABLE_WITH_KEY);
+      PreparedStatement statement = conn.prepareStatement("CREATE TABLE "
+          + TABLE_WITH_KEY + "(" + KEY_FIELD_NAME
+          + " INT NOT NULL PRIMARY KEY, foo INT)",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+
+      String primaryKey = getManager().getPrimaryKey(TABLE_WITH_KEY);
+      assertEquals("Expected null pkey for table without key",
+          primaryKey, KEY_FIELD_NAME);
+
+    } catch (SQLException sqlException) {
+      LOG.error(StringUtils.stringifyException(sqlException));
+      fail("Could not create table with primary key: "
+          + sqlException.toString());
+    } finally {
+      if (null != conn) {
+        try {
+          conn.close();
+        } catch (SQLException sqlE) {
+          LOG.warn("Got SQLException during close: "
+              + sqlE.toString());
+        }
+      }
+    }
+
+  }
+
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  protected String getConnectString() {
+    return MSSQLTestUtils.getDBConnectString();
+  }
+
+  /**
+   * Drop a table if it already exists in the database.
+   *
+   * @param table
+   *            the name of the table to drop.
+   * @throws SQLException
+   *             if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    String sqlStmt = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    SqoopOptions opts = new SqoopOptions(conf);
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    opts.setUsername(username);
+    opts.setPassword(password);
+
+    return opts;
+  }
+
+  SqoopOptions getSqoopOptions(String[] args, SqoopTool tool) {
+    SqoopOptions opts = null;
+    try {
+      opts = tool.parseArguments(args, null, null, true);
+      String username = MSSQLTestUtils.getDBUserName();
+      String password = MSSQLTestUtils.getDBPassWord();
+      opts.setUsername(username);
+      opts.setPassword(password);
+
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      fail("Invalid options: " + e.toString());
+    }
+
+    return opts;
+  }
+
+  protected String getTableName() {
+    return "tpch1m_lineitem";
+  }
+
+  protected ConnManager getManager() {
+    return manager;
+  }
+
+  protected Configuration getConf() {
+    return new Configuration();
+  }
+
+}
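
For context: MSSQLTestUtils.getDBConnectString() itself is not part of this patch. A minimal sketch, assuming it simply combines the -D system properties named in the class javadoc above, could look like the following (property names come from the javadoc; the default values are illustrative assumptions only):

// Hypothetical sketch only -- the real logic lives in MSSQLTestUtils, which
// this diff does not show. Property names come from the javadoc above; the
// defaults are assumptions for illustration.
public final class ConnectStringSketch {
  public static String getDBConnectString() {
    String hostUrl = System.getProperty(
        "sqoop.test.sqlserver.connectstring.host_url",
        "jdbc:sqlserver://sqlserverhost:1433");          // assumed default
    String database = System.getProperty(
        "sqoop.test.sqlserver.database", "SQOOPTEST");   // database from the setup notes
    return hostUrl + ";database=" + database;
  }

  public static void main(String[] args) {
    System.out.println(getDBConnectString());
  }
}

SQLServerManagerTest above only consumes the resulting URL via opts.setConnectString(getConnectString()); the password is passed separately through SqoopOptions.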

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java
deleted file mode 100644
index 51d5f75..0000000
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsManualTest.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.manager.sqlserver;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import org.apache.hadoop.conf.Configuration;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.TestMultiCols;
-import org.junit.After;
-import org.junit.Test;
-
-/**
- * Test multiple columns in SQL Server.
- *
- * This uses JDBC to import data from an SQLServer database to HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerMultiColsManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerMultiColsManualTest extends TestMultiCols {
-
-  protected boolean useHsqldbTestServer() {
-    return false;
-  }
-
-  protected String getConnectString() {
-    return MSSQLTestUtils.getDBConnectString();
-  }
-
-  /**
-  * Drop a table if it already exists in the database.
-  *
-  * @param table
-  *            the name of the table to drop.
-  * @throws SQLException
-  *             if something goes wrong.
-  */
-  protected void dropTableIfExists(String table) throws SQLException {
-    Connection conn = getManager().getConnection();
-    String sqlStmt = "IF OBJECT_ID('" + table
-      + "') IS NOT NULL  DROP TABLE " + table;
-    PreparedStatement statement = conn.prepareStatement(sqlStmt,
-      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-     statement.executeUpdate();
-     conn.commit();
-    } finally {
-     statement.close();
-    }
-  }
-
-  protected SqoopOptions getSqoopOptions(Configuration conf) {
-
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-    SqoopOptions opts = new SqoopOptions(conf);
-    opts.setUsername(username);
-    opts.setPassword(password);
-
-    return opts;
-
-  }
-
-  @After
-  public void tearDown() {
-    try {
-      dropTableIfExists(getTableName());
-    } catch (SQLException sqle) {
-      LOG.info("Table clean-up failed: " + sqle);
-    } finally {
-      super.tearDown();
-    }
-  }
-
-  @Test
-  public void testMixed4() {
-    // Overridden to bypass test case invalid for MSSQL server
-  }
-
-  @Test
-  public void testMixed5() {
-    // Overridden to bypass test case invalid for MSSQL server
-  }
-
-  @Test
-  public void testMixed6() {
-    // Overridden to bypass test case invalid for MSSQL server
-  }
-
-  @Test
-  public void testSkipFirstCol() {
-    // Overridden to bypass test case invalid for MSSQL server
-  }
-
-  @Test
-  public void testSkipSecondCol() {
-    // Overridden to bypass test case invalid for MSSQL server
-  }
-
-  @Test
-  public void testSkipThirdCol() {
-    // Overridden to bypass test case invalid for MSSQL server
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java
new file mode 100644
index 0000000..d48de99
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiColsTest.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.manager.sqlserver;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import org.apache.hadoop.conf.Configuration;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.TestMultiCols;
+import org.junit.After;
+import org.junit.Test;
+
+/**
+ * Test multiple columns in SQL Server.
+ *
+ * This uses JDBC to import data from an SQLServer database to HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerMultiColsTest or -Dthirdparty=true.
+ *
+ * You need to put the SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerMultiColsTest extends TestMultiCols {
+
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  protected String getConnectString() {
+    return MSSQLTestUtils.getDBConnectString();
+  }
+
+  /**
+   * Drop a table if it already exists in the database.
+   *
+   * @param table
+   *            the name of the table to drop.
+   * @throws SQLException
+   *             if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    String sqlStmt = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    SqoopOptions opts = new SqoopOptions(conf);
+    opts.setUsername(username);
+    opts.setPassword(password);
+
+    return opts;
+
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      dropTableIfExists(getTableName());
+    } catch (SQLException sqle) {
+      LOG.info("Table clean-up failed: " + sqle);
+    } finally {
+      super.tearDown();
+    }
+  }
+
+  @Test
+  public void testMixed4() {
+    // Overridden to bypass test case invalid for MSSQL server
+  }
+
+  @Test
+  public void testMixed5() {
+    // Overridden to bypass test case invalid for MSSQL server
+  }
+
+  @Test
+  public void testMixed6() {
+    // Overridden to bypass test case invalid for MSSQL server
+  }
+
+  @Test
+  public void testSkipFirstCol() {
+    // Overridden to bypass test case invalid for MSSQL server
+  }
+
+  @Test
+  public void testSkipSecondCol() {
+    // Overridden to bypass test case invalid for MSSQL server
+  }
+
+  @Test
+  public void testSkipThirdCol() {
+    // Overridden to bypass test case invalid for MSSQL server
+  }
+
+}
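
For context: dropTableIfExists() above relies on the T-SQL idiom where OBJECT_ID('&lt;name&gt;') returns NULL for an object that does not exist. A minimal standalone sketch of the same idiom (the JDBC URL and credentials are placeholders taken from the setup notes in the javadoc):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Standalone sketch of the conditional-drop idiom used by dropTableIfExists().
// The JDBC URL and credentials below are placeholders from the setup notes.
public class ConditionalDropSketch {
  public static void main(String[] args) throws Exception {
    String url = "jdbc:sqlserver://localhost:1433;database=SQOOPTEST";
    try (Connection conn = DriverManager.getConnection(url, "SQOOPUSER", "PASSWORD");
         Statement stmt = conn.createStatement()) {
      // OBJECT_ID(...) is NULL when the table does not exist, so the DROP
      // only executes when there is actually something to drop.
      stmt.executeUpdate(
          "IF OBJECT_ID('TPCH1M_LINEITEM') IS NOT NULL DROP TABLE TPCH1M_LINEITEM");
    }
  }
}

The same helper appears in each of the SQL Server test classes touched by this patch.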

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java
deleted file mode 100644
index fc9e20d..0000000
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsManualTest.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sqoop.manager.sqlserver;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.Utils;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Test multiple mapper splits in SQL Server.
- *
- * This uses JDBC to import data from an SQLServer database to HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerMultiMapsManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerMultiMapsManualTest extends ImportJobTestCase {
-
-  @Before
-  public void setUp() {
-    super.setUp();
-    MSSQLTestUtils utils = new MSSQLTestUtils();
-    try {
-      utils.createTableFromSQL(MSSQLTestUtils.CREATE_TALBE_LINEITEM);
-      utils.populateLineItem();
-    } catch (SQLException e) {
-      LOG.error("Setup fail with SQLException: " + StringUtils.stringifyException(e));
-      fail("Setup fail with SQLException: " + e.toString());
-    }
-
-  }
-
-  @After
-  public void tearDown() {
-    super.tearDown();
-    MSSQLTestUtils utils = new MSSQLTestUtils();
-    try {
-      utils.dropTableIfExists("TPCH1M_LINEITEM");
-    } catch (SQLException e) {
-      LOG.error("TeatDown fail with SQLException: " + StringUtils.stringifyException(e));
-      fail("TearDown fail with SQLException: " + e.toString());
-    }
-  }
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  protected String[] getArgv(boolean includeHadoopFlags, String[] colNames,
-      String splitByCol) {
-    String columnsString = "";
-    for (String col : colNames) {
-      columnsString += col + ",";
-    }
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--columns");
-    args.add(columnsString);
-    args.add("--split-by");
-    args.add(splitByCol);
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--username");
-    args.add(username);
-    args.add("--password");
-    args.add(password);
-    args.add("--as-sequencefile");
-    args.add("--num-mappers");
-    args.add("2");
-
-    return args.toArray(new String[0]);
-  }
-
-  // this test just uses the two int table.
-
-  /** @return a list of Path objects for each data file */
-  protected List<Path> getDataFilePaths() throws IOException {
-    List<Path> paths = new ArrayList<Path>();
-    Configuration conf = new Configuration();
-    conf.set("fs.default.name", "file:///");
-    FileSystem fs = FileSystem.get(conf);
-
-    FileStatus[] stats = fs.listStatus(getTablePath(),
-        new Utils.OutputFileUtils.OutputFilesFilter());
-
-    for (FileStatus stat : stats) {
-      paths.add(stat.getPath());
-    }
-
-    return paths;
-  }
-
-  /**
-   * Given a comma-delimited list of integers, grab and parse the first int.
-   *
-   * @param str
-   *            a comma-delimited list of values, the first of which is an
-   *            int.
-   * @return the first field in the string, cast to int
-   */
-  private int getFirstInt(String str) {
-    String[] parts = str.split(",");
-    return Integer.parseInt(parts[0]);
-  }
-
-  public void runMultiMapTest(String splitByCol, int expectedSum)
-      throws IOException {
-
-    String[] columns = MSSQLTestUtils.getColumns();
-    ClassLoader prevClassLoader = null;
-    SequenceFile.Reader reader = null;
-
-    String[] argv = getArgv(true, columns, splitByCol);
-    runImport(argv);
-    try {
-      ImportTool importTool = new ImportTool();
-      SqoopOptions opts = importTool.parseArguments(getArgv(false,
-          columns, splitByCol), null, null, true);
-      String username = MSSQLTestUtils.getDBUserName();
-      String password = MSSQLTestUtils.getDBPassWord();
-      opts.setUsername(username);
-      opts.setPassword(password);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          getTableName());
-
-      List<Path> paths = getDataFilePaths();
-      Configuration conf = new Configuration();
-      int curSum = 0;
-
-      // We expect multiple files. We need to open all the files and sum
-      // up the
-      // first column across all of them.
-      for (Path p : paths) {
-        reader = SeqFileReader.getSeqFileReader(p.toString());
-
-        // here we can actually instantiate (k, v) pairs.
-        Object key = ReflectionUtils.newInstance(reader.getKeyClass(),
-            conf);
-        Object val = ReflectionUtils.newInstance(
-            reader.getValueClass(), conf);
-
-        // We know that these values are two ints separated by a ','
-        // character. Since this is all dynamic, though, we don't want
-        // to
-        // actually link against the class and use its methods. So we
-        // just
-        // parse this back into int fields manually. Sum them up and
-        // ensure
-        // that we get the expected total for the first column, to
-        // verify that
-        // we got all the results from the db into the file.
-
-        // now sum up everything in the file.
-        while (reader.next(key) != null) {
-          reader.getCurrentValue(val);
-          curSum += getFirstInt(val.toString());
-        }
-
-        IOUtils.closeStream(reader);
-        reader = null;
-      }
-
-      assertEquals("Total sum of first db column mismatch", expectedSum,
-          curSum);
-    } catch (InvalidOptionsException ioe) {
-      LOG.error(StringUtils.stringifyException(ioe));
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      LOG.error(StringUtils.stringifyException(pe));
-      fail(pe.toString());
-    } finally {
-      IOUtils.closeStream(reader);
-
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testSplitByFirstCol() throws IOException {
-    runMultiMapTest("L_ORDERKEY", 10);
-  }
-
-  protected boolean useHsqldbTestServer() {
-    return false;
-  }
-
-  protected String getConnectString() {
-    return MSSQLTestUtils.getDBConnectString();
-  }
-
-  /**
-   * Drop a table if it already exists in the database.
-   *
-   * @param table
-   *            the name of the table to drop.
-   * @throws SQLException
-   *             if something goes wrong.
-   */
-  protected void dropTableIfExists(String table) throws SQLException {
-    Connection conn = getManager().getConnection();
-    String sqlStmt = "IF OBJECT_ID('" + table
-        + "') IS NOT NULL  DROP TABLE " + table;
-    PreparedStatement statement = conn.prepareStatement(sqlStmt,
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
-
-  protected SqoopOptions getSqoopOptions(Configuration conf) {
-    SqoopOptions opt = new SqoopOptions(conf);
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-    SqoopOptions opts = new SqoopOptions(conf);
-    opts.setUsername(username);
-    opts.setPassword(password);
-
-    return opt;
-  }
-
-  SqoopOptions getSqoopOptions(String[] args, SqoopTool tool) {
-    SqoopOptions opts = null;
-    try {
-      opts = tool.parseArguments(args, null, null, true);
-      String username = MSSQLTestUtils.getDBUserName();
-      String password = MSSQLTestUtils.getDBPassWord();
-      opts.setUsername(username);
-      opts.setPassword(password);
-
-    } catch (Exception e) {
-      LOG.error(StringUtils.stringifyException(e));
-      fail("Invalid options: " + e.toString());
-    }
-
-    return opts;
-  }
-
-  protected String getTableName() {
-    return "tpch1m_lineitem";
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java
new file mode 100644
index 0000000..be42da3
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerMultiMapsTest.java
@@ -0,0 +1,322 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapred.Utils;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
+import com.cloudera.sqoop.orm.CompilationManager;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import com.cloudera.sqoop.testutil.SeqFileReader;
+import com.cloudera.sqoop.tool.ImportTool;
+import com.cloudera.sqoop.tool.SqoopTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Test multiple mapper splits in SQL Server.
+ *
+ * This uses JDBC to import data from an SQLServer database to HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerMultiMapsTest or -Dthirdparty=true.
+ *
+ * You need to put the SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerMultiMapsTest extends ImportJobTestCase {
+
+  @Before
+  public void setUp() {
+    super.setUp();
+    MSSQLTestUtils utils = new MSSQLTestUtils();
+    try {
+      utils.createTableFromSQL(MSSQLTestUtils.CREATE_TALBE_LINEITEM);
+      utils.populateLineItem();
+    } catch (SQLException e) {
+      LOG.error("Setup fail with SQLException: " + StringUtils.stringifyException(e));
+      fail("Setup fail with SQLException: " + e.toString());
+    }
+
+  }
+
+  @After
+  public void tearDown() {
+    super.tearDown();
+    MSSQLTestUtils utils = new MSSQLTestUtils();
+    try {
+      utils.dropTableIfExists("TPCH1M_LINEITEM");
+    } catch (SQLException e) {
+      LOG.error("TeatDown fail with SQLException: " + StringUtils.stringifyException(e));
+      fail("TearDown fail with SQLException: " + e.toString());
+    }
+  }
+
+  /**
+   * Create the argv to pass to Sqoop.
+   *
+   * @return the argv as an array of strings.
+   */
+  protected String[] getArgv(boolean includeHadoopFlags, String[] colNames,
+      String splitByCol) {
+    String columnsString = "";
+    for (String col : colNames) {
+      columnsString += col + ",";
+    }
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--columns");
+    args.add(columnsString);
+    args.add("--split-by");
+    args.add(splitByCol);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--username");
+    args.add(username);
+    args.add("--password");
+    args.add(password);
+    args.add("--as-sequencefile");
+    args.add("--num-mappers");
+    args.add("2");
+
+    return args.toArray(new String[0]);
+  }
+
+  // This test imports the TPCH1M_LINEITEM table and sums its first column across the output files.
+
+  /** @return a list of Path objects for each data file */
+  protected List<Path> getDataFilePaths() throws IOException {
+    List<Path> paths = new ArrayList<Path>();
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    FileSystem fs = FileSystem.get(conf);
+
+    FileStatus[] stats = fs.listStatus(getTablePath(),
+        new Utils.OutputFileUtils.OutputFilesFilter());
+
+    for (FileStatus stat : stats) {
+      paths.add(stat.getPath());
+    }
+
+    return paths;
+  }
+
+  /**
+   * Given a comma-delimited list of integers, grab and parse the first int.
+   *
+   * @param str
+   *            a comma-delimited list of values, the first of which is an
+   *            int.
+   * @return the first field in the string, cast to int
+   */
+  private int getFirstInt(String str) {
+    String[] parts = str.split(",");
+    return Integer.parseInt(parts[0]);
+  }
+
+  public void runMultiMapTest(String splitByCol, int expectedSum)
+      throws IOException {
+
+    String[] columns = MSSQLTestUtils.getColumns();
+    ClassLoader prevClassLoader = null;
+    SequenceFile.Reader reader = null;
+
+    String[] argv = getArgv(true, columns, splitByCol);
+    runImport(argv);
+    try {
+      ImportTool importTool = new ImportTool();
+      SqoopOptions opts = importTool.parseArguments(getArgv(false,
+          columns, splitByCol), null, null, true);
+      String username = MSSQLTestUtils.getDBUserName();
+      String password = MSSQLTestUtils.getDBPassWord();
+      opts.setUsername(username);
+      opts.setPassword(password);
+
+      CompilationManager compileMgr = new CompilationManager(opts);
+      String jarFileName = compileMgr.getJarFilename();
+
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+          getTableName());
+
+      List<Path> paths = getDataFilePaths();
+      Configuration conf = new Configuration();
+      int curSum = 0;
+
+      // We expect multiple files. We need to open all the files and sum up
+      // the first column across all of them.
+      for (Path p : paths) {
+        reader = SeqFileReader.getSeqFileReader(p.toString());
+
+        // here we can actually instantiate (k, v) pairs.
+        Object key = ReflectionUtils.newInstance(reader.getKeyClass(),
+            conf);
+        Object val = ReflectionUtils.newInstance(
+            reader.getValueClass(), conf);
+
+        // We know that these values are two ints separated by a ','
+        // character. Since this is all dynamic, though, we don't want to
+        // actually link against the class and use its methods. So we just
+        // parse this back into int fields manually, sum them up, and
+        // ensure that we get the expected total for the first column, to
+        // verify that we got all the results from the db into the file.
+
+        // now sum up everything in the file.
+        while (reader.next(key) != null) {
+          reader.getCurrentValue(val);
+          curSum += getFirstInt(val.toString());
+        }
+
+        IOUtils.closeStream(reader);
+        reader = null;
+      }
+
+      assertEquals("Total sum of first db column mismatch", expectedSum,
+          curSum);
+    } catch (InvalidOptionsException ioe) {
+      LOG.error(StringUtils.stringifyException(ioe));
+      fail(ioe.toString());
+    } catch (ParseException pe) {
+      LOG.error(StringUtils.stringifyException(pe));
+      fail(pe.toString());
+    } finally {
+      IOUtils.closeStream(reader);
+
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  @Test
+  public void testSplitByFirstCol() throws IOException {
+    runMultiMapTest("L_ORDERKEY", 10);
+  }
+
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  protected String getConnectString() {
+    return MSSQLTestUtils.getDBConnectString();
+  }
+
+  /**
+   * Drop a table if it already exists in the database.
+   *
+   * @param table
+   *            the name of the table to drop.
+   * @throws SQLException
+   *             if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    String sqlStmt = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    SqoopOptions opts = new SqoopOptions(conf);
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    opts.setUsername(username);
+    opts.setPassword(password);
+
+    return opts;
+  }
+
+  SqoopOptions getSqoopOptions(String[] args, SqoopTool tool) {
+    SqoopOptions opts = null;
+    try {
+      opts = tool.parseArguments(args, null, null, true);
+      String username = MSSQLTestUtils.getDBUserName();
+      String password = MSSQLTestUtils.getDBPassWord();
+      opts.setUsername(username);
+      opts.setPassword(password);
+
+    } catch (Exception e) {
+      LOG.error(StringUtils.stringifyException(e));
+      fail("Invalid options: " + e.toString());
+    }
+
+    return opts;
+  }
+
+  protected String getTableName() {
+    return "tpch1m_lineitem";
+  }
+}
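
For readability, the argument vector that getArgv(true, columns, "L_ORDERKEY") assembles for testSplitByFirstCol() amounts to roughly the following. The Hadoop generic flags added by CommonArgs.addHadoopFlags() are omitted, and the connect string, credentials, warehouse directory and abbreviated column list are placeholders rather than values taken from this patch:

// Rough expansion of the argv built by getArgv() for testSplitByFirstCol().
// Connect string, credentials, warehouse dir and the column list shown here
// are placeholders/abbreviations, not values from this patch.
public class MultiMapsArgvSketch {
  public static void main(String[] args) {
    String[] argv = {
        "--table", "tpch1m_lineitem",
        "--columns", "L_ORDERKEY,L_PARTKEY,...,L_COMMENT,",  // 16 columns, trailing comma as built
        "--split-by", "L_ORDERKEY",
        "--warehouse-dir", "/tmp/sqoop-test-warehouse",
        "--connect", "jdbc:sqlserver://sqlserverhost:1433;database=SQOOPTEST",
        "--username", "SQOOPUSER",
        "--password", "PASSWORD",
        "--as-sequencefile",
        "--num-mappers", "2",   // two map tasks, hence multiple output files to sum over
    };
    for (String a : argv) {
      System.out.println(a);
    }
  }
}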

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java
deleted file mode 100644
index b28c165..0000000
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsManualTest.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sqoop.manager.sqlserver;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.util.StringUtils;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.config.ConfigurationHelper;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.ReparseMapper;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.fail;
-
-/**
- * Test that the parse() methods generated in user SqoopRecord implementations
- * work in SQL Server.
- *
- * This uses JDBC to import data from an SQLServer database to HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerParseMethodsManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerParseMethodsManualTest extends ImportJobTestCase {
-
-  @Before
-  public void setUp() {
-    super.setUp();
-    Path p = new Path(getWarehouseDir());
-    try {
-      FileSystem fs = FileSystem.get(new Configuration());
-      fs.delete(p);
-    } catch (IOException e) {
-      LOG.error("Setup fail with IOException: " + StringUtils.stringifyException(e));
-      fail("Setup fail with IOException: " + StringUtils.stringifyException(e));
-    }
-  }
-
-  @After
-  public void tearDown() {
-    try {
-      dropTableIfExists(getTableName());
-    } catch (SQLException sqle) {
-      LOG.info("Table clean-up failed: " + sqle);
-    } finally {
-      super.tearDown();
-    }
-  }
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  private String[] getArgv(boolean includeHadoopFlags,
-      String fieldTerminator, String lineTerminator, String encloser,
-      String escape, boolean encloserRequired) {
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-
-    args.add("--table");
-    args.add(getTableName());
-    args.add("--warehouse-dir");
-    args.add(getWarehouseDir());
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--as-textfile");
-    args.add("--split-by");
-    args.add("DATA_COL0"); // always split by first column.
-    args.add("--fields-terminated-by");
-    args.add(fieldTerminator);
-    args.add("--lines-terminated-by");
-    args.add(lineTerminator);
-    args.add("--escaped-by");
-    args.add(escape);
-    if (encloserRequired) {
-      args.add("--enclosed-by");
-    } else {
-      args.add("--optionally-enclosed-by");
-    }
-    args.add(encloser);
-    args.add("--num-mappers");
-    args.add("1");
-
-    return args.toArray(new String[0]);
-  }
-
-  public void runParseTest(String fieldTerminator, String lineTerminator,
-      String encloser, String escape, boolean encloseRequired)
-      throws IOException {
-
-    ClassLoader prevClassLoader = null;
-
-    String[] argv = getArgv(true, fieldTerminator, lineTerminator,
-        encloser, escape, encloseRequired);
-    runImport(argv);
-    try {
-      String tableClassName = getTableName();
-
-      argv = getArgv(false, fieldTerminator, lineTerminator, encloser,
-          escape, encloseRequired);
-      SqoopOptions opts = new ImportTool().parseArguments(argv, null,
-          null, true);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-
-      // Make sure the user's class is loaded into our address space.
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          tableClassName);
-
-      JobConf job = new JobConf();
-      job.setJar(jarFileName);
-
-      // Tell the job what class we're testing.
-      job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName);
-
-      // use local mode in the same JVM.
-      ConfigurationHelper.setJobtrackerAddr(job, "local");
-      job.set("fs.default.name", "file:///");
-
-      String warehouseDir = getWarehouseDir();
-      Path warehousePath = new Path(warehouseDir);
-      Path inputPath = new Path(warehousePath, getTableName());
-      Path outputPath = new Path(warehousePath, getTableName() + "-out");
-
-      job.setMapperClass(ReparseMapper.class);
-      job.setNumReduceTasks(0);
-      FileInputFormat.addInputPath(job, inputPath);
-      FileOutputFormat.setOutputPath(job, outputPath);
-
-      job.setOutputKeyClass(Text.class);
-      job.setOutputValueClass(NullWritable.class);
-
-      JobClient.runJob(job);
-    } catch (InvalidOptionsException ioe) {
-      LOG.error(StringUtils.stringifyException(ioe));
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      LOG.error(StringUtils.stringifyException(pe));
-      fail(pe.toString());
-    } finally {
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testDefaults() throws IOException {
-    String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
-    String[] vals = { "64", "'foo'", "128" };
-
-    createTableWithColTypes(types, vals);
-    runParseTest(",", "\\n", "\\\"", "\\", false);
-  }
-
-  @Test
-  public void testRequiredEnclose() throws IOException {
-    String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
-    String[] vals = { "64", "'foo'", "128" };
-
-    createTableWithColTypes(types, vals);
-    runParseTest(",", "\\n", "\\\"", "\\", true);
-  }
-
-  @Test
-  public void testStringEscapes() throws IOException {
-    String[] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)",
-        "VARCHAR(32)", "VARCHAR(32)", };
-    String[] vals = { "'foo'", "'foo,bar'", "'foo''bar'", "'foo\\bar'",
-        "'foo,bar''baz'", };
-
-    createTableWithColTypes(types, vals);
-    runParseTest(",", "\\n", "\\\'", "\\", false);
-  }
-
-  @Test
-  public void testNumericTypes() throws IOException {
-    String[] types = { "INTEGER", "REAL", "FLOAT", "DATE", "TIME", "BIT", };
-    String[] vals = { "42", "36.0", "127.1", "'2009-07-02'", "'11:24:00'",
-
-    "1", };
-
-    createTableWithColTypes(types, vals);
-    runParseTest(",", "\\n", "\\\'", "\\", false);
-  }
-
-  protected boolean useHsqldbTestServer() {
-    return false;
-  }
-
-  protected String getConnectString() {
-    return MSSQLTestUtils.getDBConnectString();
-  }
-
-  /**
-   * Drop a table if it already exists in the database.
-   *
-   * @param table
-   *            the name of the table to drop.
-   * @throws SQLException
-   *             if something goes wrong.
-   */
-  protected void dropTableIfExists(String table) throws SQLException {
-    Connection conn = getManager().getConnection();
-    String sqlStmt = "IF OBJECT_ID('" + table
-        + "') IS NOT NULL  DROP TABLE " + table;
-
-    PreparedStatement statement = conn.prepareStatement(sqlStmt,
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
-
-  protected SqoopOptions getSqoopOptions(Configuration conf) {
-
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-    SqoopOptions opts = new SqoopOptions(conf);
-    opts.setUsername(username);
-    opts.setPassword(password);
-    return opts;
-
-  }
-}
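
For context: runParseTest() in the parse-methods test drives a generated SqoopRecord through an import followed by a re-parse MapReduce job, using the delimiter flags assembled in getArgv(). The sketch below is not Sqoop's generated record code; it only illustrates, under assumed semantics, how a field terminator, an encloser and an escape character shape a single text record:

// Illustrative sketch only -- not Sqoop's generated code. It mimics how the
// delimiter flags exercised by runParseTest() shape one text record: fields
// are joined by the field terminator, and a field is wrapped in the encloser
// (with embedded special characters escaped) when it contains one, or always
// when --enclosed-by rather than --optionally-enclosed-by is used.
public class DelimiterSketch {
  static String format(String[] fields, char fieldTerm, char encloser,
                       char escape, boolean encloseRequired) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < fields.length; i++) {
      String f = fields[i];
      boolean enclose = encloseRequired
          || f.indexOf(fieldTerm) >= 0 || f.indexOf(encloser) >= 0;
      if (enclose) {
        sb.append(encloser);
        for (char c : f.toCharArray()) {
          if (c == encloser || c == escape) {
            sb.append(escape);           // escape embedded special characters
          }
          sb.append(c);
        }
        sb.append(encloser);
      } else {
        sb.append(f);
      }
      if (i < fields.length - 1) {
        sb.append(fieldTerm);
      }
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // Mirrors testStringEscapes(): ',' terminator, '\'' encloser, '\\' escape.
    System.out.println(format(
        new String[] {"foo", "foo,bar", "foo'bar"}, ',', '\'', '\\', false));
    // prints: foo,'foo,bar','foo\'bar'
  }
}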

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java
new file mode 100644
index 0000000..9547d80
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerParseMethodsTest.java
@@ -0,0 +1,287 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.StringUtils;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
+import com.cloudera.sqoop.config.ConfigurationHelper;
+import com.cloudera.sqoop.orm.CompilationManager;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import com.cloudera.sqoop.testutil.ReparseMapper;
+import com.cloudera.sqoop.tool.ImportTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
+
+/**
+ * Test that the parse() methods generated in user SqoopRecord implementations
+ * work in SQL Server.
+ *
+ * This uses JDBC to import data from an SQLServer database to HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerParseMethodsTest or -Dthirdparty=true.
+ *
+ * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerParseMethodsTest extends ImportJobTestCase {
+
+  @Before
+  public void setUp() {
+    super.setUp();
+    Path p = new Path(getWarehouseDir());
+    try {
+      FileSystem fs = FileSystem.get(new Configuration());
+      fs.delete(p);
+    } catch (IOException e) {
+      LOG.error("Setup fail with IOException: " + StringUtils.stringifyException(e));
+      fail("Setup fail with IOException: " + StringUtils.stringifyException(e));
+    }
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      dropTableIfExists(getTableName());
+    } catch (SQLException sqle) {
+      LOG.info("Table clean-up failed: " + sqle);
+    } finally {
+      super.tearDown();
+    }
+  }
+
+  /**
+   * Create the argv to pass to Sqoop.
+   *
+   * @return the argv as an array of strings.
+   */
+  private String[] getArgv(boolean includeHadoopFlags,
+      String fieldTerminator, String lineTerminator, String encloser,
+      String escape, boolean encloserRequired) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--as-textfile");
+    args.add("--split-by");
+    args.add("DATA_COL0"); // always split by first column.
+    args.add("--fields-terminated-by");
+    args.add(fieldTerminator);
+    args.add("--lines-terminated-by");
+    args.add(lineTerminator);
+    args.add("--escaped-by");
+    args.add(escape);
+    if (encloserRequired) {
+      args.add("--enclosed-by");
+    } else {
+      args.add("--optionally-enclosed-by");
+    }
+    args.add(encloser);
+    args.add("--num-mappers");
+    args.add("1");
+
+    return args.toArray(new String[0]);
+  }
+
+  public void runParseTest(String fieldTerminator, String lineTerminator,
+      String encloser, String escape, boolean encloseRequired)
+      throws IOException {
+
+    ClassLoader prevClassLoader = null;
+
+    String[] argv = getArgv(true, fieldTerminator, lineTerminator,
+        encloser, escape, encloseRequired);
+    runImport(argv);
+    try {
+      String tableClassName = getTableName();
+
+      argv = getArgv(false, fieldTerminator, lineTerminator, encloser,
+          escape, encloseRequired);
+      SqoopOptions opts = new ImportTool().parseArguments(argv, null,
+          null, true);
+
+      CompilationManager compileMgr = new CompilationManager(opts);
+      String jarFileName = compileMgr.getJarFilename();
+
+      // Make sure the user's class is loaded into our address space.
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+          tableClassName);
+
+      JobConf job = new JobConf();
+      job.setJar(jarFileName);
+
+      // Tell the job what class we're testing.
+      job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName);
+
+      // use local mode in the same JVM.
+      ConfigurationHelper.setJobtrackerAddr(job, "local");
+      job.set("fs.default.name", "file:///");
+
+      String warehouseDir = getWarehouseDir();
+      Path warehousePath = new Path(warehouseDir);
+      Path inputPath = new Path(warehousePath, getTableName());
+      Path outputPath = new Path(warehousePath, getTableName() + "-out");
+
+      job.setMapperClass(ReparseMapper.class);
+      job.setNumReduceTasks(0);
+      FileInputFormat.addInputPath(job, inputPath);
+      FileOutputFormat.setOutputPath(job, outputPath);
+
+      job.setOutputKeyClass(Text.class);
+      job.setOutputValueClass(NullWritable.class);
+
+      JobClient.runJob(job);
+    } catch (InvalidOptionsException ioe) {
+      LOG.error(StringUtils.stringifyException(ioe));
+      fail(ioe.toString());
+    } catch (ParseException pe) {
+      LOG.error(StringUtils.stringifyException(pe));
+      fail(pe.toString());
+    } finally {
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  @Test
+  public void testDefaults() throws IOException {
+    String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
+    String[] vals = { "64", "'foo'", "128" };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\"", "\\", false);
+  }
+
+  @Test
+  public void testRequiredEnclose() throws IOException {
+    String[] types = { "INTEGER", "VARCHAR(32)", "INTEGER" };
+    String[] vals = { "64", "'foo'", "128" };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\"", "\\", true);
+  }
+
+  @Test
+  public void testStringEscapes() throws IOException {
+    String[] types = { "VARCHAR(32)", "VARCHAR(32)", "VARCHAR(32)",
+        "VARCHAR(32)", "VARCHAR(32)", };
+    String[] vals = { "'foo'", "'foo,bar'", "'foo''bar'", "'foo\\bar'",
+        "'foo,bar''baz'", };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\'", "\\", false);
+  }
+
+  @Test
+  public void testNumericTypes() throws IOException {
+    String[] types = { "INTEGER", "REAL", "FLOAT", "DATE", "TIME", "BIT", };
+    String[] vals = { "42", "36.0", "127.1", "'2009-07-02'", "'11:24:00'",
+        "1", };
+
+    createTableWithColTypes(types, vals);
+    runParseTest(",", "\\n", "\\\'", "\\", false);
+  }
+
+  protected boolean useHsqldbTestServer() {
+    return false;
+  }
+
+  protected String getConnectString() {
+    return MSSQLTestUtils.getDBConnectString();
+  }
+
+  /**
+   * Drop a table if it already exists in the database.
+   *
+   * @param table
+   *            the name of the table to drop.
+   * @throws SQLException
+   *             if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    String sqlStmt = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    SqoopOptions opts = new SqoopOptions(conf);
+    opts.setUsername(username);
+    opts.setPassword(password);
+    return opts;
+  }
+}
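
The delimiter arguments that runParseTest() hands to getArgv() above are the same ones the generated SqoopRecord uses when ReparseMapper writes each record back out and parses it again. The following is a minimal, self-contained sketch of that enclose/escape round trip; it is not Sqoop's generated code, and the class and helper names (FieldRoundTripSketch, encodeField, decodeField) are hypothetical, but it shows why fixtures such as "foo,bar" and the SQL literal 'foo''bar' in testStringEscapes survive the trip unchanged.

    // Hedged sketch: illustrates the --optionally-enclosed-by / --escaped-by
    // round trip the parse() tests verify. Not Sqoop code; names are illustrative.
    public class FieldRoundTripSketch {

      // Enclose a value and escape embedded enclosers/escapes, roughly what
      // --optionally-enclosed-by does when a special character appears in the value.
      static String encodeField(String value, char fieldDelim, char encloser, char escape) {
        boolean needsEnclosing = value.indexOf(fieldDelim) >= 0
            || value.indexOf(encloser) >= 0
            || value.indexOf(escape) >= 0;
        if (!needsEnclosing) {
          return value;
        }
        StringBuilder sb = new StringBuilder();
        sb.append(encloser);
        for (char c : value.toCharArray()) {
          if (c == encloser || c == escape) {
            sb.append(escape); // escape the embedded special character
          }
          sb.append(c);
        }
        sb.append(encloser);
        return sb.toString();
      }

      // Undo encodeField: strip the surrounding encloser and drop escape characters.
      static String decodeField(String field, char encloser, char escape) {
        if (field.isEmpty() || field.charAt(0) != encloser) {
          return field; // value was never enclosed
        }
        StringBuilder sb = new StringBuilder();
        boolean escaped = false;
        for (int i = 1; i < field.length() - 1; i++) { // skip the outer enclosers
          char c = field.charAt(i);
          if (!escaped && c == escape) {
            escaped = true;
            continue;
          }
          escaped = false;
          sb.append(c);
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        // Values analogous to the testStringEscapes fixtures (the doubled quote in
        // the SQL literal 'foo''bar' is a single quote once stored).
        String[] vals = { "foo", "foo,bar", "foo'bar", "foo,bar'baz" };
        for (String v : vals) {
          String encoded = encodeField(v, ',', '\'', '\\');
          String decoded = decodeField(encoded, '\'', '\\');
          System.out.println(v + " -> " + encoded + " -> " + decoded);
        }
      }
    }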

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java
deleted file mode 100644
index d891c2b..0000000
--- a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryManualTest.java
+++ /dev/null
@@ -1,301 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sqoop.manager.sqlserver;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
-
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
-import com.cloudera.sqoop.orm.CompilationManager;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.testutil.SeqFileReader;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.util.ClassLoaderStack;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * Test that --query works in SQL Server.
- *
- * This uses JDBC to import data from an SQLServer database to HDFS.
- *
- * Since this requires an SQLServer installation,
- * this class is named in such a way that Sqoop's default QA process does
- * not run it. You need to run this manually with
- * -Dtestcase=SQLServerQueryManualTest.
- *
- * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
- * where Sqoop will be able to access it (since this library cannot be checked
- * into Apache's tree for licensing reasons).
- *
- * To set up your test environment:
- *   Install SQL Server Express 2012
- *   Create a database SQOOPTEST
- *   Create a login SQOOPUSER with password PASSWORD and grant all
- *   access for SQOOPTEST to SQOOPUSER.
- */
-public class SQLServerQueryManualTest extends ImportJobTestCase {
-
-  @Before
-  public void setUp() {
-    super.setUp();
-    MSSQLTestUtils utils = new MSSQLTestUtils();
-    try {
-      utils.createTableFromSQL(MSSQLTestUtils.CREATE_TALBE_LINEITEM);
-      utils.populateLineItem();
-    } catch (SQLException e) {
-      LOG.error("Setup fail with SQLException: " + StringUtils.stringifyException(e));
-      fail("Setup fail with SQLException: " + e.toString());
-    }
-
-  }
-
-  @After
-  public void tearDown() {
-    super.tearDown();
-    MSSQLTestUtils utils = new MSSQLTestUtils();
-    try {
-      utils.dropTableIfExists("TPCH1M_LINEITEM");
-    } catch (SQLException e) {
-      LOG.error("TearDown fail with SQLException: " + StringUtils.stringifyException(e));
-      fail("TearDown fail with SQLException: " + e.toString());
-    }
-  }
-
-  /**
-   * Create the argv to pass to Sqoop.
-   *
-   * @return the argv as an array of strings.
-   */
-  protected String[] getArgv(boolean includeHadoopFlags, String query,
-      String targetDir, boolean allowParallel) {
-
-    ArrayList<String> args = new ArrayList<String>();
-
-    if (includeHadoopFlags) {
-      CommonArgs.addHadoopFlags(args);
-    }
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-
-    args.add("--query");
-    args.add(query);
-    args.add("--split-by");
-    args.add("L_ORDERKEY");
-    args.add("--connect");
-    args.add(getConnectString());
-    args.add("--username");
-    args.add(username);
-    args.add("--password");
-    args.add(password);
-    args.add("--as-sequencefile");
-    args.add("--target-dir");
-    args.add(targetDir);
-    args.add("--class-name");
-    args.add(getTableName());
-    if (allowParallel) {
-      args.add("--num-mappers");
-      args.add("2");
-    } else {
-      args.add("--num-mappers");
-      args.add("1");
-    }
-
-    return args.toArray(new String[0]);
-  }
-
-  /**
-   * Given a comma-delimited list of integers, grab and parse the first int.
-   *
-   * @param str
-   *            a comma-delimited list of values, the first of which is an
-   *            int.
-   * @return the first field in the string, cast to int
-   */
-  private int getFirstInt(String str) {
-    String[] parts = str.split(",");
-    return Integer.parseInt(parts[0]);
-  }
-
-  public void runQueryTest(String query, String firstValStr,
-      int numExpectedResults, int expectedSum, String targetDir)
-      throws IOException {
-
-    ClassLoader prevClassLoader = null;
-    SequenceFile.Reader reader = null;
-
-    String[] argv = getArgv(true, query, targetDir, false);
-    runImport(argv);
-    try {
-      SqoopOptions opts = new ImportTool().parseArguments(getArgv(false,
-          query, targetDir, false), null, null, true);
-
-      CompilationManager compileMgr = new CompilationManager(opts);
-      String jarFileName = compileMgr.getJarFilename();
-
-      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
-          getTableName());
-
-      reader = SeqFileReader.getSeqFileReader(getDataFilePath()
-          .toString());
-
-      // here we can actually instantiate (k, v) pairs.
-      Configuration conf = new Configuration();
-      Object key = ReflectionUtils
-          .newInstance(reader.getKeyClass(), conf);
-      Object val = ReflectionUtils.newInstance(reader.getValueClass(),
-          conf);
-
-      if (reader.next(key) == null) {
-        fail("Empty SequenceFile during import");
-      }
-
-      // make sure that the value we think should be at the top, is.
-      reader.getCurrentValue(val);
-      assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
-          val.toString());
-
-      // We know that these values are two ints separated by a ','
-      // character.
-      // Since this is all dynamic, though, we don't want to actually link
-      // against the class and use its methods. So we just parse this back
-      // into int fields manually. Sum them up and ensure that we get the
-      // expected total for the first column, to verify that we got all
-      // the
-      // results from the db into the file.
-      int curSum = getFirstInt(val.toString());
-      int totalResults = 1;
-
-      // now sum up everything else in the file.
-      while (reader.next(key) != null) {
-        reader.getCurrentValue(val);
-        curSum += getFirstInt(val.toString());
-        totalResults++;
-      }
-
-      assertEquals("Total sum of first db column mismatch", expectedSum,
-          curSum);
-      assertEquals("Incorrect number of results for query",
-          numExpectedResults, totalResults);
-    } catch (InvalidOptionsException ioe) {
-      LOG.error(StringUtils.stringifyException(ioe));
-      fail(ioe.toString());
-    } catch (ParseException pe) {
-      LOG.error(StringUtils.stringifyException(pe));
-      fail(pe.toString());
-    } finally {
-      IOUtils.closeStream(reader);
-
-      if (null != prevClassLoader) {
-        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
-      }
-    }
-  }
-
-  @Test
-  public void testSelectStar() throws IOException {
-    runQueryTest("SELECT * FROM " + getTableName()
-        + " WHERE L_ORDERKEY > 0 AND $CONDITIONS",
-        "1,2,3,4,5,6.00,7.00,8.00,AB,CD,abcd,efgh,hijk,dothis,likethis,"
-            + "nocomments\n", 4, 10, getTablePath().toString());
-  }
-
-  @Test
-  public void testCompoundWhere() throws IOException {
-    runQueryTest("SELECT * FROM " + getTableName()
-        + " WHERE L_ORDERKEY > 1 AND L_PARTKEY < 4 AND $CONDITIONS",
-        "2,3,4,5,6,7.00,8.00,9.00,AB,CD,abcd,efgh,hijk,dothis,likethis,"
-            + "nocomments\n", 1, 2, getTablePath().toString());
-  }
-
-  @Test
-  public void testFailNoConditions() throws IOException {
-    String[] argv = getArgv(true, "SELECT * FROM " + getTableName(),
-        getTablePath().toString() + "where $CONDITIONS", true);
-    try {
-      runImport(argv);
-      fail("Expected exception running import without $CONDITIONS");
-    } catch (Exception e) {
-      LOG.info("Got exception " + e + " running job (expected; ok)");
-    }
-  }
-
-  protected boolean useHsqldbTestServer() {
-
-    return false;
-  }
-
-  protected String getConnectString() {
-    return MSSQLTestUtils.getDBConnectString();
-  }
-
-  /**
-   * Drop a table if it already exists in the database.
-   *
-   * @param table
-   *            the name of the table to drop.
-   * @throws SQLException
-   *             if something goes wrong.
-   */
-  protected void dropTableIfExists(String table) throws SQLException {
-    Connection conn = getManager().getConnection();
-    String sqlStmt = "IF OBJECT_ID('" + table
-        + "') IS NOT NULL  DROP TABLE " + table;
-
-    PreparedStatement statement = conn.prepareStatement(sqlStmt,
-        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
-    try {
-      statement.executeUpdate();
-      conn.commit();
-    } finally {
-      statement.close();
-    }
-  }
-
-  protected SqoopOptions getSqoopOptions(Configuration conf) {
-    SqoopOptions opt = new SqoopOptions(conf);
-    String username = MSSQLTestUtils.getDBUserName();
-    String password = MSSQLTestUtils.getDBPassWord();
-    SqoopOptions opts = new SqoopOptions(conf);
-    opts.setUsername(username);
-    opts.setPassword(password);
-
-    return opt;
-  }
-
-  protected String getTableName() {
-    return "tpch1m_lineitem";
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/558bdaea/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java
new file mode 100644
index 0000000..1d570fe
--- /dev/null
+++ b/src/test/org/apache/sqoop/manager/sqlserver/SQLServerQueryTest.java
@@ -0,0 +1,303 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.manager.sqlserver;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
+import com.cloudera.sqoop.orm.CompilationManager;
+import com.cloudera.sqoop.testutil.CommonArgs;
+import com.cloudera.sqoop.testutil.ImportJobTestCase;
+import com.cloudera.sqoop.testutil.SeqFileReader;
+import com.cloudera.sqoop.tool.ImportTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Test that --query works in SQL Server.
+ *
+ * This uses JDBC to import data from an SQLServer database to HDFS.
+ *
+ * Since this requires an SQLServer installation,
+ * this class is named in such a way that Sqoop's default QA process does
+ * not run it. You need to run this manually with
+ * -Dtestcase=SQLServerQueryTest or -Dthirdparty=true.
+ *
+ * You need to put SQL Server JDBC driver library (sqljdbc4.jar) in a location
+ * where Sqoop will be able to access it (since this library cannot be checked
+ * into Apache's tree for licensing reasons) and set its path through -Dsqoop.thirdparty.lib.dir.
+ *
+ * To set up your test environment:
+ *   Install SQL Server Express 2012
+ *   Create a database SQOOPTEST
+ *   Create a login SQOOPUSER with password PASSWORD and grant all
+ *   access for SQOOPTEST to SQOOPUSER.
+ *   Set these through -Dsqoop.test.sqlserver.connectstring.host_url, -Dsqoop.test.sqlserver.database and
+ *   -Dms.sqlserver.password
+ */
+public class SQLServerQueryTest extends ImportJobTestCase {
+
+  @Before
+  public void setUp() {
+    super.setUp();
+    MSSQLTestUtils utils = new MSSQLTestUtils();
+    try {
+      utils.createTableFromSQL(MSSQLTestUtils.CREATE_TALBE_LINEITEM);
+      utils.populateLineItem();
+    } catch (SQLException e) {
+      LOG.error("Setup fail with SQLException: " + StringUtils.stringifyException(e));
+      fail("Setup fail with SQLException: " + e.toString());
+    }
+
+  }
+
+  @After
+  public void tearDown() {
+    super.tearDown();
+    MSSQLTestUtils utils = new MSSQLTestUtils();
+    try {
+      utils.dropTableIfExists("TPCH1M_LINEITEM");
+    } catch (SQLException e) {
+      LOG.error("TearDown fail with SQLException: " + StringUtils.stringifyException(e));
+      fail("TearDown fail with SQLException: " + e.toString());
+    }
+  }
+
+  /**
+   * Create the argv to pass to Sqoop.
+   *
+   * @return the argv as an array of strings.
+   */
+  protected String[] getArgv(boolean includeHadoopFlags, String query,
+      String targetDir, boolean allowParallel) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+
+    args.add("--query");
+    args.add(query);
+    args.add("--split-by");
+    args.add("L_ORDERKEY");
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--username");
+    args.add(username);
+    args.add("--password");
+    args.add(password);
+    args.add("--as-sequencefile");
+    args.add("--target-dir");
+    args.add(targetDir);
+    args.add("--class-name");
+    args.add(getTableName());
+    if (allowParallel) {
+      args.add("--num-mappers");
+      args.add("2");
+    } else {
+      args.add("--num-mappers");
+      args.add("1");
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  /**
+   * Given a comma-delimited list of integers, grab and parse the first int.
+   *
+   * @param str
+   *            a comma-delimited list of values, the first of which is an
+   *            int.
+   * @return the first field in the string, cast to int
+   */
+  private int getFirstInt(String str) {
+    String[] parts = str.split(",");
+    return Integer.parseInt(parts[0]);
+  }
+
+  public void runQueryTest(String query, String firstValStr,
+      int numExpectedResults, int expectedSum, String targetDir)
+      throws IOException {
+
+    ClassLoader prevClassLoader = null;
+    SequenceFile.Reader reader = null;
+
+    String[] argv = getArgv(true, query, targetDir, false);
+    runImport(argv);
+    try {
+      SqoopOptions opts = new ImportTool().parseArguments(getArgv(false,
+          query, targetDir, false), null, null, true);
+
+      CompilationManager compileMgr = new CompilationManager(opts);
+      String jarFileName = compileMgr.getJarFilename();
+
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
+          getTableName());
+
+      reader = SeqFileReader.getSeqFileReader(getDataFilePath()
+          .toString());
+
+      // here we can actually instantiate (k, v) pairs.
+      Configuration conf = new Configuration();
+      Object key = ReflectionUtils
+          .newInstance(reader.getKeyClass(), conf);
+      Object val = ReflectionUtils.newInstance(reader.getValueClass(),
+          conf);
+
+      if (reader.next(key) == null) {
+        fail("Empty SequenceFile during import");
+      }
+
+      // make sure that the value we think should be at the top, is.
+      reader.getCurrentValue(val);
+      assertEquals("Invalid ordering within sorted SeqFile", firstValStr,
+          val.toString());
+
+      // We know that these values are two ints separated by a ','
+      // character.
+      // Since this is all dynamic, though, we don't want to actually link
+      // against the class and use its methods. So we just parse this back
+      // into int fields manually. Sum them up and ensure that we get the
+      // expected total for the first column, to verify that we got all
+      // the
+      // results from the db into the file.
+      int curSum = getFirstInt(val.toString());
+      int totalResults = 1;
+
+      // now sum up everything else in the file.
+      while (reader.next(key) != null) {
+        reader.getCurrentValue(val);
+        curSum += getFirstInt(val.toString());
+        totalResults++;
+      }
+
+      assertEquals("Total sum of first db column mismatch", expectedSum,
+          curSum);
+      assertEquals("Incorrect number of results for query",
+          numExpectedResults, totalResults);
+    } catch (InvalidOptionsException ioe) {
+      LOG.error(StringUtils.stringifyException(ioe));
+      fail(ioe.toString());
+    } catch (ParseException pe) {
+      LOG.error(StringUtils.stringifyException(pe));
+      fail(pe.toString());
+    } finally {
+      IOUtils.closeStream(reader);
+
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  @Test
+  public void testSelectStar() throws IOException {
+    runQueryTest("SELECT * FROM " + getTableName()
+        + " WHERE L_ORDERKEY > 0 AND $CONDITIONS",
+        "1,2,3,4,5,6.00,7.00,8.00,AB,CD,abcd,efgh,hijk,dothis,likethis,"
+            + "nocomments\n", 4, 10, getTablePath().toString());
+  }
+
+  @Test
+  public void testCompoundWhere() throws IOException {
+    runQueryTest("SELECT * FROM " + getTableName()
+        + " WHERE L_ORDERKEY > 1 AND L_PARTKEY < 4 AND $CONDITIONS",
+        "2,3,4,5,6,7.00,8.00,9.00,AB,CD,abcd,efgh,hijk,dothis,likethis,"
+            + "nocomments\n", 1, 2, getTablePath().toString());
+  }
+
+  @Test
+  public void testFailNoConditions() throws IOException {
+    String[] argv = getArgv(true, "SELECT * FROM " + getTableName(),
+        getTablePath().toString() + "where $CONDITIONS", true);
+    try {
+      runImport(argv);
+      fail("Expected exception running import without $CONDITIONS");
+    } catch (Exception e) {
+      LOG.info("Got exception " + e + " running job (expected; ok)");
+    }
+  }
+
+  protected boolean useHsqldbTestServer() {
+
+    return false;
+  }
+
+  protected String getConnectString() {
+    return MSSQLTestUtils.getDBConnectString();
+  }
+
+  /**
+   * Drop a table if it already exists in the database.
+   *
+   * @param table
+   *            the name of the table to drop.
+   * @throws SQLException
+   *             if something goes wrong.
+   */
+  protected void dropTableIfExists(String table) throws SQLException {
+    Connection conn = getManager().getConnection();
+    String sqlStmt = "IF OBJECT_ID('" + table
+        + "') IS NOT NULL  DROP TABLE " + table;
+
+    PreparedStatement statement = conn.prepareStatement(sqlStmt,
+        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    try {
+      statement.executeUpdate();
+      conn.commit();
+    } finally {
+      statement.close();
+    }
+  }
+
+  protected SqoopOptions getSqoopOptions(Configuration conf) {
+    String username = MSSQLTestUtils.getDBUserName();
+    String password = MSSQLTestUtils.getDBPassWord();
+    SqoopOptions opts = new SqoopOptions(conf);
+    opts.setUsername(username);
+    opts.setPassword(password);
+
+    return opts;
+  }
+
+  protected String getTableName() {
+    return "tpch1m_lineitem";
+  }
+}
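
All of the free-form queries in the tests above carry the literal token $CONDITIONS; before each mapper runs, Sqoop substitutes a predicate on the --split-by column for that token so the splits partition the result set. The sketch below shows the substitution idea only; it is not Sqoop's internal implementation, and the class and method names (ConditionsSubstitutionSketch, boundQuery) and the example bounds are illustrative.

    // Hedged sketch of the $CONDITIONS substitution that free-form --query
    // imports rely on. Not Sqoop's implementation; names and bounds are illustrative.
    public class ConditionsSubstitutionSketch {

      // Replace the $CONDITIONS token with a bound predicate on the split column.
      static String boundQuery(String freeFormQuery, String splitByCol, long lo, long hi) {
        String predicate = splitByCol + " >= " + lo + " AND " + splitByCol + " < " + hi;
        return freeFormQuery.replace("$CONDITIONS", predicate);
      }

      public static void main(String[] args) {
        String query =
            "SELECT * FROM TPCH1M_LINEITEM WHERE L_ORDERKEY > 0 AND $CONDITIONS";

        // With --num-mappers 2, each mapper would execute the query with its own
        // bounds on L_ORDERKEY, e.g.:
        System.out.println(boundQuery(query, "L_ORDERKEY", 1, 3));
        System.out.println(boundQuery(query, "L_ORDERKEY", 3, 5));

        // testFailNoConditions covers the opposite case: a query without the
        // $CONDITIONS token is rejected because there is nowhere to inject the
        // per-split predicate.
      }
    }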