Posted to commits@sqoop.apache.org by an...@apache.org on 2018/01/18 14:03:55 UTC

[09/32] sqoop git commit: SQOOP-3273: Removing com.cloudera.sqoop packages

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java
new file mode 100644
index 0000000..c4caafb
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportAddRowKeyTest.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import org.apache.commons.lang.StringUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static java.util.Arrays.asList;
+import static org.apache.commons.lang.StringUtils.join;
+
+@RunWith(Parameterized.class)
+public class HBaseImportAddRowKeyTest extends HBaseTestCase {
+
+  @Parameterized.Parameters(name = "bulkLoad = {0}")
+  public static Iterable<? extends Object> bulkLoadParameters() {
+    return Arrays.asList(new Boolean[] { false }, new Boolean[] { true });
+  }
+
+  private String[] columnTypes;
+
+  private String[] columnValues;
+
+  private String hbaseTableName;
+
+  private String hbaseColumnFamily;
+
+  private String hbaseTmpDir;
+
+  private String hbaseBulkLoadDir;
+
+  private boolean bulkLoad;
+
+  public HBaseImportAddRowKeyTest(boolean bulkLoad) {
+    this.bulkLoad = bulkLoad;
+  }
+
+  @Before
+  public void setUp() {
+    super.setUp();
+    columnTypes = new String[] { "INT", "INT" };
+    columnValues = new String[] { "0", "1" };
+    hbaseTableName = "addRowKeyTable";
+    hbaseColumnFamily = "addRowKeyFamily";
+    hbaseTmpDir = TEMP_BASE_DIR + "hbaseTmpDir";
+    hbaseBulkLoadDir = TEMP_BASE_DIR + "hbaseBulkLoadDir";
+    createTableWithColTypes(columnTypes, columnValues);
+  }
+
+  @Test
+  public void testAddRowKey() throws IOException {
+    String[] argv = getImportArguments(true, hbaseTableName, hbaseColumnFamily);
+
+    runImport(argv);
+
+    // Row key should have been added
+    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(0), columnValues[0]);
+    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(1), columnValues[1]);
+  }
+
+  @Test
+  public void testAddRowKeyDefault() throws IOException {
+    String[] argv = getImportArguments(false, hbaseTableName, hbaseColumnFamily);
+
+    runImport(argv);
+
+    // Row key should not be added by default
+    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(0), null);
+    verifyHBaseCell(hbaseTableName, columnValues[0], hbaseColumnFamily, getColName(1), columnValues[1]);
+  }
+
+  @Test
+  public void testAddCompositeKey() throws IOException {
+    String rowKey = getColName(0) + "," + getColName(1);
+
+    String[] argv = getImportArguments(true, hbaseTableName, hbaseColumnFamily, rowKey);
+
+    runImport(argv);
+
+    // Row key should have been added
+    verifyHBaseCell(hbaseTableName, join(columnValues, '_'), hbaseColumnFamily, getColName(0), columnValues[0]);
+    verifyHBaseCell(hbaseTableName, join(columnValues, '_'), hbaseColumnFamily, getColName(1), columnValues[1]);
+  }
+
+  private String[] getImportArguments(boolean addRowKey, String hbaseTableName, String hbaseColumnFamily) {
+    return getImportArguments(addRowKey, hbaseTableName, hbaseColumnFamily, null);
+  }
+
+  private String[] getImportArguments(boolean addRowKey, String hbaseTableName, String hbaseColumnFamily, String rowKey) {
+    List<String> result = new ArrayList<>();
+
+    if (addRowKey) {
+      result.add("-D");
+      result.add("sqoop.hbase.add.row.key=true");
+    }
+    result.add("-D");
+    result.add("hbase.fs.tmp.dir=" + hbaseTmpDir);
+
+    result.addAll(asList(getArgv(true, hbaseTableName, hbaseColumnFamily, true, null)));
+
+    if (bulkLoad) {
+      result.add("--target-dir");
+      result.add(hbaseBulkLoadDir);
+      result.add("--hbase-bulkload");
+    }
+
+    if (!StringUtils.isBlank(rowKey)) {
+      result.add("--hbase-row-key");
+      result.add(rowKey);
+    }
+
+    return result.toArray(new String[result.size()]);
+  }
+
+}
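
A note for readers following the test above: HBaseImportAddRowKeyTest relies on the JUnit 4 Parameterized runner, which instantiates the class once per parameter array returned from the @Parameters method and passes each array to the constructor. A minimal, self-contained sketch of that mechanism follows; the class and test names are illustrative only and are not part of this commit.

import java.util.Arrays;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import static org.junit.Assert.assertTrue;

@RunWith(Parameterized.class)
public class ParameterizedRunnerSketchTest {

  // Each array returned here becomes one constructor call, so every @Test
  // method runs once with bulkLoad = false and once with bulkLoad = true.
  @Parameters(name = "bulkLoad = {0}")
  public static Iterable<Object[]> bulkLoadParameters() {
    return Arrays.asList(new Object[] { false }, new Object[] { true });
  }

  private final boolean bulkLoad;

  public ParameterizedRunnerSketchTest(boolean bulkLoad) {
    this.bulkLoad = bulkLoad;
  }

  @Test
  public void runsOncePerParameter() {
    // The real test uses the flag to decide whether to append --hbase-bulkload
    // and a --target-dir to the generated Sqoop arguments.
    assertTrue(bulkLoad || !bulkLoad);
  }
}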

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java
new file mode 100644
index 0000000..114cc6a
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportNullTest.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test imports of null column values into HBase.
+ */
+public class HBaseImportNullTest extends HBaseTestCase {
+
+  @Test
+  public void testNullRow() throws IOException {
+    String [] argv = getArgv(true, "nullRowT", "nullRowF", true, null);
+    String [] types = { "INT", "INT" };
+    String [] vals = { "0", "null" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+
+    // This cell should not be placed in the results.
+    verifyHBaseCell("nullRowT", "0", "nullRowF", getColName(1), null);
+
+    int rowCount = countHBaseTable("nullRowT", "nullRowF");
+    assertEquals(0, rowCount);
+  }
+
+  @Test
+  public void testNulls() throws IOException {
+    String [] argv = getArgv(true, "nullT", "nullF", true, null);
+    String [] types = { "INT", "INT", "INT" };
+    String [] vals = { "0", "42", "null" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+
+    // This cell should import correctly.
+    verifyHBaseCell("nullT", "0", "nullF", getColName(1), "42");
+
+    // This cell should not be placed in the results.
+    verifyHBaseCell("nullT", "0", "nullF", getColName(2), null);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseImportTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportTest.java
new file mode 100644
index 0000000..2e73cf3
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportTest.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
+
+/**
+ * Test imports of tables into HBase.
+ */
+public class HBaseImportTest extends HBaseTestCase {
+
+  @Test
+  public void testBasicUsage() throws IOException {
+    // Create the HBase table in Sqoop as we run the job.
+    String [] argv = getArgv(true, "BasicUsage", "BasicColFam", true, null);
+    String [] types = { "INT", "INT" };
+    String [] vals = { "0", "1" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+    verifyHBaseCell("BasicUsage", "0", "BasicColFam", getColName(1), "1");
+  }
+
+  @Test
+  public void testMissingTableFails() throws IOException {
+    // Test that if the table doesn't exist, we fail unless we
+    // explicitly create the table.
+    String [] argv = getArgv(true, "MissingTable", "MissingFam", false, null);
+    String [] types = { "INT", "INT" };
+    String [] vals = { "0", "1" };
+    createTableWithColTypes(types, vals);
+    try {
+      runImport(argv);
+      fail("Expected IOException");
+    } catch (IOException ioe) {
+      LOG.info("Got exception -- ok; we expected that job to fail.");
+    }
+  }
+
+  @Test
+  public void testOverwriteSucceeds() throws IOException {
+    // Test that we can create a table and then import immediately
+    // back on top of it without problem.
+    String [] argv = getArgv(true, "OverwriteT", "OverwriteF", true, null);
+    String [] types = { "INT", "INT" };
+    String [] vals = { "0", "1" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+    verifyHBaseCell("OverwriteT", "0", "OverwriteF", getColName(1), "1");
+    // Run a second time.
+    runImport(argv);
+    verifyHBaseCell("OverwriteT", "0", "OverwriteF", getColName(1), "1");
+  }
+
+  @Test
+  public void testOverwriteNullColumnsSucceeds() throws IOException {
+    // Test that we can create a table and then import immediately
+    // back on top of it without problem, then update a column to null and validate.
+    String [] argv = getArgv(true, "OverwriteTable", "OverwriteColumnFamily", true, null);
+    String [] types = { "INT", "INT", "INT", "DATETIME" };
+    String [] vals = { "0", "1", "1", "'2017-03-20'" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+    verifyHBaseCell("OverwriteTable", "0", "OverwriteColumnFamily", getColName(2), "1");
+    // Run a second time.
+    argv = getIncrementalArgv(true, "OverwriteTable", "OverwriteColumnFamily", true, null, false, false, "DATA_COL3", "2017-03-24 01:01:01.0", null);
+    vals = new String[] { "0", "1", null, "'2017-03-25'" };
+    updateTable(types, vals);
+    runImport(argv);
+    verifyHBaseCell("OverwriteTable", "0", "OverwriteColumnFamily", getColName(2), null);
+  }
+
+  @Test
+  public void testAppendWithTimestampSucceeds() throws IOException {
+    // Test that we can create a table and then import multiple rows,
+    // validating the append scenario with a timestamp check column.
+    String [] argv = getArgv(true, "AppendTable", "AppendColumnFamily", true, null);
+    String [] types = { "INT", "INT", "INT", "DATETIME" };
+    String [] vals = { "0", "1", "1", "'2017-03-20'" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+    verifyHBaseCell("AppendTable", "0", "AppendColumnFamily", getColName(2), "1");
+    // Run a second time.
+    argv = getIncrementalArgv(true, "AppendTable", "AppendColumnFamily", true, null, true, false, "DATA_COL1", "2017-03-24 01:01:01.0", null);
+    vals = new String[] { "1", "2", "3", "'2017-06-15'" };
+    insertIntoTable(types, vals);
+    runImport(argv);
+    verifyHBaseCell("AppendTable", "1", "AppendColumnFamily", getColName(2), "3");
+  }
+
+  @Test
+  public void testAppendSucceeds() throws IOException {
+    // Test that we can create a table and then import multiple rows,
+    // validating the append scenario with an ID column (DATA_COL3).
+    String [] argv = getArgv(true, "AppendTable", "AppendColumnFamily", true, null);
+    String [] types = { "INT", "INT", "INT", "DATETIME" };
+    String [] vals = { "0", "1", "1", "'2017-03-20'" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+    verifyHBaseCell("AppendTable", "0", "AppendColumnFamily", getColName(2), "1");
+    // Run a second time.
+    argv = getIncrementalArgv(true, "AppendTable", "AppendColumnFamily", true, null, true, true, "DATA_COL1", null, "DATA_COL3");
+    vals = new String[] { "1", "2", "3", "'2017-06-15'" };
+    insertIntoTable(types, vals);
+    runImport(argv);
+    verifyHBaseCell("AppendTable", "1", "AppendColumnFamily", getColName(2), "3");
+  }
+
+  @Test
+  public void testExitFailure() throws IOException {
+    String [] types = { "INT", "INT", "INT" };
+    String [] vals = { "0", "42", "43" };
+    createTableWithColTypes(types, vals);
+
+    String [] argv = getArgv(true, "NoHBaseT", "NoHBaseF", true, null);
+    try {
+      HBaseUtil.setAlwaysNoHBaseJarMode(true);
+      runImport(argv);
+    } catch (IOException e)  {
+      return;
+    } finally {
+      HBaseUtil.setAlwaysNoHBaseJarMode(false);
+    }
+
+    fail("should have gotten exception");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java b/src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java
new file mode 100644
index 0000000..f3616c3
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseImportTypesTest.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+/**
+ * Test imports of various column types into HBase.
+ */
+public class HBaseImportTypesTest extends HBaseTestCase {
+
+  @Test
+  public void testStrings() throws IOException {
+    String [] argv = getArgv(true, "stringT", "stringF", true, null);
+    String [] types = { "INT", "VARCHAR(32)" };
+    String [] vals = { "0", "'abc'" };
+    createTableWithColTypes(types, vals);
+    runImport(argv);
+    verifyHBaseCell("stringT", "0", "stringF", getColName(1), "abc");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java b/src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java
new file mode 100644
index 0000000..73a2247
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseKerberizedConnectivityTest.java
@@ -0,0 +1,33 @@
+package org.apache.sqoop.hbase;
+
+import org.apache.sqoop.infrastructure.kerberos.MiniKdcInfrastructureRule;
+import org.junit.ClassRule;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class HBaseKerberizedConnectivityTest extends HBaseTestCase {
+
+  private static final String HBASE_TABLE_NAME = "KerberosTest";
+  private static final String HBASE_COLUMN_FAMILY = "TestColumnFamily";
+  private static final String TEST_ROW_KEY = "0";
+  private static final String TEST_ROW_VALUE = "1";
+  private static final String[] COLUMN_TYPES = { "INT", "INT" };
+
+  @ClassRule
+  public static MiniKdcInfrastructureRule miniKdcInfrastructure = new MiniKdcInfrastructureRule();
+
+  public HBaseKerberizedConnectivityTest() {
+    super(miniKdcInfrastructure);
+  }
+
+  @Test
+  public void testSqoopImportWithKerberizedHBaseConnectivitySucceeds() throws IOException {
+    String[] argv = getArgv(true, HBASE_TABLE_NAME, HBASE_COLUMN_FAMILY, true, null);
+    createTableWithColTypes(COLUMN_TYPES, new String[] { TEST_ROW_KEY, TEST_ROW_VALUE });
+
+    runImport(argv);
+
+    verifyHBaseCell(HBASE_TABLE_NAME, TEST_ROW_KEY, HBASE_COLUMN_FAMILY, getColName(1), TEST_ROW_VALUE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java b/src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java
new file mode 100644
index 0000000..b73afcd
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseQueryImportTest.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import java.io.IOException;
+
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
+
+/**
+ * Test import of free-form query into HBase.
+ */
+public class HBaseQueryImportTest extends HBaseTestCase {
+
+  @Test
+  public void testImportFromQuery() throws IOException {
+    String [] types = { "INT", "INT", "INT" };
+    String [] vals = { "0", "42", "43" };
+    createTableWithColTypes(types, vals);
+
+    String [] argv = getArgv(true, "queryT", "queryF", true,
+        "SELECT " + getColName(0) + ", " + getColName(1) + " FROM "
+        + getTableName() + " WHERE $CONDITIONS");
+    runImport(argv);
+
+    // This cell should import correctly.
+    verifyHBaseCell("queryT", "0", "queryF", getColName(1), "42");
+
+    // This cell should not be placed in the results.
+    verifyHBaseCell("queryT", "0", "queryF", getColName(2), null);
+  }
+
+  @Test
+  public void testExitFailure() throws IOException {
+    String [] types = { "INT", "INT", "INT" };
+    String [] vals = { "0", "42", "43" };
+    createTableWithColTypes(types, vals);
+
+    String [] argv = getArgv(true, "queryT", "queryF", true,
+        "SELECT " + getColName(0) + ", " + getColName(1) + " FROM "
+        + getTableName() + " WHERE $CONDITIONS");
+    try {
+      HBaseUtil.setAlwaysNoHBaseJarMode(true);
+      runImport(argv);
+    } catch (Exception e)  {
+      return;
+    } finally {
+      HBaseUtil.setAlwaysNoHBaseJarMode(false);
+    }
+    fail("should have gotten exception");
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseTestCase.java b/src/test/org/apache/sqoop/hbase/HBaseTestCase.java
new file mode 100644
index 0000000..98f8698
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseTestCase.java
@@ -0,0 +1,304 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import static org.apache.hadoop.hbase.HConstants.MASTER_INFO_PORT;
+import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_CLIENT_PORT;
+import static org.apache.hadoop.hbase.coprocessor.CoprocessorHost.REGION_COPROCESSOR_CONF_KEY;
+import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.KRB_PRINCIPAL;
+import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.MASTER_KRB_PRINCIPAL;
+import static org.apache.hadoop.hbase.security.User.HBASE_SECURITY_CONF_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTP_POLICY_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY;
+import static org.apache.hadoop.yarn.conf.YarnConfiguration.RM_PRINCIPAL;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hbase.security.token.TokenProvider;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.infrastructure.kerberos.KerberosConfigurationProvider;
+import org.junit.After;
+import org.junit.Before;
+
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+
+/**
+ * Utility methods that facilitate HBase import tests.
+ */
+public abstract class HBaseTestCase extends ImportJobTestCase {
+
+  public static final Log LOG = LogFactory.getLog(
+      HBaseTestCase.class.getName());
+  private static final String MASTER_INFO_PORT_DISABLE_WEB_UI = "-1";
+  private static final String DEFAULT_DFS_HTTPS_ADDRESS = "localhost:0";
+
+  private final KerberosConfigurationProvider kerberosConfigurationProvider;
+  private HBaseTestingUtility hbaseTestUtil;
+
+  public HBaseTestCase() {
+    this(null);
+  }
+
+  public HBaseTestCase(KerberosConfigurationProvider kerberosConfigurationProvider) {
+    this.kerberosConfigurationProvider = kerberosConfigurationProvider;
+  }
+
+  /**
+   * Create the argv to pass to Sqoop.
+   * @return the argv as an array of strings.
+   */
+  protected String [] getArgv(boolean includeHadoopFlags,
+      String hbaseTable, String hbaseColFam, boolean hbaseCreate,
+      String queryStr) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+      String zookeeperPort = hbaseTestUtil.getConfiguration().get(ZOOKEEPER_CLIENT_PORT);
+      args.add("-D");
+      args.add("hbase.zookeeper.property.clientPort=" + zookeeperPort);
+      args.addAll(getKerberosFlags());
+    }
+
+    if (null != queryStr) {
+      args.add("--query");
+      args.add(queryStr);
+    } else {
+      args.add("--table");
+      args.add(getTableName());
+    }
+    args.add("--split-by");
+    args.add(getColName(0));
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--num-mappers");
+    args.add("1");
+    args.add("--column-family");
+    args.add(hbaseColFam);
+    args.add("--hbase-table");
+    args.add(hbaseTable);
+    if (hbaseCreate) {
+      args.add("--hbase-create-table");
+    }
+    return args.toArray(new String[0]);
+  }
+
+  /**
+   * Create the argv to pass to Sqoop, including incremental import options.
+   * @return the argv as an array of strings.
+   */
+  protected String [] getIncrementalArgv(boolean includeHadoopFlags,
+      String hbaseTable, String hbaseColFam, boolean hbaseCreate,
+      String queryStr, boolean isAppend, boolean appendTimestamp, String checkColumn, String checkValue, String lastModifiedColumn) {
+
+    String[] argsStrArray = getArgv(includeHadoopFlags, hbaseTable, hbaseColFam, hbaseCreate, queryStr);
+    List<String> args = new ArrayList<String>(Arrays.asList(argsStrArray));
+
+    if (isAppend) {
+      args.add("--incremental");
+      args.add("append");
+      if (!appendTimestamp) {
+        args.add("--check-column");
+        args.add(checkColumn); // e.g. "ID"
+      } else {
+        args.add("--check-column");
+        args.add(lastModifiedColumn); // e.g. "LAST_MODIFIED"
+      }
+    } else {
+      args.add("--incremental");
+      args.add("lastmodified");
+      args.add("--check-column");
+      args.add(checkColumn);
+      args.add("--last-value");
+      args.add(checkValue);
+    }
+    return args.toArray(new String[0]);
+  }
+
+  @Override
+  @Before
+  public void setUp() {
+    try {
+      hbaseTestUtil = new HBaseTestingUtility();
+      // We set the port for the hbase master web UI to -1 because we do not want the info server to run.
+      hbaseTestUtil.getConfiguration().set(MASTER_INFO_PORT, MASTER_INFO_PORT_DISABLE_WEB_UI);
+      setupKerberos();
+
+      hbaseTestUtil.startMiniCluster();
+      super.setUp();
+    } catch (Throwable e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private void setupKerberos() {
+    if (!isKerberized()){
+      return;
+    }
+
+    String servicePrincipal = kerberosConfigurationProvider.getTestPrincipal() + "@" + kerberosConfigurationProvider.getRealm();
+    HBaseKerberosUtils.setPrincipalForTesting(servicePrincipal);
+    HBaseKerberosUtils.setKeytabFileForTesting(kerberosConfigurationProvider.getKeytabFilePath());
+
+    Configuration configuration = hbaseTestUtil.getConfiguration();
+    HBaseKerberosUtils.setSecuredConfiguration(configuration);
+    UserGroupInformation.setConfiguration(configuration);
+    configuration.setStrings(REGION_COPROCESSOR_CONF_KEY, TokenProvider.class.getName());
+
+    setupKerberosForHdfs(servicePrincipal, configuration);
+  }
+
+  private void setupKerberosForHdfs(String servicePrincipal, Configuration configuration) {
+    configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, servicePrincipal);
+    configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, kerberosConfigurationProvider.getKeytabFilePath());
+    configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, servicePrincipal);
+    configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, kerberosConfigurationProvider.getKeytabFilePath());
+    configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+    configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, servicePrincipal);
+    configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_ONLY.name());
+    configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, DEFAULT_DFS_HTTPS_ADDRESS);
+    configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, DEFAULT_DFS_HTTPS_ADDRESS);
+    configuration.setBoolean(IGNORE_SECURE_PORTS_FOR_TESTING_KEY, true);
+  }
+
+  public void shutdown() throws Exception {
+    LOG.info("In shutdown() method");
+    LOG.info("Shutting down HBase cluster");
+    hbaseTestUtil.shutdownMiniCluster();
+    hbaseTestUtil = null;
+    LOG.info("shutdown() method returning.");
+  }
+
+  @Override
+  @After
+  public void tearDown() {
+    try {
+      shutdown();
+    } catch (Exception e) {
+      LOG.warn("Error shutting down HBase minicluster: "
+              + StringUtils.stringifyException(e));
+    }
+    super.tearDown();
+  }
+
+  protected void verifyHBaseCell(String tableName, String rowKey,
+      String colFamily, String colName, String val) throws IOException {
+    Get get = new Get(Bytes.toBytes(rowKey));
+    get.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(colName));
+    try (
+        Connection hbaseConnection = createHBaseConnection();
+        Table table = getHBaseTable(hbaseConnection, tableName)
+    ) {
+      Result r = table.get(get);
+      byte [] actualVal = r.getValue(Bytes.toBytes(colFamily),
+          Bytes.toBytes(colName));
+      if (null == val) {
+        assertNull("Got a result when expected null", actualVal);
+      } else {
+        assertNotNull("No result, but we expected one", actualVal);
+        assertEquals(val, Bytes.toString(actualVal));
+      }
+    }
+  }
+
+  protected int countHBaseTable(String tableName, String colFamily)
+      throws IOException {
+    int count = 0;
+    try (
+        Connection hbaseConnection = createHBaseConnection();
+        Table table = getHBaseTable(hbaseConnection, tableName)
+    ) {
+      ResultScanner scanner = table.getScanner(Bytes.toBytes(colFamily));
+      for(Result result = scanner.next();
+          result != null;
+          result = scanner.next()) {
+        count++;
+      }
+    }
+    return count;
+  }
+
+  private Connection createHBaseConnection() throws IOException {
+    return ConnectionFactory.createConnection(new Configuration(hbaseTestUtil.getConfiguration()));
+  }
+
+  private Table getHBaseTable(Connection connection, String tableName) throws IOException {
+    return connection.getTable(TableName.valueOf(tableName));
+  }
+
+  protected boolean isKerberized() {
+    return kerberosConfigurationProvider != null;
+  }
+
+  private String createFlagWithValue(String flag, String value) {
+    return String.format("%s=%s", flag, value);
+  }
+
+  private List<String> getKerberosFlags() {
+    if (!isKerberized()) {
+      return Collections.emptyList();
+    }
+    List<String> result = new ArrayList<>();
+
+    String principalForTesting = HBaseKerberosUtils.getPrincipalForTesting();
+    result.add("-D");
+    result.add(createFlagWithValue(HBASE_SECURITY_CONF_KEY, "kerberos"));
+    result.add("-D");
+    result.add(createFlagWithValue(MASTER_KRB_PRINCIPAL, principalForTesting));
+    result.add("-D");
+    result.add(createFlagWithValue(KRB_PRINCIPAL, principalForTesting));
+    result.add("-D");
+    result.add(createFlagWithValue(RM_PRINCIPAL, principalForTesting));
+
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/HBaseUtilTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/HBaseUtilTest.java b/src/test/org/apache/sqoop/hbase/HBaseUtilTest.java
new file mode 100644
index 0000000..c6a808c
--- /dev/null
+++ b/src/test/org/apache/sqoop/hbase/HBaseUtilTest.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hbase;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * Verifies that HBase is reported as present (the default when running test cases)
+ * and that, in the fake "not present" mode, the method returns false.
+ */
+public class HBaseUtilTest {
+
+  @Test
+  public void testHBasePresent() {
+    assertTrue(HBaseUtil.isHBaseJarPresent());
+  }
+
+  @Test
+  public void testHBaseNotPresent() {
+    HBaseUtil.setAlwaysNoHBaseJarMode(true);
+    boolean present = HBaseUtil.isHBaseJarPresent();
+    HBaseUtil.setAlwaysNoHBaseJarMode(false);
+    assertFalse(present);
+  }
+
+}
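
The testExitFailure methods added earlier in this commit wrap HBaseUtil.setAlwaysNoHBaseJarMode(true) in try/finally so the global flag is always restored, while testHBaseNotPresent above resets it inline. A minimal sketch of that guard extracted into a reusable helper follows; the class and method names are illustrative only and not part of this commit, and it assumes the org.apache.sqoop.hbase package so that HBaseUtil is directly visible.

package org.apache.sqoop.hbase;

public final class NoHBaseJarModeGuard {

  private NoHBaseJarModeGuard() {
  }

  /** Runs the given action with the fake "no HBase jar" mode switched on. */
  public static void runWithNoHBaseJarMode(Runnable action) {
    HBaseUtil.setAlwaysNoHBaseJarMode(true);
    try {
      action.run();
    } finally {
      // Restore the global flag even if the action throws, so later tests
      // observe the real classpath state again.
      HBaseUtil.setAlwaysNoHBaseJarMode(false);
    }
  }
}

With such a helper (and a Java 8 language level), testHBaseNotPresent could be written as NoHBaseJarModeGuard.runWithNoHBaseJarMode(() -> assertFalse(HBaseUtil.isHBaseJarPresent()));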

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java b/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
index 73b3177..e78a535 100644
--- a/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
+++ b/src/test/org/apache/sqoop/hbase/TestHBasePutProcessor.java
@@ -17,7 +17,7 @@
  */
 package org.apache.sqoop.hbase;
 
-import com.cloudera.sqoop.lib.FieldMappable;
+import org.apache.sqoop.lib.FieldMappable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.BufferedMutator;
 import org.apache.hadoop.hbase.client.Connection;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
index 7ff046e..a124dd0 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogExportTest.java
@@ -32,7 +32,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 
-import com.cloudera.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -47,7 +47,7 @@ import org.apache.sqoop.hcat.HCatalogTestUtils.KeyType;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.junit.Before;
 
-import com.cloudera.sqoop.testutil.ExportJobTestCase;
+import org.apache.sqoop.testutil.ExportJobTestCase;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
index b6741f4..c7e1ea6 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
@@ -53,12 +52,12 @@ import org.apache.sqoop.hcat.HCatalogTestUtils.KeyType;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.junit.Before;
 
-import com.cloudera.sqoop.Sqoop;
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.CommonArgs;
-import com.cloudera.sqoop.testutil.ImportJobTestCase;
-import com.cloudera.sqoop.tool.ImportTool;
-import com.cloudera.sqoop.tool.SqoopTool;
+import org.apache.sqoop.Sqoop;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
index 6fb6486..28b42dc 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogTestUtils.java
@@ -63,9 +63,9 @@ import org.apache.sqoop.config.ConfigurationConstants;
 import org.apache.sqoop.mapreduce.hcat.SqoopHCatUtilities;
 import org.junit.Assert;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
-import com.cloudera.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.testutil.BaseSqoopTestCase;
+import org.apache.sqoop.testutil.CommonArgs;
 
 /**
  * HCatalog common test utilities.

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 104effb..ba05cab 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -20,9 +20,9 @@ package org.apache.sqoop.hcat;
 
 import org.junit.Before;
 
-import com.cloudera.sqoop.SqoopOptions;
-import com.cloudera.sqoop.tool.ExportTool;
-import com.cloudera.sqoop.tool.ImportTool;
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.ExportTool;
+import org.apache.sqoop.tool.ImportTool;
 
 import org.junit.Test;
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hive/TestHiveImport.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hive/TestHiveImport.java b/src/test/org/apache/sqoop/hive/TestHiveImport.java
new file mode 100644
index 0000000..4e1f249
--- /dev/null
+++ b/src/test/org/apache/sqoop/hive/TestHiveImport.java
@@ -0,0 +1,743 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.hive;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.sqoop.Sqoop;
+
+import org.apache.avro.Schema;
+import org.apache.avro.SchemaBuilder;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.sqoop.avro.AvroSchemaMismatchException;
+import org.apache.sqoop.mapreduce.ParquetJob;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
+import org.apache.sqoop.testutil.CommonArgs;
+import org.apache.sqoop.testutil.ImportJobTestCase;
+import org.apache.sqoop.tool.BaseSqoopTool;
+import org.apache.sqoop.tool.CodeGenTool;
+import org.apache.sqoop.tool.CreateHiveTableTool;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.tool.SqoopTool;
+import org.apache.commons.cli.ParseException;
+import org.junit.rules.ExpectedException;
+import org.kitesdk.data.Dataset;
+import org.kitesdk.data.DatasetReader;
+import org.kitesdk.data.Datasets;
+import org.kitesdk.data.Formats;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test HiveImport capability after an import to HDFS.
+ */
+
+public class TestHiveImport extends ImportJobTestCase {
+
+  public static final Log LOG = LogFactory.getLog(
+      TestHiveImport.class.getName());
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Before
+  public void setUp() {
+    super.setUp();
+    HiveImport.setTestMode(true);
+  }
+
+  @After
+  public void tearDown() {
+    super.tearDown();
+    HiveImport.setTestMode(false);
+  }
+
+  /**
+   * Sets the expected number of columns in the table being manipulated
+   * by the test. Under the hood, this sets the expected column names
+   * to DATA_COLi for 0 &lt;= i &lt; numCols.
+   * @param numCols the number of columns to be created.
+   */
+  protected void setNumCols(int numCols) {
+    String [] cols = new String[numCols];
+    for (int i = 0; i < numCols; i++) {
+      cols[i] = "DATA_COL" + i;
+    }
+
+    setColNames(cols);
+  }
+
+  protected String[] getTypes() {
+    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    return types;
+  }
+
+  /**
+   * Create the argv to pass to Sqoop.
+   * @return the argv as an array of strings.
+   */
+  protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      CommonArgs.addHadoopFlags(args);
+    }
+
+    if (null != moreArgs) {
+      for (String arg: moreArgs) {
+        args.add(arg);
+      }
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--hive-import");
+    String [] colNames = getColNames();
+    if (null != colNames) {
+      args.add("--split-by");
+      args.add(colNames[0]);
+    } else {
+      fail("Could not determine column names.");
+    }
+
+    args.add("--num-mappers");
+    args.add("1");
+
+    for (String a : args) {
+      LOG.debug("ARG : "+ a);
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  /**
+   * @return the argv to supply to a create-table only job for Hive imports.
+   */
+  protected String [] getCreateTableArgv(boolean includeHadoopFlags,
+      String [] moreArgs) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (null != moreArgs) {
+      for (String arg: moreArgs) {
+        args.add(arg);
+      }
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--connect");
+    args.add(getConnectString());
+
+    return args.toArray(new String[0]);
+  }
+
+  /**
+   * @return the argv to supply to a code-gen only job for Hive imports.
+   */
+  protected String [] getCodeGenArgs() {
+    ArrayList<String> args = new ArrayList<String>();
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--connect");
+    args.add(getConnectString());
+    args.add("--hive-import");
+
+    return args.toArray(new String[0]);
+  }
+
+  /**
+   * @return the argv to supply to a ddl-executing-only job for Hive imports.
+   */
+  protected String [] getCreateHiveTableArgs(String [] extraArgs) {
+    ArrayList<String> args = new ArrayList<String>();
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--connect");
+    args.add(getConnectString());
+
+    if (null != extraArgs) {
+      for (String arg : extraArgs) {
+        args.add(arg);
+      }
+    }
+
+    return args.toArray(new String[0]);
+  }
+
+  private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
+    SqoopOptions opts = null;
+    try {
+      opts = tool.parseArguments(args, null, null, true);
+    } catch (Exception e) {
+      fail("Invalid options: " + e.toString());
+    }
+
+    return opts;
+  }
+
+  private void runImportTest(String tableName, String [] types,
+      String [] values, String verificationScript, String [] args,
+      SqoopTool tool) throws IOException {
+
+    // create a table and populate it with a row...
+    createTableWithColTypes(types, values);
+
+    // set up our mock hive shell to compare our generated script
+    // against the correct expected one.
+    SqoopOptions options = getSqoopOptions(args, tool);
+    String hiveHome = options.getHiveHome();
+    assertNotNull("hive.home was not set", hiveHome);
+    String testDataPath = new Path(new Path(hiveHome),
+        "scripts/" + verificationScript).toString();
+    System.setProperty("expected.script",
+        new File(testDataPath).getAbsolutePath());
+
+    // verify that we can import it correctly into hive.
+    runImport(tool, args);
+  }
+
+  /** Test that we can generate a file containing the DDL and not import. */
+  @Test
+  public void testGenerateOnly() throws IOException {
+    final String TABLE_NAME = "GenerateOnly";
+    setCurTableName(TABLE_NAME);
+    setNumCols(1);
+
+    // Figure out where our target generated .q file is going to be.
+    SqoopOptions options = getSqoopOptions(getArgv(false, null),
+        new ImportTool());
+    Path ddlFile = new Path(new Path(options.getCodeOutputDir()),
+        TABLE_NAME + ".q");
+    FileSystem fs = FileSystem.getLocal(new Configuration());
+
+    // If it's already there, remove it before running the test to ensure
+    // that it's the current test that generated the file.
+    if (fs.exists(ddlFile)) {
+      if (!fs.delete(ddlFile, false)) {
+        LOG.warn("Could not delete previous ddl file: " + ddlFile);
+      }
+    }
+
+    // Run a basic import, but specify that we're just generating definitions.
+    String [] types = { "INTEGER" };
+    String [] vals = { "42" };
+    runImportTest(TABLE_NAME, types, vals, null, getCodeGenArgs(),
+        new CodeGenTool());
+
+    // Test that the generated definition file exists.
+    assertTrue("Couldn't find expected ddl file", fs.exists(ddlFile));
+
+    Path hiveImportPath = new Path(new Path(options.getWarehouseDir()),
+        TABLE_NAME);
+    assertFalse("Import actually happened!", fs.exists(hiveImportPath));
+  }
+
+  /** Test that strings and ints are handled in the normal fashion. */
+  @Test
+  public void testNormalHiveImport() throws IOException {
+    final String TABLE_NAME = "NORMAL_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String [] vals = { "'test'", "42", "'somestring'" };
+    runImportTest(TABLE_NAME, types, vals, "normalImport.q",
+        getArgv(false, null), new ImportTool());
+  }
+
+  /** Test that strings and ints are handled in the normal fashion when
+   * imported as a Parquet file. */
+  @Test
+  public void testNormalHiveImportAsParquet() throws IOException {
+    final String TABLE_NAME = "NORMAL_HIVE_IMPORT_AS_PARQUET";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = getTypes();
+    String [] vals = { "'test'", "42", "'somestring'" };
+    String [] extraArgs = {"--as-parquetfile"};
+
+    runImportTest(TABLE_NAME, types, vals, "", getArgv(false, extraArgs),
+        new ImportTool());
+    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
+  }
+
+  private void verifyHiveDataset(String tableName, Object[][] valsArray) {
+    String datasetUri = String.format("dataset:hive:default/%s",
+        tableName.toLowerCase());
+    assertTrue(Datasets.exists(datasetUri));
+    Dataset dataset = Datasets.load(datasetUri);
+    assertFalse(dataset.isEmpty());
+
+    DatasetReader<GenericRecord> reader = dataset.newReader();
+    try {
+      List<String> expectations = new ArrayList<String>();
+      if (valsArray != null) {
+        for (Object[] vals : valsArray) {
+          expectations.add(Arrays.toString(vals));
+        }
+      }
+
+      while (reader.hasNext() && expectations.size() > 0) {
+        String actual = Arrays.toString(
+            convertGenericRecordToArray(reader.next()));
+        assertTrue("Expect record: " + actual, expectations.remove(actual));
+      }
+      assertFalse(reader.hasNext());
+      assertEquals(0, expectations.size());
+    } finally {
+      reader.close();
+    }
+  }
+
+  private static Object[] convertGenericRecordToArray(GenericRecord record) {
+    Object[] result = new Object[record.getSchema().getFields().size()];
+    for (int i = 0; i < result.length; i++) {
+      result[i] = record.get(i);
+    }
+    return result;
+  }
+
+  /** Test that the table is created in Hive with no data imported. */
+  @Test
+  public void testCreateOnlyHiveImport() throws IOException {
+    final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String [] vals = { "'test'", "42", "'somestring'" };
+    runImportTest(TABLE_NAME, types, vals,
+        "createOnlyImport.q", getCreateHiveTableArgs(null),
+        new CreateHiveTableTool());
+  }
+
+  /**
+   * Test that the table is created in Hive and replaces the existing table,
+   * if any.
+   */
+  @Test
+  public void testCreateOverwriteHiveImport() throws IOException {
+    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String [] vals = { "'test'", "42", "'somestring'" };
+    String [] extraArgs = {"--hive-overwrite", "--create-hive-table"};
+    runImportTest(TABLE_NAME, types, vals,
+        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
+        new CreateHiveTableTool());
+    runImportTest(TABLE_NAME, types, vals,
+        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
+        new CreateHiveTableTool());
+  }
+
+  /**
+   * Test that the table is created in Hive and replaces the existing table,
+   * if any.
+   */
+  @Test
+  public void testCreateOverwriteHiveImportAsParquet() throws IOException {
+    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT_AS_PARQUET";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = getTypes();
+    String [] vals = { "'test'", "42", "'somestring'" };
+    String [] extraArgs = {"--as-parquetfile"};
+    ImportTool tool = new ImportTool();
+
+    runImportTest(TABLE_NAME, types, vals, "", getArgv(false, extraArgs), tool);
+    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
+
+    String [] valsToOverwrite = { "'test2'", "24", "'somestring2'" };
+    String [] extraArgsForOverwrite = {"--as-parquetfile", "--hive-overwrite"};
+    runImportTest(TABLE_NAME, types, valsToOverwrite, "",
+        getArgv(false, extraArgsForOverwrite), tool);
+    verifyHiveDataset(TABLE_NAME, new Object[][] {{"test2", 24, "somestring2"}});
+  }
+
+  @Test
+  public void testHiveImportAsParquetWhenTableExistsWithIncompatibleSchema() throws Exception {
+    final String TABLE_NAME = "HIVE_IMPORT_AS_PARQUET_EXISTING_TABLE";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+
+    String [] types = { "VARCHAR(32)", "INTEGER", "DATE" };
+    String [] vals = { "'test'", "42", "'2009-12-31'" };
+    String [] extraArgs = {"--as-parquetfile"};
+
+    createHiveDataSet(TABLE_NAME);
+
+    createTableWithColTypes(types, vals);
+
+    thrown.expect(AvroSchemaMismatchException.class);
+    thrown.expectMessage(ParquetJob.INCOMPATIBLE_AVRO_SCHEMA_MSG + ParquetJob.HIVE_INCOMPATIBLE_AVRO_SCHEMA_MSG);
+
+    SqoopOptions sqoopOptions = getSqoopOptions(getConf());
+    sqoopOptions.setThrowOnError(true);
+    Sqoop sqoop = new Sqoop(new ImportTool(), getConf(), sqoopOptions);
+    sqoop.run(getArgv(false, extraArgs));
+
+  }
+
+  private void createHiveDataSet(String tableName) {
+    Schema dataSetSchema = SchemaBuilder
+        .record(tableName)
+            .fields()
+            .name(getColName(0)).type().nullable().stringType().noDefault()
+            .name(getColName(1)).type().nullable().stringType().noDefault()
+            .name(getColName(2)).type().nullable().stringType().noDefault()
+            .endRecord();
+    String dataSetUri = "dataset:hive:/default/" + tableName;
+    ParquetJob.createDataset(dataSetSchema, Formats.PARQUET.getDefaultCompressionType(), dataSetUri);
+  }
+
+  /**
+   * Test that records are appended to an existing table.
+   */
+  @Test
+  public void testAppendHiveImportAsParquet() throws IOException {
+    final String TABLE_NAME = "APPEND_HIVE_IMPORT_AS_PARQUET";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = getTypes();
+    String [] vals = { "'test'", "42", "'somestring'" };
+    String [] extraArgs = {"--as-parquetfile"};
+    String [] args = getArgv(false, extraArgs);
+    ImportTool tool = new ImportTool();
+
+    runImportTest(TABLE_NAME, types, vals, "", args, tool);
+    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
+
+    String [] valsToAppend = { "'test2'", "4242", "'somestring2'" };
+    runImportTest(TABLE_NAME, types, valsToAppend, "", args, tool);
+    verifyHiveDataset(TABLE_NAME, new Object[][] {
+        {"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
+  }
+
+  /**
+   * Test validation of the --create-hive-table and --as-parquetfile options.
+   */
+  @Test
+  public void testCreateHiveImportAsParquet() throws ParseException, InvalidOptionsException {
+    final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
+    ImportTool tool = new ImportTool();
+
+    thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during Hive table creation with " +
+        "--as-parquetfile");
+    tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
+        null, true));
+  }
+
+
+  /** Test that dates are coerced properly to strings. */
+  @Test
+  public void testDate() throws IOException {
+    final String TABLE_NAME = "DATE_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(2);
+    String [] types = { "VARCHAR(32)", "DATE" };
+    String [] vals = { "'test'", "'2009-05-12'" };
+    runImportTest(TABLE_NAME, types, vals, "dateImport.q",
+        getArgv(false, null), new ImportTool());
+  }
+
+  /** Test that NUMERICs are coerced to doubles. */
+  @Test
+  public void testNumeric() throws IOException {
+    final String TABLE_NAME = "NUMERIC_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(2);
+    String [] types = { "NUMERIC", "CHAR(64)" };
+    String [] vals = { "3.14159", "'foo'" };
+    runImportTest(TABLE_NAME, types, vals, "numericImport.q",
+        getArgv(false, null), new ImportTool());
+  }
+
+  /** Test that DECIMAL mappings supplied via --map-column-hive run without issues. */
+  @Test
+  public void testDecimalMapColumnHive() throws IOException {
+    final String TABLE_NAME = "DECIMAL_MAP_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(2);
+    String [] types = { "NUMERIC", "CHAR(64)" };
+    String [] vals = { "12343.14159", "'foo'" };
+
+    ArrayList<String> args = new ArrayList<String>();
+    args.add("--map-column-hive");
+    args.add(BASE_COL_NAME + "0=DECIMAL(10,10)");
+
+    runImportTest(TABLE_NAME, types, vals, "decimalMapImport.q",
+        getArgv(false, args.toArray(new String[args.size()])), new ImportTool());
+  }
+
+  /** If bin/hive returns an error exit status, we should get an IOException. */
+  @Test
+  public void testHiveExitFails() throws IOException {
+    // The expected script differs from the one that would be generated here,
+    // so we expect an IOException.
+    final String TABLE_NAME = "FAILING_HIVE_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(2);
+    String [] types = { "NUMERIC", "CHAR(64)" };
+    String [] vals = { "3.14159", "'foo'" };
+
+    thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException on erroneous Hive exit status");
+    runImportTest(TABLE_NAME, types, vals, "failingImport.q",
+        getArgv(false, null), new ImportTool());
+  }
+
+  /** Test that we can set delimiters how we want them. */
+  @Test
+  public void testCustomDelimiters() throws IOException {
+    final String TABLE_NAME = "CUSTOM_DELIM_IMPORT";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
+    String [] vals = { "'test'", "42", "'somestring'" };
+    String [] extraArgs = {
+      "--fields-terminated-by", ",",
+      "--lines-terminated-by", "|",
+    };
+    runImportTest(TABLE_NAME, types, vals, "customDelimImport.q",
+        getArgv(false, extraArgs), new ImportTool());
+  }
+
+  /**
+   * Test Hive import of a row that contains a newline.
+   */
+  @Test
+  public void testFieldWithHiveDelims() throws IOException,
+    InterruptedException {
+    final String TABLE_NAME = "FIELD_WITH_NL_HIVE_IMPORT";
+
+    LOG.info("Doing import of single row into FIELD_WITH_NL_HIVE_IMPORT table");
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String[] types = getTypes();
+    String[] vals = { "'test with \n new lines \n'", "42",
+        "'oh no " + '\01' + " field delims " + '\01' + "'", };
+    String[] moreArgs = { "--"+ BaseSqoopTool.HIVE_DROP_DELIMS_ARG };
+
+    runImportTest(TABLE_NAME, types, vals, "fieldWithNewlineImport.q",
+        getArgv(false, moreArgs), new ImportTool());
+
+    LOG.info("Validating data in single row is present in: "
+          + "FIELD_WITH_NL_HIVE_IMPORT table");
+
+    // Ideally, we would invoke Hive itself to verify that the embedded record
+    // and field delimiters were dropped and that the proper number of Hive
+    // records was produced. Unfortunately, that is a non-trivial task better
+    // dealt with at the integration test level.
+    //
+    // Instead, this test assumes the path of the generated table and simply
+    // validates the map job output.
+
+    // Get and read the raw output file
+    String whDir = getWarehouseDir();
+    File p = new File(new File(whDir, TABLE_NAME), "part-m-00000");
+    File f = new File(p.toString());
+    FileReader fr = new FileReader(f);
+    BufferedReader br = new BufferedReader(fr);
+    try {
+      // verify the output
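+      // --hive-drop-delims removes the embedded \n and \01 characters from the
+      // field values; the surviving fields are joined by the \01 field delimiter.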
+      assertEquals("test with  new lines " + '\01' + "42"
+          + '\01' + "oh no  field delims ", br.readLine());
+      assertEquals(null, br.readLine()); // should only be one line
+    } catch (IOException ioe) {
+      fail("Unable to read files generated from hive");
+    } finally {
+      br.close();
+    }
+  }
+
+  /**
+   * Test Hive import of a row that contains a newline, using delimiter replacement.
+   */
+  @Test
+  public void testFieldWithHiveDelimsReplacement() throws IOException,
+    InterruptedException {
+    final String TABLE_NAME = "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT";
+
+    LOG.info("Doing import of single row into "
+        + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String[] types = getTypes();
+    String[] vals = { "'test with\nnew lines\n'", "42",
+        "'oh no " + '\01' + " field delims " + '\01' + "'", };
+    String[] moreArgs = { "--"+BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " "};
+
+    runImportTest(TABLE_NAME, types, vals,
+        "fieldWithNewlineReplacementImport.q", getArgv(false, moreArgs),
+        new ImportTool());
+
+    LOG.info("Validating data in single row is present in: "
+          + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");
+
+    // Ideally, we would invoke Hive itself to verify that the embedded record
+    // and field delimiters were replaced and that the proper number of Hive
+    // records was produced. Unfortunately, that is a non-trivial task better
+    // dealt with at the integration test level.
+    //
+    // Instead, this test assumes the path of the generated table and simply
+    // validates the map job output.
+
+    // Get and read the raw output file
+    String whDir = getWarehouseDir();
+    File p = new File(new File(whDir, TABLE_NAME), "part-m-00000");
+    File f = new File(p.toString());
+    FileReader fr = new FileReader(f);
+    BufferedReader br = new BufferedReader(fr);
+    try {
+      // verify the output
+      assertEquals("test with new lines " + '\01' + "42"
+          + '\01' + "oh no   field delims  ", br.readLine());
+      assertEquals(null, br.readLine()); // should only be one line
+    } catch (IOException ioe) {
+      fail("Unable to read files generated from hive");
+    } finally {
+      br.close();
+    }
+  }
+
+  /**
+   * Test hive drop and replace option validation.
+   */
+  @Test
+  public void testHiveDropAndReplaceOptionValidation() throws ParseException, InvalidOptionsException {
+    LOG.info("Testing conflicting Hive delimiter drop/replace options");
+
+    setNumCols(3);
+    String[] moreArgs = { "--"+BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " ",
+      "--"+BaseSqoopTool.HIVE_DROP_DELIMS_ARG, };
+
+    ImportTool tool = new ImportTool();
+
+    thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException with conflicting Hive delimiter " +
+        "drop/replace options");
+    tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
+        null, true));
+  }
+
+  /**
+   * Test Hive import into a partitioned table.
+   */
+  @Test
+  public void testImportHiveWithPartitions() throws IOException,
+      InterruptedException {
+    final String TABLE_NAME = "PARTITION_HIVE_IMPORT";
+
+    LOG.info("Doing import of single row into PARTITION_HIVE_IMPORT table");
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
+    String[] vals = { "'whoop'", "42", "'I am a row in a partition'", };
+    String[] moreArgs = { "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG, "ds",
+        "--" + BaseSqoopTool.HIVE_PARTITION_VALUE_ARG, "20110413", };
+
+    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+        getArgv(false, moreArgs), new ImportTool());
+  }
+
+  /**
+   * If the partition key is set to one of the imported columns, we should get
+   * an IOException.
+   */
+  @Test
+  public void testImportWithBadPartitionKey() throws IOException {
+    final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";
+
+    LOG.info("Doing import of single row into " + TABLE_NAME + " table");
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
+    String[] vals = { "'key'", "42", "'I am a row in a partition'", };
+
+    String partitionKey = getColNames()[0];
+
+    // Specify 1st column as partition key and import every column of the
+    // table by default (i.e. no --columns option).
+    String[] moreArgs1 = {
+        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
+        partitionKey,
+    };
+
+    // Specify 1st column as both partition key and importing column.
+    String[] moreArgs2 = {
+        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
+        partitionKey,
+        "--" + BaseSqoopTool.COLUMNS_ARG,
+        partitionKey,
+    };
+
+    // Test hive-import with the 1st args.
+    thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with partition key " +
+        "as importing column");
+    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+        getArgv(false, moreArgs1), new ImportTool());
+
+    // Test hive-import with the 2nd args.
+    thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with partition key " +
+        "as importing column");
+    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+        getArgv(false, moreArgs2), new ImportTool());
+
+    // Test create-hive-table with the 1st args.
+    thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive table creation with partition key " +
+        "as importing column");
+    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+        getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hive/TestTableDefWriter.java b/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
index 035b0e1..496b5ad 100644
--- a/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/org/apache/sqoop/hive/TestTableDefWriter.java
@@ -15,112 +15,275 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.sqoop.hive;
 
-import static org.mockito.Mockito.*;
+package org.apache.sqoop.hive;
 
-import com.cloudera.sqoop.manager.ConnManager;
-import com.cloudera.sqoop.SqoopOptions;
+import java.util.Map;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.junit.BeforeClass;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sqoop.util.SqlTypeMap;
+
+import org.apache.sqoop.SqoopOptions;
+import org.apache.sqoop.tool.ImportTool;
+import org.apache.sqoop.testutil.HsqldbTestServer;
+
+import org.junit.Rule;
 import org.junit.Test;
-import org.mockito.Mockito;
+import org.junit.rules.ExpectedException;
 
-import java.sql.*;
-import java.util.HashMap;
-import java.io.IOException;
+import java.sql.Types;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * Test Hive DDL statement generation.
+ */
 public class TestTableDefWriter {
-  static String inputTableName = "genres";
-  static String outputTableName = "genres";
-  static String testTargetDir = "/tmp/testDB/genre";
-  static String hdfsTableDir = "/data/movielens/genre";
-  static String testDbUri = "jdbc:postgresql://localhost/movielens";
-  static ConnManager manager;
-  static SqoopOptions options;
+
   public static final Log LOG = LogFactory.getLog(
       TestTableDefWriter.class.getName());
-  TableDefWriter tableDefWriter;
-
-  @BeforeClass
-  public static void setup() {
-    // create mock
-    HashMap<String, Integer> map = new HashMap<String, Integer>();
-    map.put("id", Types.TINYINT);
-    map.put("name", Types.VARCHAR);
-    manager = Mockito.mock(ConnManager.class);
-    when(manager.getColumnNames(inputTableName)).thenReturn(new String[] { "id", "name" });
-    when(manager.getColumnTypes(inputTableName)).thenReturn(map);
-    options = new SqoopOptions(testDbUri, inputTableName);
-    options.setTargetDir(testTargetDir);
-    options.setHiveExternalTableDir(hdfsTableDir);
-    String[] cols = new String[] { "id", "name" };
-    options.setColumns(cols);
-    options.setMapColumnHive("id=TINYINT,name=STRING");
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  // Test getHiveOctalCharCode and expect an IllegalArgumentException.
+  private void expectExceptionInCharCode(int charCode) {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException with out-of-range Hive delimiter");
+    TableDefWriter.getHiveOctalCharCode(charCode);
   }
 
   @Test
-  public void testGenerateExternalTableStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(hdfsTableDir);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE ") && stmt.contains("LOCATION '" + hdfsTableDir));
+  public void testHiveOctalCharCode() {
+    assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
+    assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
+    assertEquals("\\012", TableDefWriter.getHiveOctalCharCode((int) '\n'));
+    assertEquals("\\177", TableDefWriter.getHiveOctalCharCode(0177));
+
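+    // getHiveOctalCharCode only supports character codes up to 0177; larger
+    // values are expected to be rejected with an IllegalArgumentException.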
+    expectExceptionInCharCode(4096);
+    expectExceptionInCharCode(0200);
+    expectExceptionInCharCode(254);
   }
 
   @Test
-  public void testGenerateTableStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(null);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (!isHiveExternalTableSet && stmt.contains("CREATE TABLE "));
+  public void testDifferentTableNames() throws Exception {
+    Configuration conf = new Configuration();
+    SqoopOptions options = new SqoopOptions();
+    TableDefWriter writer = new TableDefWriter(options, null,
+        "inputTable", "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    LOG.debug("Create table stmt: " + createTable);
+    LOG.debug("Load data stmt: " + loadData);
+
+    // Assert that the statements generated have the form we expect.
+    assertTrue(createTable.indexOf(
+        "CREATE TABLE IF NOT EXISTS `outputTable`") != -1);
+    assertTrue(loadData.indexOf("INTO TABLE `outputTable`") != -1);
+    assertTrue(loadData.indexOf("/inputTable'") != -1);
   }
 
   @Test
-  public void testGenerateExternalTableIfExistsStatement() throws IOException, SQLException {
-    options.setFailIfHiveTableExists(false);
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(hdfsTableDir);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE IF NOT EXISTS") && stmt.contains("LOCATION '"
-        + hdfsTableDir));
+  public void testDifferentTargetDirs() throws Exception {
+    String targetDir = "targetDir";
+    String inputTable = "inputTable";
+    String outputTable = "outputTable";
+
+    Configuration conf = new Configuration();
+    SqoopOptions options = new SqoopOptions();
+    // Specify a different target dir from input table name
+    options.setTargetDir(targetDir);
+    TableDefWriter writer = new TableDefWriter(options, null,
+        inputTable, outputTable, conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    LOG.debug("Create table stmt: " + createTable);
+    LOG.debug("Load data stmt: " + loadData);
+
+    // Assert that the statements generated have the form we expect.
+    assertTrue(createTable.indexOf(
+        "CREATE TABLE IF NOT EXISTS `" + outputTable + "`") != -1);
+    assertTrue(loadData.indexOf("INTO TABLE `" + outputTable + "`") != -1);
+    assertTrue(loadData.indexOf("/" + targetDir + "'") != -1);
   }
 
   @Test
-  public void testGenerateTableIfExistsStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(null);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getCreateTableStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (!isHiveExternalTableSet && stmt.contains("CREATE TABLE IF NOT EXISTS"));
+  public void testPartitions() throws Exception {
+    String[] args = {
+        "--hive-partition-key", "ds",
+        "--hive-partition-value", "20110413",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, "inputTable", "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    assertNotNull(createTable);
+    assertNotNull(loadData);
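+    // Default delimiters are rendered as octal escapes: '\054' is ',' and '\012' is '\n'.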
+    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
+        + "PARTITIONED BY (ds STRING) "
+        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
+        + "LINES TERMINATED BY '\\012' STORED AS TEXTFILE", createTable);
+    assertTrue(loadData.endsWith(" PARTITION (ds='20110413')"));
   }
 
   @Test
-  public void testGenerateExternalTableLoadStatement() throws IOException, SQLException {
-    // need to set this as the other unit test functions may override it for their own test.
-    options.setHiveExternalTableDir(hdfsTableDir);
-    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
-        options.getConf(), false);
-    String stmt = tableDefWriter.getLoadDataStmt();
-    Boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
-    LOG.debug("External table dir: "+options.getHiveExternalTableDir());
-    assert (isHiveExternalTableSet && stmt.contains("LOAD DATA INPATH ") && stmt.contains(testTargetDir));
+  public void testLzoSplitting() throws Exception {
+    String[] args = {
+        "--compress",
+        "--compression-codec", "lzop",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, "inputTable", "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    String loadData = writer.getLoadDataStmt();
+
+    assertNotNull(createTable);
+    assertNotNull(loadData);
+    assertEquals("CREATE TABLE IF NOT EXISTS `outputTable` ( ) "
+        + "ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\054' "
+        + "LINES TERMINATED BY '\\012' STORED AS "
+        + "INPUTFORMAT 'com.hadoop.mapred.DeprecatedLzoTextInputFormat' "
+        + "OUTPUTFORMAT "
+        + "'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'",
+        createTable);
   }
+
+  @Test
+  public void testUserMappingNoDecimal() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id=STRING,value=INTEGER",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    colTypes.put("id", Types.INTEGER);
+    colTypes.put("value", Types.VARCHAR);
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+
+    assertNotNull(createTable);
+
+    assertTrue(createTable.contains("`id` STRING"));
+    assertTrue(createTable.contains("`value` INTEGER"));
+
+    assertFalse(createTable.contains("`id` INTEGER"));
+    assertFalse(createTable.contains("`value` STRING"));
+  }
+
+  @Test
+  public void testUserMappingWithDecimal() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id=STRING,value2=DECIMAL(13,5),value1=INTEGER," +
+                             "value3=DECIMAL(4,5),value4=VARCHAR(255)",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+        new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    colTypes.put("id", Types.INTEGER);
+    colTypes.put("value1", Types.VARCHAR);
+    colTypes.put("value2", Types.DOUBLE);
+    colTypes.put("value3", Types.FLOAT);
+    colTypes.put("value4", Types.CHAR);
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+
+    assertNotNull(createTable);
+
+    assertTrue(createTable.contains("`id` STRING"));
+    assertTrue(createTable.contains("`value1` INTEGER"));
+    assertTrue(createTable.contains("`value2` DECIMAL(13,5)"));
+    assertTrue(createTable.contains("`value3` DECIMAL(4,5)"));
+    assertTrue(createTable.contains("`value4` VARCHAR(255)"));
+
+    assertFalse(createTable.contains("`id` INTEGER"));
+    assertFalse(createTable.contains("`value1` STRING"));
+    assertFalse(createTable.contains("`value2` DOUBLE"));
+    assertFalse(createTable.contains("`value3` FLOAT"));
+    assertFalse(createTable.contains("`value4` CHAR"));
+  }
+
+  @Test
+  public void testUserMappingFailWhenCantBeApplied() throws Exception {
+    String[] args = {
+        "--map-column-hive", "id=STRING,value=INTEGER",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    colTypes.put("id", Types.INTEGER);
+    writer.setColumnTypes(colTypes);
+
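+    // The user mapping also references column "value", which is missing from
+    // the column type map, so applying the mapping should fail.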
+    thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException when the Hive type mapping cannot be applied");
+    writer.getCreateTableStmt();
+  }
+
+  @Test
+  public void testHiveDatabase() throws Exception {
+    String[] args = {
+        "--hive-database", "db",
+    };
+    Configuration conf = new Configuration();
+    SqoopOptions options =
+      new ImportTool().parseArguments(args, null, null, false);
+    TableDefWriter writer = new TableDefWriter(options,
+        null, HsqldbTestServer.getTableName(), "outputTable", conf, false);
+
+    Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
+    writer.setColumnTypes(colTypes);
+
+    String createTable = writer.getCreateTableStmt();
+    assertNotNull(createTable);
+    assertTrue(createTable.contains("`db`.`outputTable`"));
+
+    String loadStmt = writer.getLoadDataStmt();
+    assertNotNull(loadStmt);
+    assertTrue(loadStmt.contains("`db`.`outputTable`"));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java b/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java
new file mode 100644
index 0000000..f1768ee
--- /dev/null
+++ b/src/test/org/apache/sqoop/hive/TestTableDefWriterForExternalTable.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.hive;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.*;
+
+import org.apache.sqoop.manager.ConnManager;
+import org.apache.sqoop.SqoopOptions;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.sql.*;
+import java.util.HashMap;
+import java.io.IOException;
+
+public class TestTableDefWriterForExternalTable {
+  static String inputTableName = "genres";
+  static String outputTableName = "genres";
+  static String testTargetDir = "/tmp/testDB/genre";
+  static String hdfsTableDir = "/data/movielens/genre";
+  static String testDbUri = "jdbc:postgresql://localhost/movielens";
+  static ConnManager manager;
+  static SqoopOptions options;
+  public static final Log LOG = LogFactory.getLog(
+      TestTableDefWriterForExternalTable.class.getName());
+  TableDefWriter tableDefWriter;
+
+  @BeforeClass
+  public static void setup() {
+    // create mock
+    HashMap<String, Integer> map = new HashMap<String, Integer>();
+    map.put("id", Types.TINYINT);
+    map.put("name", Types.VARCHAR);
+    manager = Mockito.mock(ConnManager.class);
+    when(manager.getColumnNames(inputTableName)).thenReturn(new String[] { "id", "name" });
+    when(manager.getColumnTypes(inputTableName)).thenReturn(map);
+    options = new SqoopOptions(testDbUri, inputTableName);
+    options.setTargetDir(testTargetDir);
+    options.setHiveExternalTableDir(hdfsTableDir);
+    String[] cols = new String[] { "id", "name" };
+    options.setColumns(cols);
+    options.setMapColumnHive("id=TINYINT,name=STRING");
+  }
+
+  @Test
+  public void testGenerateExternalTableStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(hdfsTableDir);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: " + options.getHiveExternalTableDir());
+    assertTrue(isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE ")
+        && stmt.contains("LOCATION '" + hdfsTableDir));
+  }
+
+  @Test
+  public void testGenerateTableStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(null);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: " + options.getHiveExternalTableDir());
+    assertTrue(!isHiveExternalTableSet && stmt.contains("CREATE TABLE "));
+  }
+
+  @Test
+  public void testGenerateExternalTableIfExistsStatement() throws IOException, SQLException {
+    options.setFailIfHiveTableExists(false);
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(hdfsTableDir);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: " + options.getHiveExternalTableDir());
+    assertTrue(isHiveExternalTableSet && stmt.contains("CREATE EXTERNAL TABLE IF NOT EXISTS")
+        && stmt.contains("LOCATION '" + hdfsTableDir));
+  }
+
+  @Test
+  public void testGenerateTableIfExistsStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(null);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getCreateTableStmt();
+    boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: " + options.getHiveExternalTableDir());
+    assertTrue(!isHiveExternalTableSet && stmt.contains("CREATE TABLE IF NOT EXISTS"));
+  }
+
+  @Test
+  public void testGenerateExternalTableLoadStatement() throws IOException, SQLException {
+    // need to set this as the other unit test functions may override it for their own test.
+    options.setHiveExternalTableDir(hdfsTableDir);
+    tableDefWriter = new TableDefWriter(options, manager, inputTableName, outputTableName,
+        options.getConf(), false);
+    String stmt = tableDefWriter.getLoadDataStmt();
+    boolean isHiveExternalTableSet = !StringUtils.isBlank(options.getHiveExternalTableDir());
+    LOG.debug("External table dir: " + options.getHiveExternalTableDir());
+    assertTrue(isHiveExternalTableSet && stmt.contains("LOAD DATA INPATH ") && stmt.contains(testTargetDir));
+  }
+}