Posted to commits@sqoop.apache.org by ma...@apache.org on 2017/03/16 16:23:23 UTC

sqoop git commit: SQOOP-3142: Restore fail messages removed in SQOOP-3092

Repository: sqoop
Updated Branches:
  refs/heads/trunk 281a87aed -> 1a8fe23d7


SQOOP-3142: Restore fail messages removed in SQOOP-3092

(Boglarka Egyed via Attila Szabo)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/1a8fe23d
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/1a8fe23d
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/1a8fe23d

Branch: refs/heads/trunk
Commit: 1a8fe23d7a8adf1492b354693dc610d2822cb44e
Parents: 281a87a
Author: Attila Szabo <ma...@apache.org>
Authored: Thu Mar 16 17:21:29 2017 +0100
Committer: Attila Szabo <ma...@apache.org>
Committed: Thu Mar 16 17:22:18 2017 +0100

----------------------------------------------------------------------
 .../com/cloudera/sqoop/hive/TestHiveImport.java | 11 +++++++++
 .../cloudera/sqoop/hive/TestTableDefWriter.java |  2 ++
 .../com/cloudera/sqoop/io/TestCodecMap.java     |  5 +++-
 src/test/com/cloudera/sqoop/io/TestLobFile.java |  2 ++
 .../apache/sqoop/hcat/TestHCatalogBasic.java    | 24 ++++++++++++++++++++
 .../sqoop/mapreduce/db/TestTextSplitter.java    |  2 ++
 6 files changed, 45 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
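For context, every hunk below restores a thrown.reportMissingExceptionWithMessage(...) call next to an existing thrown.expect(...): with JUnit 4.12's ExpectedException rule, expect(...) registers the exception the test requires, and reportMissingExceptionWithMessage(...) sets the text reported if that exception never surfaces, replacing the generic default failure message. A minimal sketch of the pattern, assuming JUnit 4.12 on the test classpath; the class name and the doRiskyIo() helper are hypothetical and are not taken from the Sqoop sources:

import java.io.IOException;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionMessageExample {

  // JUnit 4 ExpectedException rule, as used throughout these Sqoop tests.
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Test
  public void testFailsWithDescriptiveMessage() throws IOException {
    thrown.expect(IOException.class);
    // Without this call the rule fails with a generic
    // "Expected test to throw ..." message when no exception is raised;
    // with it, the failure carries the descriptive text restored here.
    thrown.reportMissingExceptionWithMessage(
        "Expected IOException from doRiskyIo()");
    doRiskyIo();
  }

  // Hypothetical stand-in for the Sqoop code under test.
  private void doRiskyIo() throws IOException {
    throw new IOException("simulated failure");
  }
}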


http://git-wip-us.apache.org/repos/asf/sqoop/blob/1a8fe23d/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index 1d67a2d..6f13fe2 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -468,6 +468,8 @@ public class TestHiveImport extends ImportJobTestCase {
     ImportTool tool = new ImportTool();
 
     thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during Hive table creation with " +
+        "--as-parquetfile");
     tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
         null, true));
   }
@@ -509,6 +511,7 @@ public class TestHiveImport extends ImportJobTestCase {
     String [] vals = { "3.14159", "'foo'" };
 
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException on erroneous Hive exit status");
     runImportTest(TABLE_NAME, types, vals, "failingImport.q",
         getArgv(false, null), new ImportTool());
   }
@@ -641,6 +644,8 @@ public class TestHiveImport extends ImportJobTestCase {
     ImportTool tool = new ImportTool();
 
     thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException with conflicting Hive delimiter " +
+        "drop/replace options");
     tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
         null, true));
   }
@@ -698,16 +703,22 @@ public class TestHiveImport extends ImportJobTestCase {
 
     // Test hive-import with the 1st args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with partition key " +
+        "as importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs1), new ImportTool());
 
     // Test hive-import with the 2nd args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with partition key " +
+        "as importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs2), new ImportTool());
 
     // Test create-hive-table with the 1st args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive table creation with partition key " +
+        "as importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/1a8fe23d/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
index 4db629f..6af12da 100644
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
@@ -55,6 +55,7 @@ public class TestTableDefWriter {
   // Test getHiveOctalCharCode and expect an IllegalArgumentException.
   private void expectExceptionInCharCode(int charCode) {
     thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException with out-of-range Hive delimiter");
     TableDefWriter.getHiveOctalCharCode(charCode);
   }
 
@@ -221,6 +222,7 @@ public class TestTableDefWriter {
     writer.setColumnTypes(colTypes);
 
     thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException on non applied Hive type mapping");
     String createTable = writer.getCreateTableStmt();
   }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/1a8fe23d/src/test/com/cloudera/sqoop/io/TestCodecMap.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
index 982b6ad..c78a5ae 100644
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
@@ -70,7 +70,9 @@ public class TestCodecMap  {
     verifyShortName("gzip", "org.apache.hadoop.io.compress.GzipCodec");
     verifyShortName("default", "org.apache.hadoop.io.compress.DefaultCodec");
 
-    thrown.expect(IOException.class);
+    thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException with invalid codec name during getting " +
+        "short codec name");
     verifyShortName("NONE", "bogus");
   }
 
@@ -83,6 +85,7 @@ public class TestCodecMap  {
   @Test
   public void testUnrecognizedCodec() throws UnsupportedCodecException {
     thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException with invalid codec name");
     CodecMap.getCodec("bogus", new Configuration());
   }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/1a8fe23d/src/test/com/cloudera/sqoop/io/TestLobFile.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/com/cloudera/sqoop/io/TestLobFile.java
index 7f8ca6d..029758c 100644
--- a/src/test/com/cloudera/sqoop/io/TestLobFile.java
+++ b/src/test/com/cloudera/sqoop/io/TestLobFile.java
@@ -143,6 +143,7 @@ public class TestLobFile {
     reader.close();
 
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException calling next after close");
     reader.next();
 
     // A second close shouldn't hurt anything. This should be a no-op.
@@ -590,6 +591,7 @@ public class TestLobFile {
     runCompressedTest(CodecMap.DEFLATE);
 
     thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException for lzo");
     runCompressedTest(CodecMap.LZO);
   }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/1a8fe23d/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 71a371d..104effb 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -110,6 +110,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --target-dir");
     importTool.validateOptions(opts);
   }
 
@@ -128,6 +130,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --warehouse-dir");
     importTool.validateOptions(opts);
   }
 
@@ -145,6 +149,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --hive-import");
     importTool.validateOptions(opts);
   }
 
@@ -163,6 +169,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseExportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog export " +
+        "with --export-dir");
     exportTool.validateOptions(opts);
   }
 
@@ -180,6 +188,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseExportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog export " +
+        "with --as-parquetfile");
     exportTool.validateOptions(opts);
   }
 
@@ -197,6 +207,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-sequencefile");
     importTool.validateOptions(opts);
   }
 
@@ -217,6 +229,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-parquetfile");
     importTool.validateOptions(opts);
   }
 
@@ -234,6 +248,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-avrodatafile");
     importTool.validateOptions(opts);
   }
 
@@ -278,6 +294,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --drop-and-create-hcatalog-table");
     importTool.validateOptions(opts);
   }
 
@@ -331,6 +349,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with only HCatalog keys");
     importTool.validateOptions(opts);
   }
 
@@ -351,6 +371,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with mismatched keys and values");
     importTool.validateOptions(opts);
   }
 
@@ -371,6 +393,8 @@ public class TestHCatalogBasic {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with empty keys and values");
     importTool.validateOptions(opts);
   }
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/1a8fe23d/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
index adb795e..911749f 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
@@ -146,6 +146,8 @@ public class TestTextSplitter {
     TextSplitter splitter = new TextSplitter();
 
     thrown.expect(ValidationException.class);
+    thrown.reportMissingExceptionWithMessage("Expected ValidationException during splitting " +
+        "when min string greater than max string");
     splitter.split(4, "Z", "A", "");
   }