Posted to commits@hive.apache.org by pr...@apache.org on 2014/10/14 21:07:05 UTC

svn commit: r1631841 [3/42] - in /hive/branches/llap: ./ accumulo-handler/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/ accumulo-handler/src/java/org/apache/hadoop/hive...

Modified: hive/branches/llap/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/branches/llap/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Tue Oct 14 19:06:45 2014
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
+import junit.framework.Assert;
 import junit.framework.TestCase;
 
 import org.apache.avro.Schema;
@@ -61,6 +62,7 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.BooleanWritable;
@@ -135,6 +137,27 @@ public class TestHBaseSerDe extends Test
       "  ]\n" +
       "}";
 
+  private static final String EXPECTED_DESERIALIZED_AVRO_STRING =
+      "{\"key\":\"test-row1\",\"cola_avro\":{\"arecord\":{\"int1\":42,\"boolean1\":true,"
+          + "\"long1\":42432234234}}}";
+
+  private static final String EXPECTED_DESERIALIZED_AVRO_STRING_2 =
+      "{\"key\":\"test-row1\","
+      + "\"cola_avro\":{\"employeename\":\"Avro Employee1\","
+      + "\"employeeid\":11111,\"age\":25,\"gender\":\"FEMALE\","
+      + "\"contactinfo\":{\"address\":[{\"address1\":\"Avro First Address1\",\"address2\":"
+      + "\"Avro Second Address1\",\"city\":\"Avro City1\",\"zipcode\":123456,\"county\":"
+      + "{0:{\"areacode\":999,\"number\":1234567890}},\"aliases\":null,\"metadata\":"
+      + "{\"testkey\":\"testvalue\"}},{\"address1\":\"Avro First Address1\",\"address2\":"
+      + "\"Avro Second Address1\",\"city\":\"Avro City1\",\"zipcode\":123456,\"county\":"
+      + "{0:{\"areacode\":999,\"number\":1234567890}},\"aliases\":null,\"metadata\":"
+      + "{\"testkey\":\"testvalue\"}}],\"homephone\":{\"areacode\":999,\"number\":1234567890},"
+      + "\"officephone\":{\"areacode\":999,\"number\":1234455555}}}}";
+
+  private static final String EXPECTED_DESERIALIZED_AVRO_STRING_3 =
+      "{\"key\":\"test-row1\",\"cola_avro\":{\"arecord\":{\"int1\":42,\"string1\":\"test\","
+          + "\"boolean1\":true,\"long1\":42432234234}}}";
+
   /**
    * Test the default behavior of the Lazy family of objects and object inspectors.
    */
@@ -1047,7 +1070,8 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroSchemaInline();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
+        EXPECTED_DESERIALIZED_AVRO_STRING);
   }
 
   private Properties createPropertiesForHiveAvroSchemaInline() {
@@ -1092,7 +1116,8 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroForwardEvolvedSchema();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
+        EXPECTED_DESERIALIZED_AVRO_STRING_3);
   }
 
   private Properties createPropertiesForHiveAvroForwardEvolvedSchema() {
@@ -1136,7 +1161,8 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroBackwardEvolvedSchema();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
+        EXPECTED_DESERIALIZED_AVRO_STRING);
   }
 
   private Properties createPropertiesForHiveAvroBackwardEvolvedSchema() {
@@ -1185,7 +1211,8 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroSerClass();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
+        EXPECTED_DESERIALIZED_AVRO_STRING_2);
   }
 
   private Properties createPropertiesForHiveAvroSerClass() {
@@ -1243,7 +1270,8 @@ public class TestHBaseSerDe extends Test
       Properties tbl = createPropertiesForHiveAvroSchemaUrl(onHDFS);
       serDe.initialize(conf, tbl);
 
-      deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
+      deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
+          EXPECTED_DESERIALIZED_AVRO_STRING);
     } finally {
       // Teardown the cluster
       if (miniDfs != null) {
@@ -1298,7 +1326,8 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroExternalSchema();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
+        EXPECTED_DESERIALIZED_AVRO_STRING_2);
   }
 
   private Properties createPropertiesForHiveAvroExternalSchema() {
@@ -1389,8 +1418,87 @@ public class TestHBaseSerDe extends Test
     return tbl;
   }
 
+  public void testHBaseSerDeCustomStructValue() throws IOException, SerDeException {
+
+    byte[] cfa = "cola".getBytes();
+    byte[] qualStruct = "struct".getBytes();
+
+    TestStruct testStruct = new TestStruct("A", "B", "C", false, (byte) 0);
+    byte[] key = testStruct.getBytes();
+    // Data
+    List<KeyValue> kvs = new ArrayList<KeyValue>();
+
+    byte[] testData = testStruct.getBytes();
+    kvs.add(new KeyValue(key, cfa, qualStruct, testData));
+
+    Result r = new Result(kvs);
+    byte[] putKey = testStruct.getBytesWithDelimiters();
+
+    Put p = new Put(putKey);
+
+    // Post serialization, separators are automatically inserted between different fields in the
+    // struct. Currently there is no way to disable that, so the workaround here is to pad the
+    // data with the separator bytes before creating a "Put" object
+    p.add(new KeyValue(putKey, cfa, qualStruct, Bytes.padTail(testData, 2)));
+
+    // Create, initialize, and test the SerDe
+    HBaseSerDe serDe = new HBaseSerDe();
+    Configuration conf = new Configuration();
+    Properties tbl = createPropertiesForValueStruct();
+    serDe.initialize(conf, tbl);
+
+    deserializeAndSerializeHBaseValueStruct(serDe, r, p);
+
+  }
+
+  private Properties createPropertiesForValueStruct() {
+    Properties tbl = new Properties();
+    tbl.setProperty("cola.struct.serialization.type", "struct");
+    tbl.setProperty("cola.struct.test.value", "test value");
+    tbl.setProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS,
+        "org.apache.hadoop.hive.hbase.HBaseTestStructSerializer");
+    tbl.setProperty(serdeConstants.LIST_COLUMNS, "key,astring");
+    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
+        "struct<col1:string,col2:string,col3:string>,struct<col1:string,col2:string,col3:string>");
+    tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:struct");
+    tbl.setProperty(HBaseSerDe.HBASE_COMPOSITE_KEY_CLASS,
+        "org.apache.hadoop.hive.hbase.HBaseTestCompositeKey");
+    return tbl;
+  }
+
+  private void deserializeAndSerializeHBaseValueStruct(HBaseSerDe serDe, Result r, Put p)
+      throws SerDeException, IOException {
+    StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
+
+    List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
+
+    Object row = serDe.deserialize(new ResultWritable(r));
+
+    Object fieldData = null;
+    for (int j = 0; j < fieldRefs.size(); j++) {
+      fieldData = soi.getStructFieldData(row, fieldRefs.get(j));
+      assertNotNull(fieldData);
+      if (fieldData instanceof LazyStruct) {
+        assertEquals(((LazyStruct) fieldData).getField(0).toString(), "A");
+        assertEquals(((LazyStruct) fieldData).getField(1).toString(), "B");
+        assertEquals(((LazyStruct) fieldData).getField(2).toString(), "C");
+      } else {
+        Assert.fail("fieldData should be an instance of LazyStruct");
+      }
+    }
+
+    assertEquals(
+        "{\"key\":{\"col1\":\"A\",\"col2\":\"B\",\"col3\":\"C\"},\"astring\":{\"col1\":\"A\",\"col2\":\"B\",\"col3\":\"C\"}}",
+        SerDeUtils.getJSONString(row, soi));
+
+    // Now serialize
+    Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();
+
+    assertEquals("Serialized put:", p.toString(), put.toString());
+  }
+
   private void deserializeAndSerializeHiveAvro(HBaseSerDe serDe, Result r, Put p,
-      Object[] expectedFieldsData)
+      Object[] expectedFieldsData, String expectedDeserializedAvroString)
       throws SerDeException, IOException {
     StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
 
@@ -1403,6 +1511,8 @@ public class TestHBaseSerDe extends Test
       assertNotNull(fieldData);
       assertEquals(expectedFieldsData[j], fieldData.toString().trim());
     }
+    
+    assertEquals(expectedDeserializedAvroString, SerDeUtils.getJSONString(row, soi));
 
     // Now serialize
     Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();
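
A minimal sketch of the verification pattern the new assertions adopt: in addition to comparing each field, the deserialized row is rendered as JSON via SerDeUtils and checked against a single expected literal, which also catches field-ordering and formatting regressions that per-field asserts miss. The method and parameter names below are placeholders, not code from this commit.

    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.SerDeUtils;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    // Placeholder helper: deserialize an HBase Result and assert its JSON rendering.
    static void assertRowRendersAs(HBaseSerDe serDe, Result result, String expectedJson)
        throws SerDeException {
      Object row = serDe.deserialize(new ResultWritable(result));
      StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
      assertEquals(expectedJson, SerDeUtils.getJSONString(row, soi));
    }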

Modified: hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (original)
+++ hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java Tue Oct 14 19:06:45 2014
@@ -26,7 +26,6 @@ import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.DDLTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -195,8 +194,7 @@ final class CreateTableHook extends HCat
 
         //authorize against the table operation so that location permissions can be checked if any
 
-        if (HiveConf.getBoolVar(context.getConf(),
-          HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+        if (HCatAuthUtil.isAuthorizationEnabled(context.getConf())) {
           authorize(table, Privilege.CREATE);
         }
       } catch (HiveException ex) {
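
The inline HiveConf check is replaced here (and in HCatSemanticAnalyzerBase below) by HCatAuthUtil.isAuthorizationEnabled(). The helper's body is not part of this diff; presumably it centralizes the same check, roughly as sketched below, though the real class may also consult the configured authorization manager.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;

    // Sketch only: roughly what a centralized authorization toggle looks like.
    public final class HCatAuthUtilSketch {
      public static boolean isAuthorizationEnabled(Configuration conf) {
        return HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED);
      }
    }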

Modified: hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Tue Oct 14 19:06:45 2014
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -104,6 +105,7 @@ public class HCatSemanticAnalyzer extend
     case HiveParser.TOK_ALTERVIEW_DROPPARTS:
     case HiveParser.TOK_ALTERVIEW_PROPERTIES:
     case HiveParser.TOK_ALTERVIEW_RENAME:
+    case HiveParser.TOK_ALTERVIEW:
     case HiveParser.TOK_CREATEVIEW:
     case HiveParser.TOK_DROPVIEW:
 
@@ -359,7 +361,7 @@ public class HCatSemanticAnalyzer extend
     AlterTableDesc alterTable = work.getAlterTblDesc();
     if (alterTable != null) {
       Table table = hive.getTable(SessionState.get().getCurrentDatabase(),
-        alterTable.getOldName(), false);
+          Utilities.getDbTableName(alterTable.getOldName())[1], false);
 
       Partition part = null;
       if (alterTable.getPartSpec() != null) {
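
The fix matters when ALTER TABLE receives a database-qualified name: Hive.getTable(db, table, ...) expects a bare table name, so "db1.tbl" must be reduced to "tbl" first. Assuming Utilities.getDbTableName resolves an optionally qualified name into a {database, table} pair, a hypothetical splitter with the same observable behavior:

    // Hypothetical illustration, not Hive's implementation.
    static String[] splitDbTableName(String name, String currentDb) {
      int dot = name.indexOf('.');
      return (dot < 0)
          ? new String[] { currentDb, name }                                  // unqualified
          : new String[] { name.substring(0, dot), name.substring(dot + 1) }; // "db.table"
    }
    // splitDbTableName("db1.tbl", "default")[1] -> "tbl"
    // splitDbTableName("tbl", "default")[1]     -> "tbl"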

Modified: hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java (original)
+++ hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java Tue Oct 14 19:06:45 2014
@@ -22,7 +22,6 @@ package org.apache.hive.hcatalog.cli.Sem
 import java.io.Serializable;
 import java.util.List;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
@@ -89,8 +88,7 @@ public class HCatSemanticAnalyzerBase ex
   protected void authorizeDDL(HiveSemanticAnalyzerHookContext context,
                 List<Task<? extends Serializable>> rootTasks) throws SemanticException {
 
-    if (!HiveConf.getBoolVar(context.getConf(),
-      HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+    if (!HCatAuthUtil.isAuthorizationEnabled(context.getConf())) {
       return;
     }
 

Modified: hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java (original)
+++ hive/branches/llap/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java Tue Oct 14 19:06:45 2014
@@ -33,6 +33,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -120,25 +121,11 @@ public class HCatUtil {
   }
 
   public static String encodeBytes(byte[] bytes) {
-    StringBuilder strBuf = new StringBuilder();
-
-    for (int i = 0; i < bytes.length; i++) {
-      strBuf.append((char) (((bytes[i] >> 4) & 0xF) + ('a')));
-      strBuf.append((char) (((bytes[i]) & 0xF) + ('a')));
-    }
-
-    return strBuf.toString();
+    return new String(Base64.encodeBase64(bytes, false, false));
   }
 
   public static byte[] decodeBytes(String str) {
-    byte[] bytes = new byte[str.length() / 2];
-    for (int i = 0; i < str.length(); i += 2) {
-      char c = str.charAt(i);
-      bytes[i / 2] = (byte) ((c - 'a') << 4);
-      c = str.charAt(i + 1);
-      bytes[i / 2] += (c - 'a');
-    }
-    return bytes;
+    return Base64.decodeBase64(str.getBytes());
   }
 
   public static List<HCatFieldSchema> getHCatFieldSchemaList(
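
The hand-rolled nibble-to-letter codec is swapped for commons-codec Base64, which is shorter and produces more compact output (4 chars per 3 input bytes instead of 2 per byte); since encodeBytes and decodeBytes change in lockstep, the two stay internally consistent. A standalone round-trip sketch of the new calls, assuming only commons-codec on the classpath:

    import java.util.Arrays;

    import org.apache.commons.codec.binary.Base64;

    public class Base64RoundTrip {
      public static void main(String[] args) {
        byte[] original = {0, 1, 2, (byte) 0xFF};
        // unchunked, not URL-safe: the same flags the new encodeBytes() passes
        String encoded = new String(Base64.encodeBase64(original, false, false));
        byte[] decoded = Base64.decodeBase64(encoded.getBytes());
        System.out.println(encoded + " round-trips: " + Arrays.equals(original, decoded));
        // prints: AAEC/w== round-trips: true
      }
    }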

Modified: hive/branches/llap/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/branches/llap/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java Tue Oct 14 19:06:45 2014
@@ -90,6 +90,7 @@ public class TestHCatPartitionPublish {
     File workDir = handleWorkDir();
     conf.set("yarn.scheduler.capacity.root.queues", "default");
     conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+    conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
 
     fs = FileSystem.get(conf);
     System.setProperty("hadoop.log.dir", new File(workDir, "/logs").getAbsolutePath());

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml Tue Oct 14 19:06:45 2014
@@ -53,6 +53,13 @@
       <classifier>tests</classifier>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
 

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java Tue Oct 14 19:06:45 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
 import org.apache.pig.LoadFunc;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
 import org.apache.pig.data.Tuple;

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java Tue Oct 14 19:06:45 2014
@@ -23,9 +23,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Iterator;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.io.FileUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
@@ -42,6 +41,7 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatContext;
@@ -51,12 +51,16 @@ import org.apache.hive.hcatalog.mapreduc
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.HCatMapRedUtil;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.Tuple;
 
-public class TestE2EScenarios extends TestCase {
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
+public class TestE2EScenarios {
   private static final String TEST_DATA_DIR = System.getProperty("java.io.tmpdir") + File.separator
       + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
@@ -69,9 +73,8 @@ public class TestE2EScenarios extends Te
     return "orc";
   }
 
-  @Override
-  protected void setUp() throws Exception {
-
+  @Before
+  public void setUp() throws Exception {
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
@@ -90,8 +93,8 @@ public class TestE2EScenarios extends Te
 
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     try {
       dropTable("inpy");
       dropTable("rc5318");
@@ -146,16 +149,13 @@ public class TestE2EScenarios extends Te
     System.err.println("===");
   }
 
-
   private void copyTable(String in, String out) throws IOException, InterruptedException {
     Job ijob = new Job();
     Job ojob = new Job();
     HCatInputFormat inpy = new HCatInputFormat();
     inpy.setInput(ijob , null, in);
     HCatOutputFormat oupy = new HCatOutputFormat();
-    oupy.setOutput(ojob,
-      OutputJobInfo.create(null, out, new HashMap<String,String>()
-      ));
+    oupy.setOutput(ojob, OutputJobInfo.create(null, out, new HashMap<String,String>()));
 
     // Test HCatContext
 
@@ -207,6 +207,7 @@ public class TestE2EScenarios extends Te
   }
 
 
+  @Test
   public void testReadOrcAndRCFromPig() throws Exception {
     String tableSchema = "ti tinyint, si smallint,i int, bi bigint, f float, d double, b boolean";
 
@@ -224,15 +225,14 @@ public class TestE2EScenarios extends Te
     driverRun("LOAD DATA LOCAL INPATH '"+TEXTFILE_LOCN+"' OVERWRITE INTO TABLE inpy");
 
     // write it out from hive to an rcfile table, and to an orc table
-//        driverRun("insert overwrite table rc5318 select * from inpy");
+    //driverRun("insert overwrite table rc5318 select * from inpy");
     copyTable("inpy","rc5318");
-//        driverRun("insert overwrite table orc5318 select * from inpy");
+    //driverRun("insert overwrite table orc5318 select * from inpy");
     copyTable("inpy","orc5318");
 
     pigDump("inpy");
     pigDump("rc5318");
     pigDump("orc5318");
-
   }
 
 }
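
The file moves from JUnit 3 to JUnit 4: no TestCase superclass, and lifecycle plus test methods are discovered by annotation rather than by name, so they must be public. A minimal self-contained example of the target shape:

    import static org.junit.Assert.assertEquals;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class ExampleMigratedTest {
      @Before               // formerly: protected void setUp() from TestCase
      public void setUp() throws Exception {
      }

      @After                // formerly: protected void tearDown() from TestCase
      public void tearDown() throws Exception {
      }

      @Test                 // formerly: discovered by the test* naming convention
      public void testSomething() {
        assertEquals(4, 2 + 2);
      }
    }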

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java Tue Oct 14 19:06:45 2014
@@ -28,12 +28,15 @@ import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -41,15 +44,19 @@ import org.apache.hadoop.hive.cli.CliSes
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.Pair;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.ResourceStatistics;
@@ -60,19 +67,26 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.PigRunner;
 import org.apache.pig.tools.pigstats.OutputStats;
 import org.apache.pig.tools.pigstats.PigStats;
+
 import org.joda.time.DateTime;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
 
+@RunWith(Parameterized.class)
 public class TestHCatLoader {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoader.class);
   private static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty("java.io.tmpdir") +
-          File.separator + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis());
+      File.separator + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis());
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data";
@@ -85,22 +99,45 @@ public class TestHCatLoader {
   private Driver driver;
   private Map<Integer, Pair<Integer, String>> basicInputData;
 
-  protected String storageFormat() {
-    return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
-      "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+  private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+      new HashMap<String, Set<String>>() {{
+        put(IOConstants.AVRO, new HashSet<String>() {{
+          add("testReadDataBasic");
+          add("testReadPartitionedBasic");
+          add("testProjectionsBasic");
+          add("testSchemaLoadPrimitiveTypes");
+        }});
+        put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+          add("testReadDataBasic");
+          add("testReadPartitionedBasic");
+          add("testProjectionsBasic");
+        }});
+      }};
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
+  }
+
+  public TestHCatLoader(String storageFormat) {
+    this.storageFormat = storageFormat;
   }
 
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
     dropTable(tablename, driver);
   }
+
   static void dropTable(String tablename, Driver driver) throws IOException, CommandNeedRetryException {
     driver.run("drop table if exists " + tablename);
   }
 
   private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
-    createTable(tablename, schema, partitionedBy, driver, storageFormat());
+    createTable(tablename, schema, partitionedBy, driver, storageFormat);
   }
-  static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat) 
+
+  static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat)
       throws IOException, CommandNeedRetryException {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
@@ -114,6 +151,7 @@ public class TestHCatLoader {
   private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
     createTable(tablename, schema, null);
   }
+
   /**
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
@@ -125,20 +163,20 @@ public class TestHCatLoader {
       throw new IOException("Failed to execute \"" + cmd + "\". Driver returned " + cpr.getResponseCode() + " Error: " + cpr.getErrorMessage());
     }
   }
+
   private static void checkProjection(FieldSchema fs, String expectedName, byte expectedPigType) {
     assertEquals(fs.alias, expectedName);
     assertEquals("Expected " + DataType.findTypeName(expectedPigType) + "; got " +
       DataType.findTypeName(fs.type), expectedPigType, fs.type);
   }
-  
+
   @Before
   public void setup() throws Exception {
-
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
     }
-    if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+    if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
       throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
     }
 
@@ -192,7 +230,7 @@ public class TestHCatLoader {
     server.registerQuery("B = foreach A generate a,b;", ++i);
     server.registerQuery("B2 = filter B by a < 2;", ++i);
     server.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=0');", ++i);
-    
+
     server.registerQuery("C = foreach A generate a,b;", ++i);
     server.registerQuery("C2 = filter C by a >= 2;", ++i);
     server.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=1');", ++i);
@@ -200,17 +238,18 @@ public class TestHCatLoader {
     server.registerQuery("D = load '" + COMPLEX_FILE_NAME + "' as (name:chararray, studentid:int, contact:tuple(phno:chararray,email:chararray), currently_registered_courses:bag{innertup:tuple(course:chararray)}, current_grades:map[ ] , phnos :bag{innertup:tuple(phno:chararray,type:chararray)});", ++i);
     server.registerQuery("store D into '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
     server.executeBatch();
-
   }
 
   @After
   public void tearDown() throws Exception {
     try {
-      dropTable(BASIC_TABLE);
-      dropTable(COMPLEX_TABLE);
-      dropTable(PARTITIONED_TABLE);
-      dropTable(SPECIFIC_SIZE_TABLE);
-      dropTable(AllTypesTable.ALL_PRIMITIVE_TYPES_TABLE);
+      if (driver != null) {
+        dropTable(BASIC_TABLE);
+        dropTable(COMPLEX_TABLE);
+        dropTable(PARTITIONED_TABLE);
+        dropTable(SPECIFIC_SIZE_TABLE);
+        dropTable(AllTypesTable.ALL_PRIMITIVE_TYPES_TABLE);
+      }
     } finally {
       FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
     }
@@ -218,6 +257,7 @@ public class TestHCatLoader {
 
   @Test
   public void testSchemaLoadBasic() throws IOException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -232,23 +272,28 @@ public class TestHCatLoader {
     assertTrue(Xfields.get(1).type == DataType.CHARARRAY);
 
   }
+
   /**
    * Test that we properly translate data types in Hive/HCat table schema into Pig schema
    */
   @Test
   public void testSchemaLoadPrimitiveTypes() throws IOException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     AllTypesTable.testSchemaLoadPrimitiveTypes();
   }
+
   /**
    * Test that value from Hive table are read properly in Pig
    */
   @Test
   public void testReadDataPrimitiveTypes() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     AllTypesTable.testReadDataPrimitiveTypes();
   }
 
   @Test
   public void testReadDataBasic() throws IOException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     PigServer server = new PigServer(ExecType.LOCAL);
 
     server.registerQuery("X = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
@@ -270,6 +315,7 @@ public class TestHCatLoader {
 
   @Test
   public void testSchemaLoadComplex() throws IOException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -328,6 +374,7 @@ public class TestHCatLoader {
 
   @Test
   public void testReadPartitionedBasic() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     PigServer server = new PigServer(ExecType.LOCAL);
 
     driver.run("select * from " + PARTITIONED_TABLE);
@@ -395,6 +442,7 @@ public class TestHCatLoader {
 
   @Test
   public void testProjectionsBasic() throws IOException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     PigServer server = new PigServer(ExecType.LOCAL);
 
@@ -444,6 +492,7 @@ public class TestHCatLoader {
 
   @Test
   public void testColumnarStorePushdown() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     String PIGOUTPUT_DIR = TEST_DATA_DIR+ "/colpushdownop";
     String PIG_FILE = "test.pig";
     String expectedCols = "0,1";
@@ -470,13 +519,14 @@ public class TestHCatLoader {
       {
         fs.delete(new Path(PIGOUTPUT_DIR), true);
       }
-    }finally {
+    } finally {
       new File(PIG_FILE).delete();
     }
   }
 
   @Test
   public void testGetInputBytes() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");
     file.deleteOnExit();
     RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
@@ -492,6 +542,7 @@ public class TestHCatLoader {
 
   @Test
   public void testConvertBooleanToInt() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     String tbl = "test_convert_boolean_to_int";
     String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";
     File inputDataDir = new File(inputFileName).getParentFile();
@@ -534,7 +585,7 @@ public class TestHCatLoader {
   }
 
   /**
-   * basic tests that cover each scalar type 
+   * basic tests that cover each scalar type
    * https://issues.apache.org/jira/browse/HIVE-5814
    */
   private static final class AllTypesTable {
@@ -591,7 +642,11 @@ public class TestHCatLoader {
      * Test that value from Hive table are read properly in Pig
      */
     private static void testReadDataPrimitiveTypes() throws Exception {
-      PigServer server = new PigServer(ExecType.LOCAL);
+      // testConvertBooleanToInt() sets HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER=true, and
+      // might be the last one to call HCatContext.INSTANCE.setConf(). Make sure the setting is false.
+      Properties properties = new Properties();
+      properties.setProperty(HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER, "false");
+      PigServer server = new PigServer(ExecType.LOCAL, properties);
       server.registerQuery("X = load '" + ALL_PRIMITIVE_TYPES_TABLE + "' using " + HCatLoader.class.getName() + "();");
       Iterator<Tuple> XIter = server.openIterator("X");
       int numTuplesRead = 0;
@@ -599,22 +654,26 @@ public class TestHCatLoader {
         Tuple t = XIter.next();
         assertEquals(HCatFieldSchema.Type.numPrimitiveTypes(), t.size());
         int colPos = 0;
-        for(Object referenceData : primitiveRows[numTuplesRead]) {
-          if(referenceData == null) {
-            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos + " Reference data is null; actual " +
-                t.get(colPos), t.get(colPos) == null);
-          }
-          else if(referenceData instanceof java.util.Date) {
-            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos + " Reference data=" + ((java.util.Date)referenceData).getTime() + " actual=" +
-                ((DateTime)t.get(colPos)).getMillis() + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
+        for (Object referenceData : primitiveRows[numTuplesRead]) {
+          if (referenceData == null) {
+            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+                + " Reference data is null; actual "
+                + t.get(colPos), t.get(colPos) == null);
+          } else if (referenceData instanceof java.util.Date) {
+            // Note that here we ignore the nanos part of the Hive Timestamp since nanos are
+            // dropped when reading Hive data from Pig by design.
+            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+                + " Reference data=" + ((java.util.Date)referenceData).getTime()
+                + " actual=" + ((DateTime)t.get(colPos)).getMillis()
+                + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
                 ((java.util.Date)referenceData).getTime()== ((DateTime)t.get(colPos)).getMillis());
-            //note that here we ignore nanos part of Hive Timestamp since nanos are dropped when reading Hive from Pig by design
-          }
-          else {
-            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos + " Reference data=" + referenceData + " actual=" +
-                t.get(colPos) + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
+          } else {
+            // Doing String comparisons here as value objects in Hive and Pig are different, so
+            // equals() doesn't work.
+            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+                + " Reference data=" + referenceData + " actual=" + t.get(colPos)
+                + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ") ",
                 referenceData.toString().equals(t.get(colPos).toString()));
-            //doing String comps here as value objects in Hive in Pig are different so equals() doesn't work
           }
           colPos++;
         }
@@ -624,10 +683,10 @@ public class TestHCatLoader {
     }
     private static void setupAllTypesTable(Driver driver) throws Exception {
       String[] primitiveData = new String[primitiveRows.length];
-      for(int i = 0; i < primitiveRows.length; i++) {
+      for (int i = 0; i < primitiveRows.length; i++) {
         Object[] rowData = primitiveRows[i];
         StringBuilder row = new StringBuilder();
-        for(Object cell : rowData) {
+        for (Object cell : rowData) {
           row.append(row.length() == 0 ? "" : "\t").append(cell == null ? null : cell);
         }
         primitiveData[i] = row.toString();
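
TestHCatLoader (like the complex-schema and storer tests below) now runs once per storage format via JUnit's Parameterized runner, with assumeTrue() turning disabled format/test combinations into skips rather than failures. A compact sketch of the pattern; the hard-coded format list and skip check below merely stand in for the commit's StorageFormats.names() and TestUtil.shouldSkip() helpers, which are defined elsewhere:

    import static org.junit.Assume.assumeTrue;

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;

    @RunWith(Parameterized.class)
    public class ExampleStorageFormatTest {
      private final String storageFormat;

      @Parameterized.Parameters
      public static Collection<Object[]> generateParameters() {
        // stand-in for StorageFormats.names()
        return Arrays.asList(new Object[][] {{"orc"}, {"rcfile"}, {"parquetfile"}});
      }

      public ExampleStorageFormatTest(String storageFormat) {
        this.storageFormat = storageFormat;
      }

      @Test
      public void testReadDataBasic() {
        // stand-in for: assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
        assumeTrue(!"parquetfile".equals(storageFormat));
        // runs for orc and rcfile; the parquetfile variant is reported as skipped
      }
    }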

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java Tue Oct 14 19:06:45 2014
@@ -18,21 +18,28 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import com.google.common.collect.ImmutableSet;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
-
-import junit.framework.Assert;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -44,11 +51,21 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
+
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assume.assumeTrue;
+
+@RunWith(Parameterized.class)
 public class TestHCatLoaderComplexSchema {
 
   //private static MiniCluster cluster = MiniCluster.buildCluster();
@@ -56,13 +73,33 @@ public class TestHCatLoaderComplexSchema
   //private static Properties props;
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoaderComplexSchema.class);
 
-  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
-    driver.run("drop table " + tablename);
+  private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+      new HashMap<String, Set<String>>() {{
+        put(IOConstants.AVRO, new HashSet<String>() {{
+          add("testSyntheticComplexSchema");
+          add("testTupleInBagInTupleInBag");
+          add("testMapWithComplexData");
+        }});
+        put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+          add("testSyntheticComplexSchema");
+          add("testTupleInBagInTupleInBag");
+          add("testMapWithComplexData");
+        }});
+      }};
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
   }
 
-  protected String storageFormat() {
-    return "RCFILE tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
-      "'hcat.osd'='org.apache.hive.hcatalog.rcfile.RCFileOutputDriver')";
+  public TestHCatLoaderComplexSchema(String storageFormat) {
+    this.storageFormat = storageFormat;
+  }
+
+  private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
+    driver.run("drop table " + tablename);
   }
 
   private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
@@ -71,7 +108,7 @@ public class TestHCatLoaderComplexSchema
     if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
       createTable = createTable + "partitioned by (" + partitionedBy + ") ";
     }
-    createTable = createTable + "stored as " + storageFormat();
+    createTable = createTable + "stored as " + storageFormat;
     LOG.info("Creating table:\n {}", createTable);
     CommandProcessorResponse result = driver.run(createTable);
     int retCode = result.getResponseCode();
@@ -86,7 +123,6 @@ public class TestHCatLoaderComplexSchema
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-
     HiveConf hiveConf = new HiveConf(TestHCatLoaderComplexSchema.class);
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
@@ -95,7 +131,6 @@ public class TestHCatLoaderComplexSchema
     SessionState.start(new CliSessionState(hiveConf));
     //props = new Properties();
     //props.setProperty("fs.default.name", cluster.getProperties().getProperty("fs.default.name"));
-
   }
 
   private static final TupleFactory tf = TupleFactory.getInstance();
@@ -115,6 +150,7 @@ public class TestHCatLoaderComplexSchema
    */
   @Test
   public void testSyntheticComplexSchema() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     String pigSchema =
         "a: " +
         "(" +
@@ -183,7 +219,6 @@ public class TestHCatLoaderComplexSchema
     verifyWriteRead("testSyntheticComplexSchema", pigSchema, tableSchema, data, false);
     verifyWriteRead("testSyntheticComplexSchema2", pigSchema, tableSchema2, data, true);
     verifyWriteRead("testSyntheticComplexSchema2", pigSchema, tableSchema2, data, false);
-
   }
 
   private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, boolean provideSchemaToStorer)
@@ -216,7 +251,7 @@ public class TestHCatLoaderComplexSchema
       }
       Schema dumpedXSchema = server.dumpSchema("X");
 
-      Assert.assertEquals(
+      assertEquals(
         "expected " + dumpedASchema + " but was " + dumpedXSchema + " (ignoring field names)",
         "",
         compareIgnoreFiledNames(dumpedASchema, dumpedXSchema));
@@ -225,19 +260,19 @@ public class TestHCatLoaderComplexSchema
       dropTable(tablename);
     }
   }
-  
+
   private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
-    Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
+    assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
     for (int i = 0; i < t1.size(); i++) {
       Object f1 = t1.get(i);
       Object f2 = t2.get(i);
-      Assert.assertNotNull("left", f1);
-      Assert.assertNotNull("right", f2);
+      assertNotNull("left", f1);
+      assertNotNull("right", f2);
       String msg = "right: " + f1 + ", left: " + f2;
-      Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
+      assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
     }
   }
-  
+
   private String noOrder(String s) {
     char[] chars = s.toCharArray();
     Arrays.sort(chars);
@@ -275,6 +310,7 @@ public class TestHCatLoaderComplexSchema
    */
   @Test
   public void testTupleInBagInTupleInBag() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     String pigSchema = "a: { b : ( c: { d: (i : long) } ) }";
 
     String tableSchema = "a array< array< bigint > >";
@@ -294,11 +330,11 @@ public class TestHCatLoaderComplexSchema
 
     verifyWriteRead("TupleInBagInTupleInBag3", pigSchema, tableSchema2, data, true);
     verifyWriteRead("TupleInBagInTupleInBag4", pigSchema, tableSchema2, data, false);
-
   }
 
   @Test
   public void testMapWithComplexData() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     String pigSchema = "a: long, b: map[]";
     String tableSchema = "a bigint, b map<string, struct<aa:bigint, ab:string>>";
 
@@ -317,6 +353,5 @@ public class TestHCatLoaderComplexSchema
     }
     verifyWriteRead("testMapWithComplexData", pigSchema, tableSchema, data, true);
     verifyWriteRead("testMapWithComplexData2", pigSchema, tableSchema, data, false);
-
   }
 }

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java?rev=1631841&r1=1631840&r2=1631841&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java Tue Oct 14 19:06:45 2014
@@ -18,21 +18,32 @@
  */
 package org.apache.hive.hcatalog.pig;
 
+import com.google.common.collect.ImmutableSet;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
+
 import org.apache.pig.EvalFunc;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigException;
@@ -41,21 +52,96 @@ import org.apache.pig.data.DataByteArray
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.util.LogUtils;
+
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
-import org.junit.Assert;
+
+import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
+
+@RunWith(Parameterized.class)
 public class TestHCatStorer extends HCatBaseTest {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatStorer.class);
 
   private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
 
+  private static final Map<String, Set<String>> DISABLED_STORAGE_FORMATS =
+    new HashMap<String, Set<String>>() {{
+      put(IOConstants.AVRO, new HashSet<String>() {{
+        add("testBagNStruct");
+        add("testDateCharTypes");
+        add("testDynamicPartitioningMultiPartColsInDataNoSpec");
+        add("testDynamicPartitioningMultiPartColsInDataPartialSpec");
+        add("testMultiPartColsInData");
+        add("testPartColsInData");
+        add("testStoreFuncAllSimpleTypes");
+        add("testStoreFuncSimple");
+        add("testStoreInPartiitonedTbl");
+        add("testStoreMultiTables");
+        add("testStoreWithNoCtorArgs");
+        add("testStoreWithNoSchema");
+        add("testWriteChar");
+        add("testWriteDate");
+        add("testWriteDate2");
+        add("testWriteDate3");
+        add("testWriteDecimal");
+        add("testWriteDecimalX");
+        add("testWriteDecimalXY");
+        add("testWriteSmallint");
+        add("testWriteTimestamp");
+        add("testWriteTinyint");
+        add("testWriteVarchar");
+      }});
+      put(IOConstants.PARQUETFILE, new HashSet<String>() {{
+        add("testBagNStruct");
+        add("testDateCharTypes");
+        add("testDynamicPartitioningMultiPartColsInDataNoSpec");
+        add("testDynamicPartitioningMultiPartColsInDataPartialSpec");
+        add("testMultiPartColsInData");
+        add("testPartColsInData");
+        add("testStoreFuncAllSimpleTypes");
+        add("testStoreFuncSimple");
+        add("testStoreInPartiitonedTbl");
+        add("testStoreMultiTables");
+        add("testStoreWithNoCtorArgs");
+        add("testStoreWithNoSchema");
+        add("testWriteChar");
+        add("testWriteDate");
+        add("testWriteDate2");
+        add("testWriteDate3");
+        add("testWriteDecimal");
+        add("testWriteDecimalX");
+        add("testWriteDecimalXY");
+        add("testWriteSmallint");
+        add("testWriteTimestamp");
+        add("testWriteTinyint");
+        add("testWriteVarchar");
+      }});
+    }};
+
+  private String storageFormat;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> generateParameters() {
+    return StorageFormats.names();
+  }
+
+  public TestHCatStorer(String storageFormat) {
+    this.storageFormat = storageFormat;
+  }
+
   //Start: tests that check values from Pig that are out of range for target column
   @Test
   public void testWriteTinyint() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     pigValueRangeTest("junitTypeTest1", "tinyint", "int", null, Integer.toString(1), Integer.toString(1));
     pigValueRangeTestOverflow("junitTypeTest1", "tinyint", "int", null, Integer.toString(300));
     pigValueRangeTestOverflow("junitTypeTest2", "tinyint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -63,8 +149,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "tinyint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       Integer.toString(300));
   }
+
   @Test
   public void testWriteSmallint() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     pigValueRangeTest("junitTypeTest1", "smallint", "int", null, Integer.toString(Short.MIN_VALUE),
       Integer.toString(Short.MIN_VALUE));
     pigValueRangeTestOverflow("junitTypeTest2", "smallint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -72,8 +160,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "smallint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       Integer.toString(Short.MAX_VALUE + 1));
   }
+
   @Test
   public void testWriteChar() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     pigValueRangeTest("junitTypeTest1", "char(5)", "chararray", null, "xxx", "xxx  ");
     pigValueRangeTestOverflow("junitTypeTest1", "char(5)", "chararray", null, "too_long");
     pigValueRangeTestOverflow("junitTypeTest2", "char(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -81,8 +171,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "char(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       "too_long2");
   }
+
   @Test
   public void testWriteVarchar() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     pigValueRangeTest("junitTypeTest1", "varchar(5)", "chararray", null, "xxx", "xxx");
     pigValueRangeTestOverflow("junitTypeTest1", "varchar(5)", "chararray", null, "too_long");
     pigValueRangeTestOverflow("junitTypeTest2", "varchar(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
@@ -90,8 +182,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "varchar(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       "too_long2");
   }
+
   @Test
   public void testWriteDecimalXY() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     pigValueRangeTest("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(1.2).toString(),
       BigDecimal.valueOf(1.2).toString());
     pigValueRangeTestOverflow("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(12345.12).toString());
@@ -100,8 +194,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal(5,2)", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(500.123).toString());
   }
+
   @Test
   public void testWriteDecimalX() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     //interestingly decimal(2) means decimal(2,0)
     pigValueRangeTest("junitTypeTest1", "decimal(2)", "bigdecimal", null, BigDecimal.valueOf(12).toString(),
       BigDecimal.valueOf(12).toString());
@@ -110,8 +206,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal(2)", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(50.123).toString());
   }
+
   @Test
   public void testWriteDecimal() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     //decimal means decimal(10,0)
     pigValueRangeTest("junitTypeTest1", "decimal", "bigdecimal", null, BigDecimal.valueOf(1234567890).toString(),
       BigDecimal.valueOf(1234567890).toString());
@@ -120,13 +218,16 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(12345678900L).toString());
   }
+
   /**
    * because we want to ignore TZ which is included in toString()
-   * include time to make sure it's 0 
+   * include the time portion to make sure it's 0.
    */
   private static final String FORMAT_4_DATE = "yyyy-MM-dd HH:mm:ss";
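+  // A minimal sketch of what FORMAT_4_DATE buys us, assuming Joda-Time
+  // semantics: formatting with it drops the TZ suffix that DateTime.toString()
+  // appends, while still exposing any non-zero time component.
+  //   new DateTime(1991, 10, 11, 0, 0).toString(FORMAT_4_DATE)
+  //     -> "1991-10-11 00:00:00"
+  //   new DateTime(1991, 10, 11, 0, 0).toString()
+  //     -> "1991-10-11T00:00:00.000-05:00" (offset depends on the default TZ)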
+
   @Test
   public void testWriteDate() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     DateTime d = new DateTime(1991,10,11,0,0);
     pigValueRangeTest("junitTypeTest1", "date", "datetime", null, d.toString(),
       d.toString(FORMAT_4_DATE), FORMAT_4_DATE);
@@ -142,8 +243,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest6", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);//date out of range due to time!=0
   }
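+  // Sketch of the time!=0 overflow above: d carries time 00:00, so it maps
+  // cleanly to a Hive DATE, but d.plusMinutes(1) carries time 00:01 and must
+  // be treated as out of range for the DATE column.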
+
   @Test
   public void testWriteDate3() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     DateTime d = new DateTime(1991,10,11,23,10,DateTimeZone.forOffsetHours(-11));
     FrontendException fe = null;
     //expect to fail since the time component is not 0
@@ -154,8 +257,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest6", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);
   }
+
   @Test
   public void testWriteDate2() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     DateTime d = new DateTime(1991,11,12,0,0, DateTimeZone.forID("US/Eastern"));
     pigValueRangeTest("junitTypeTest1", "date", "datetime", null, d.toString(),
       d.toString(FORMAT_4_DATE), FORMAT_4_DATE);
@@ -168,57 +273,54 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);
   }
+
   /**
-   * Note that the value that comes back from Hive will have local TZ on it.  Using local is 
+   * Note that the value that comes back from Hive will have local TZ on it.  Using local is
    * arbitrary but DateTime needs TZ (or will assume default) and Hive does not have TZ.
    * So if you start with Pig value in TZ=x and write to Hive, when you read it back the TZ may
    * be different.  The millis value should match, of course.
-   * 
+   *
    * @throws Exception
    */
   @Test
   public void testWriteTimestamp() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     DateTime d = new DateTime(1991,10,11,14,23,30, 10);//uses default TZ
-    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.plusHours(2);
     pigValueRangeTest("junitTypeTest2", "timestamp", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
       d.toString(), d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
 
     d = new DateTime(1991,10,11,23,24,25, 26);
-    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
   }
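+  // A minimal sketch of the TZ note in the javadoc above, assuming Joda-Time
+  // semantics: the zone on the value read back from Hive may differ from the
+  // one written, but the instant (millis) must be preserved.
+  //   DateTime written = new DateTime(1991, 10, 11, 14, 23, 30, 10);
+  //   DateTime readBack = written.toDateTime(DateTimeZone.getDefault());
+  //   assert written.getMillis() == readBack.getMillis();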
   //End: tests that check values from Pig that are out of range for target column
 
-
   private void pigValueRangeTestOverflow(String tblName, String hiveType, String pigType,
     HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, String format) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, null, format);
   }
+
   private void pigValueRangeTestOverflow(String tblName, String hiveType, String pigType,
                                  HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, null, null);
   }
+
   private void pigValueRangeTest(String tblName, String hiveType, String pigType,
-                                 HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, 
+                                 HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue,
                                  String expectedValue) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, expectedValue, null);
   }
 
   /**
-   * this should be overridden in subclass to test with different file formats
-   */
-  String getStorageFormat() {
-    return "RCFILE";
-  }
-  /**
    * This is used to test how Pig values of various data types which are out of range for Hive target
    * column are handled.  Currently the options are to raise an error or write NULL.
    * 1. create a data file with 1 column, 1 row
@@ -236,12 +338,12 @@ public class TestHCatStorer extends HCat
    * @param format date format to use for comparison of values since default DateTime.toString()
    *               includes TZ which is meaningless for Hive DATE type
    */
-  private void pigValueRangeTest(String tblName, String hiveType, String pigType, 
+  private void pigValueRangeTest(String tblName, String hiveType, String pigType,
                                  HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, String expectedValue, String format)
     throws Exception {
     TestHCatLoader.dropTable(tblName, driver);
     final String field = "f1";
-    TestHCatLoader.createTable(tblName, field + " " + hiveType, null, driver, getStorageFormat());
+    TestHCatLoader.createTable(tblName, field + " " + hiveType, null, driver, storageFormat);
     HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, new String[] {inputValue});
     LOG.debug("File=" + INPUT_FILE_NAME);
     dumpFile(INPUT_FILE_NAME);
@@ -270,11 +372,11 @@ public class TestHCatStorer extends HCat
           //do nothing, fall through and verify the data
           break;
         case Throw:
-          Assert.assertTrue("Expected a FrontendException", fe != null);
-          Assert.assertEquals("Expected a different FrontendException.", fe.getMessage(), "Unable to store alias A");
+          assertTrue("Expected a FrontendException", fe != null);
+          assertEquals("Expected a different FrontendException.", fe.getMessage(), "Unable to store alias A");
           return;//this test is done
         default:
-          Assert.assertFalse("Unexpected goal: " + goal, 1 == 1);
+          assertFalse("Unexpected goal: " + goal, 1 == 1);
       }
     }
     logAndRegister(server, "B = load '" + tblName + "' using " + HCatLoader.class.getName() + "();", queryNumber);
@@ -293,22 +395,23 @@ public class TestHCatStorer extends HCat
       Tuple t = itr.next();
       if("date".equals(hiveType)) {
         DateTime dateTime = (DateTime)t.get(0);
-        Assert.assertTrue(format != null);
-        Assert.assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, dateTime== null ? null : dateTime.toString(format));
+        assertNotNull(format);
+        assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, dateTime == null ? null : dateTime.toString(format));
       }
       else {
-        Assert.assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, t.isNull(0) ? null : t.get(0).toString());
+        assertEquals("Comparing Pig to Raw data for table " + tblName, expectedValue, t.isNull(0) ? null : t.get(0).toString());
       }
       //see comment at "Dumping rows via SQL..." for why this doesn't work
-      //Assert.assertEquals("Comparing Pig to Hive", t.get(0), l.get(0));
+      //assertEquals("Comparing Pig to Hive", t.get(0), l.get(0));
       numRowsRead++;
     }
-    Assert.assertEquals("Expected " + 1 + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME + "; table " +
+    assertEquals("Expected " + 1 + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME + "; table " +
       tblName, 1, numRowsRead);
     /* Misc notes:
     Unfortunately Timestamp.toString() adjusts the value for local TZ and 't' is a String
     thus the timestamp in 't' doesn't match rawData*/
   }
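+  // A hypothetical call tracing the workflow documented above end to end:
+  // store an out-of-range tinyint with the Null option and expect NULL back
+  // (table and type names mirror the tests earlier in this class).
+  //   pigValueRangeTest("junitTypeTest2", "tinyint", "int",
+  //     HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, Integer.toString(300), null, null);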
+
   /**
    * Create a data file with datatypes added in 0.13.  Read it with Pig and use
    * Pig + HCatStorer to write to a Hive table.  Then read it using Pig and Hive
@@ -316,10 +419,11 @@ public class TestHCatStorer extends HCat
    */
   @Test
   public void testDateCharTypes() throws Exception {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     final String tblName = "junit_date_char";
     TestHCatLoader.dropTable(tblName, driver);
     TestHCatLoader.createTable(tblName,
-      "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, getStorageFormat());
+      "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
     int NUM_ROWS = 5;
     String[] rows = new String[NUM_ROWS];
     for(int i = 0; i < NUM_ROWS; i++) {
@@ -358,13 +462,14 @@ public class TestHCatStorer extends HCat
         rowFromPig.append(t.get(i)).append("\t");
       }
       rowFromPig.setLength(rowFromPig.length() - 1);
-      Assert.assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
+      assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
       //see comment at "Dumping rows via SQL..." for why this doesn't work (for all types)
-      //Assert.assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
+      //assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
       numRowsRead++;
     }
-    Assert.assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
+    assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
   }
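+  // The loop above re-flattens each Pig tuple into a tab-separated string so
+  // it can be compared byte-for-byte with the raw input row; the Pig-to-Hive
+  // comparison stays commented out for the reason noted at "Dumping rows via
+  // SQL...".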
+
   static void dumpFile(String fileName) throws Exception {
     File file = new File(fileName);
     BufferedReader reader = new BufferedReader(new FileReader(file));
@@ -375,11 +480,13 @@ public class TestHCatStorer extends HCat
     }
     reader.close();
   }
+
   @Test
   public void testPartColsInData() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -400,22 +507,23 @@ public class TestHCatStorer extends HCat
 
     while (itr.hasNext()) {
       Tuple t = itr.next();
-      Assert.assertEquals(2, t.size());
-      Assert.assertEquals(t.get(0), i);
-      Assert.assertEquals(t.get(1), "1");
+      assertEquals(2, t.size());
+      assertEquals(i, t.get(0));
+      assertEquals("1", t.get(1));
       i++;
     }
 
-    Assert.assertFalse(itr.hasNext());
-    Assert.assertEquals(LOOP_SIZE, i);
+    assertFalse(itr.hasNext());
+    assertEquals(LOOP_SIZE, i);
   }
 
   @Test
   public void testMultiPartColsInData() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table employee");
     String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
 
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -444,20 +552,21 @@ public class TestHCatStorer extends HCat
     driver.run("select * from employee");
     ArrayList<String> results = new ArrayList<String>();
     driver.getResults(results);
-    Assert.assertEquals(4, results.size());
+    assertEquals(4, results.size());
     Collections.sort(results);
-    Assert.assertEquals(inputData[0], results.get(0));
-    Assert.assertEquals(inputData[1], results.get(1));
-    Assert.assertEquals(inputData[2], results.get(2));
-    Assert.assertEquals(inputData[3], results.get(3));
+    assertEquals(inputData[0], results.get(0));
+    assertEquals(inputData[1], results.get(1));
+    assertEquals(inputData[2], results.get(2));
+    assertEquals(inputData[3], results.get(3));
     driver.run("drop table employee");
   }
 
   @Test
   public void testStoreInPartiitonedTbl() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int) partitioned by (b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -478,20 +587,21 @@ public class TestHCatStorer extends HCat
 
     while (itr.hasNext()) {
       Tuple t = itr.next();
-      Assert.assertEquals(2, t.size());
-      Assert.assertEquals(t.get(0), i);
-      Assert.assertEquals(t.get(1), "1");
+      assertEquals(2, t.size());
+      assertEquals(i, t.get(0));
+      assertEquals("1", t.get(1));
       i++;
     }
 
-    Assert.assertFalse(itr.hasNext());
-    Assert.assertEquals(11, i);
+    assertFalse(itr.hasNext());
+    assertEquals(11, i);
   }
 
   @Test
   public void testNoAlias() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     driver.run("drop table junit_parted");
-    String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as " + getStorageFormat();
+    String createTable = "create table junit_parted(a int, b string) partitioned by (ds string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -506,12 +616,12 @@ public class TestHCatStorer extends HCat
       server.executeBatch();
     } catch (PigException fe) {
       PigException pe = LogUtils.getPigException(fe);
-      Assert.assertTrue(pe instanceof FrontendException);
-      Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
-      Assert.assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
+      assertTrue(pe instanceof FrontendException);
+      assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
+      assertTrue(pe.getMessage().contains("Column name for a field is not specified. Please provide the full schema as an argument to HCatStorer."));
       errCaught = true;
     }
-    Assert.assertTrue(errCaught);
+    assertTrue(errCaught);
     errCaught = false;
     try {
       server.setBatchOn();
@@ -521,20 +631,21 @@ public class TestHCatStorer extends HCat
       server.executeBatch();
     } catch (PigException fe) {
       PigException pe = LogUtils.getPigException(fe);
-      Assert.assertTrue(pe instanceof FrontendException);
-      Assert.assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
-      Assert.assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
+      assertTrue(pe instanceof FrontendException);
+      assertEquals(PigHCatUtil.PIG_EXCEPTION_CODE, pe.getErrorCode());
+      assertTrue(pe.getMessage().contains("Column names should all be in lowercase. Invalid name found: B"));
       errCaught = true;
     }
     driver.run("drop table junit_parted");
-    Assert.assertTrue(errCaught);
+    assertTrue(errCaught);
   }
 
   @Test
   public void testStoreMultiTables() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -578,18 +689,19 @@ public class TestHCatStorer extends HCat
 
     Iterator<String> itr = res.iterator();
     for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
-      Assert.assertEquals(input[i], itr.next());
+      assertEquals(input[i], itr.next());
     }
 
-    Assert.assertFalse(itr.hasNext());
+    assertFalse(itr.hasNext());
 
   }
 
   @Test
   public void testStoreWithNoSchema() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -617,18 +729,19 @@ public class TestHCatStorer extends HCat
     driver.run("drop table junit_unparted");
     Iterator<String> itr = res.iterator();
     for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
-      Assert.assertEquals(input[i], itr.next());
+      assertEquals(input[i], itr.next());
     }
 
-    Assert.assertFalse(itr.hasNext());
+    assertFalse(itr.hasNext());
 
   }
 
   @Test
   public void testStoreWithNoCtorArgs() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -656,18 +769,19 @@ public class TestHCatStorer extends HCat
     driver.run("drop table junit_unparted");
     Iterator<String> itr = res.iterator();
     for (int i = 0; i < LOOP_SIZE * LOOP_SIZE; i++) {
-      Assert.assertEquals(input[i], itr.next());
+      assertEquals(input[i], itr.next());
     }
 
-    Assert.assertFalse(itr.hasNext());
+    assertFalse(itr.hasNext());
 
   }
 
   @Test
   public void testEmptyStore() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -695,15 +809,16 @@ public class TestHCatStorer extends HCat
     driver.getResults(res);
     driver.run("drop table junit_unparted");
     Iterator<String> itr = res.iterator();
-    Assert.assertFalse(itr.hasNext());
+    assertFalse(itr.hasNext());
 
   }
 
   @Test
   public void testBagNStruct() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
     driver.run("drop table junit_unparted");
     String createTable = "create table junit_unparted(b string,a struct<a1:int>,  arr_of_struct array<string>, " +
-      "arr_of_struct2 array<struct<s1:string,s2:string>>,  arr_of_struct3 array<struct<s3:string>>) stored as " + getStorageFormat();
+      "arr_of_struct2 array<struct<s1:string,s2:string>>,  arr_of_struct3 array<struct<s3:string>>) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -726,17 +841,18 @@ public class TestHCatStorer extends HCat
     driver.getResults(res);
     driver.run("drop table junit_unparted");
     Iterator<String> itr = res.iterator();
-    Assert.assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
-    Assert.assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]", itr.next());
-    Assert.assertFalse(itr.hasNext());
+    assertEquals("zookeeper\t{\"a1\":2}\t[\"pig\"]\t[{\"s1\":\"pnuts\",\"s2\":\"hdfs\"}]\t[{\"s3\":\"hadoop\"},{\"s3\":\"hcat\"}]", itr.next());
+    assertEquals("chubby\t{\"a1\":2}\t[\"sawzall\"]\t[{\"s1\":\"bigtable\",\"s2\":\"gfs\"}]\t[{\"s3\":\"mapreduce\"},{\"s3\":\"hcat\"}]", itr.next());
+    assertFalse(itr.hasNext());
 
   }
 
   @Test
   public void testStoreFuncAllSimpleTypes() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b float, c double, d bigint, e string, h boolean, f binary, g binary) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -763,10 +879,10 @@ public class TestHCatStorer extends HCat
 
     Iterator<String> itr = res.iterator();
     String next = itr.next();
-    Assert.assertEquals("0\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL", next );
-    Assert.assertEquals("NULL\t4.2\t2.2\t4\tlets hcat\ttrue\tbinary-data\tNULL", itr.next());
-    Assert.assertEquals("3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tfalse\tbinary-data\tNULL", itr.next());
-    Assert.assertFalse(itr.hasNext());
+    assertEquals("0\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL\tNULL", next );
+    assertEquals("NULL\t4.2\t2.2\t4\tlets hcat\ttrue\tbinary-data\tNULL", itr.next());
+    assertEquals("3\t6.2999997\t3.3000000000000003\t6\tlets hcat\tfalse\tbinary-data\tNULL", itr.next());
+    assertFalse(itr.hasNext());
 
     server.registerQuery("B = load 'junit_unparted' using " + HCatLoader.class.getName() + ";");
     Iterator<Tuple> iter = server.openIterator("B");
@@ -777,21 +893,22 @@ public class TestHCatStorer extends HCat
       if (t.get(6) == null) {
         num5nulls++;
       } else {
-        Assert.assertTrue(t.get(6) instanceof DataByteArray);
+        assertTrue(t.get(6) instanceof DataByteArray);
       }
-      Assert.assertNull(t.get(7));
+      assertNull(t.get(7));
       count++;
     }
-    Assert.assertEquals(3, count);
-    Assert.assertEquals(1, num5nulls);
+    assertEquals(3, count);
+    assertEquals(1, num5nulls);
     driver.run("drop table junit_unparted");
   }
 
   @Test
   public void testStoreFuncSimple() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table junit_unparted");
-    String createTable = "create table junit_unparted(a int, b string) stored as " + getStorageFormat();
+    String createTable = "create table junit_unparted(a int, b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -821,19 +938,20 @@ public class TestHCatStorer extends HCat
     for (int i = 1; i <= LOOP_SIZE; i++) {
       String si = i + "";
       for (int j = 1; j <= LOOP_SIZE; j++) {
-        Assert.assertEquals(si + "\t" + j, itr.next());
+        assertEquals(si + "\t" + j, itr.next());
       }
     }
-    Assert.assertFalse(itr.hasNext());
+    assertFalse(itr.hasNext());
 
   }
 
   @Test
   public void testDynamicPartitioningMultiPartColsInDataPartialSpec() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table if exists employee");
     String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
 
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -856,21 +974,22 @@ public class TestHCatStorer extends HCat
     driver.run("select * from employee");
     ArrayList<String> results = new ArrayList<String>();
     driver.getResults(results);
-    Assert.assertEquals(4, results.size());
+    assertEquals(4, results.size());
     Collections.sort(results);
-    Assert.assertEquals(inputData[0], results.get(0));
-    Assert.assertEquals(inputData[1], results.get(1));
-    Assert.assertEquals(inputData[2], results.get(2));
-    Assert.assertEquals(inputData[3], results.get(3));
+    assertEquals(inputData[0], results.get(0));
+    assertEquals(inputData[1], results.get(1));
+    assertEquals(inputData[2], results.get(2));
+    assertEquals(inputData[3], results.get(3));
     driver.run("drop table employee");
   }
 
   @Test
   public void testDynamicPartitioningMultiPartColsInDataNoSpec() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table if exists employee");
     String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
 
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -893,21 +1012,22 @@ public class TestHCatStorer extends HCat
     driver.run("select * from employee");
     ArrayList<String> results = new ArrayList<String>();
     driver.getResults(results);
-    Assert.assertEquals(4, results.size());
+    assertEquals(4, results.size());
     Collections.sort(results);
-    Assert.assertEquals(inputData[0], results.get(0));
-    Assert.assertEquals(inputData[1], results.get(1));
-    Assert.assertEquals(inputData[2], results.get(2));
-    Assert.assertEquals(inputData[3], results.get(3));
+    assertEquals(inputData[0], results.get(0));
+    assertEquals(inputData[1], results.get(1));
+    assertEquals(inputData[2], results.get(2));
+    assertEquals(inputData[3], results.get(3));
     driver.run("drop table employee");
   }
 
   @Test
   public void testDynamicPartitioningMultiPartColsNoDataInDataNoSpec() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table if exists employee");
     String createTable = "CREATE TABLE employee (emp_id INT, emp_name STRING, emp_start_date STRING , emp_gender STRING ) " +
-      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + getStorageFormat();
+      " PARTITIONED BY (emp_country STRING , emp_state STRING ) STORED AS " + storageFormat;
 
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
@@ -927,15 +1047,16 @@ public class TestHCatStorer extends HCat
     driver.run("select * from employee");
     ArrayList<String> results = new ArrayList<String>();
     driver.getResults(results);
-    Assert.assertEquals(0, results.size());
+    assertEquals(0, results.size());
     driver.run("drop table employee");
   }
+
   @Test
-  public void testPartitionPublish()
-    throws IOException, CommandNeedRetryException {
+  public void testPartitionPublish() throws IOException, CommandNeedRetryException {
+    assumeTrue(!TestUtil.shouldSkip(storageFormat, DISABLED_STORAGE_FORMATS));
 
     driver.run("drop table ptn_fail");
-    String createTable = "create table ptn_fail(a int, c string) partitioned by (b string) stored as " + getStorageFormat();
+    String createTable = "create table ptn_fail(a int, c string) partitioned by (b string) stored as " + storageFormat;
     int retCode = driver.run(createTable).getResponseCode();
     if (retCode != 0) {
       throw new IOException("Failed to create table.");
@@ -967,11 +1088,11 @@ public class TestHCatStorer extends HCat
 
     ArrayList<String> res = new ArrayList<String>();
     driver.getResults(res);
-    Assert.assertEquals(0, res.size());
+    assertEquals(0, res.size());
 
     // Make sure the partitions directory is not in hdfs.
-    Assert.assertTrue((new File(TEST_WAREHOUSE_DIR + "/ptn_fail")).exists());
-    Assert.assertFalse((new File(TEST_WAREHOUSE_DIR + "/ptn_fail/b=math"))
+    assertTrue((new File(TEST_WAREHOUSE_DIR + "/ptn_fail")).exists());
+    assertFalse((new File(TEST_WAREHOUSE_DIR + "/ptn_fail/b=math"))
       .exists());
   }
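+
+  // i.e. a store that fails mid-job must not publish the partition: the table
+  // directory survives in the warehouse but no b=math partition directory is
+  // committed.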