Posted to commits@hive.apache.org by kh...@apache.org on 2014/02/05 22:02:58 UTC

svn commit: r1564924 [1/2] - in /hive/trunk: ./ hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/ hcatalog/core/src/...

Author: khorgath
Date: Wed Feb  5 21:02:57 2014
New Revision: 1564924

URL: http://svn.apache.org/r1564924
Log:
HIVE-5814 : Add DATE, TIMESTAMP, DECIMAL, CHAR, VARCHAR types support in HCat (Eugene Koifman via Sushanth Sowmyan)
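
The new types surface through HCatRecord, HCatSchemaUtils and the Hive value classes as in this
minimal sketch (the class name is illustrative; the schema string and values mirror the test
changes below):

    import java.math.BigDecimal;
    import java.sql.Date;
    import java.sql.Timestamp;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.common.type.HiveChar;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.common.type.HiveVarchar;
    import org.apache.hive.hcatalog.data.DefaultHCatRecord;
    import org.apache.hive.hcatalog.data.HCatRecord;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;

    public class Hive13TypesSketch {
      public static void main(String[] args) throws Exception {
        // Schema string has the same shape as TestDefaultHCatRecord.testGetSetByType3 below.
        HCatSchema schema = HCatSchemaUtils.getHCatSchema(
            "a:decimal(5,2),b:char(10),c:varchar(20),d:date,e:timestamp");
        List<Object> fields = new ArrayList<Object>(5);
        fields.add(HiveDecimal.create(new BigDecimal("123.45"))); // precision 5, scale 2
        fields.add(new HiveChar("hive_char", 10));
        fields.add(new HiveVarchar("hive_varchar", 20));
        fields.add(Date.valueOf("2014-01-06"));
        fields.add(new Timestamp(System.currentTimeMillis()));
        HCatRecord rec = new DefaultHCatRecord(fields);
        System.out.println(rec.getDecimal("a", schema) + " " + rec.getDate("d", schema));
      }
    }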

Modified:
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/HcatTestUtils.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java
    hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
    hive/trunk/itests/hcatalog-unit/pom.xml
    hive/trunk/pom.xml

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java Wed Feb  5 21:02:57 2014
@@ -63,17 +63,12 @@ public enum HCatContext {
       conf = newConf;
       return this;
     }
-
-    if (conf != newConf) {
-      synchronized (conf) {
-        for (Map.Entry<String, String> entry : conf) {
-          if ((entry.getKey().matches("hcat.*")) && (newConf.get(entry.getKey()) == null)) {
-            newConf.set(entry.getKey(), entry.getValue());
-          }
-        }        
+    for (Map.Entry<String, String> entry : conf) {
+      if ((entry.getKey().matches("hcat.*")) && (newConf.get(entry.getKey()) == null)) {
+        newConf.set(entry.getKey(), entry.getValue());
       }
-      conf = newConf;
     }
+    conf = newConf;
     return this;
   }
 

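The simplified HCatContext merge above copies every "hcat.*" property from the current conf into
the new conf unless the new conf already sets it, then swaps the reference. A standalone sketch of
that rule, assuming plain Hadoop Configuration objects (the helper name is hypothetical):

    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;

    public class HcatConfMergeSketch {
      // Hypothetical helper; mirrors the loop in HCatContext above.
      static Configuration mergeHcatProperties(Configuration oldConf, Configuration newConf) {
        for (Map.Entry<String, String> entry : oldConf) {
          if (entry.getKey().matches("hcat.*") && newConf.get(entry.getKey()) == null) {
            newConf.set(entry.getKey(), entry.getValue());
          }
        }
        return newConf;
      }

      public static void main(String[] args) {
        Configuration oldConf = new Configuration(false);
        oldConf.set("hcat.example.flag", "true");
        Configuration newConf = new Configuration(false);
        System.out.println(mergeHcatProperties(oldConf, newConf).get("hcat.example.flag")); // true
      }
    }
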
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java Wed Feb  5 21:02:57 2014
@@ -119,7 +119,7 @@ public class HCatUtil {
   }
 
   public static String encodeBytes(byte[] bytes) {
-    StringBuffer strBuf = new StringBuffer();
+    StringBuilder strBuf = new StringBuilder();
 
     for (int i = 0; i < bytes.length; i++) {
       strBuf.append((char) (((bytes[i] >> 4) & 0xF) + ('a')));
@@ -282,11 +282,13 @@ public class HCatUtil {
           .getTypeInfoFromTypeString(tableField.getType());
 
         if (!partitionType.equals(tableType)) {
-          throw new HCatException(
-            ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, "Column <"
+          String msg =
+            "Column <"
             + field.getName() + ">, expected <"
             + tableType.getTypeName() + ">, got <"
-            + partitionType.getTypeName() + ">");
+            + partitionType.getTypeName() + ">";
+          LOG.warn(msg);
+          throw new HCatException(ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, msg);
         }
       }
     }
@@ -657,4 +659,12 @@ public class HCatUtil {
   public static String makePathASafeFileName(String filePath) {
     return new File(filePath).getPath().replaceAll("\\\\", "/");
   }
+  public static void assertNotNull(Object t, String msg, Logger logger) {
+    if(t == null) {
+      if(logger != null) {
+        logger.warn(msg);
+      }
+      throw new IllegalArgumentException(msg);
+    }
+  }
 }

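A quick sketch of the new assertNotNull helper, assuming the Logger parameter is org.slf4j.Logger
(the logger type used elsewhere in this patch); it warns through the supplied logger, when one is
given, before throwing:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.apache.hive.hcatalog.common.HCatUtil;

    public class AssertNotNullSketch {
      private static final Logger LOG = LoggerFactory.getLogger(AssertNotNullSketch.class);

      public static void main(String[] args) {
        HCatUtil.assertNotNull("ok", "value cannot be null", LOG);  // passes silently
        HCatUtil.assertNotNull(null, "value cannot be null", LOG);  // warns, then throws IllegalArgumentException
      }
    }
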
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java Wed Feb  5 21:02:57 2014
@@ -19,15 +19,26 @@
 
 package org.apache.hive.hcatalog.data;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.Map.Entry;
 
-
+/*
+ * The data is written and read within the same M/R job, thus it should never be written by one
+ * version of Hive and read by another.
+ * @see org.apache.hive.hcatalog.data.ReaderWriter
+ */
 public abstract class DataType {
-
+  //todo: this should be moved to be an inner class of ReaderWriter, as that is the only place
+  // it is used
   public static final byte NULL = 1;
   public static final byte BOOLEAN = 5;
   public static final byte BYTE = 6;
@@ -38,6 +49,11 @@ public abstract class DataType {
   public static final byte DOUBLE = 25;
   public static final byte STRING = 55;
   public static final byte BINARY = 60;
+  public static final byte CHAR = 61;
+  public static final byte VARCHAR = 62;
+  public static final byte DECIMAL = 63;
+  public static final byte DATE = 64;
+  public static final byte TIMESTAMP = 65;
 
   public static final byte MAP = 100;
   public static final byte STRUCT = 110;
@@ -79,6 +95,16 @@ public abstract class DataType {
       return MAP;
     } else if (o instanceof byte[]) {
       return BINARY;
+    } else if(o instanceof HiveChar) {
+      return CHAR;
+    } else if(o instanceof HiveVarchar) {
+      return VARCHAR;
+    } else if(o instanceof HiveDecimal) {
+      return DECIMAL;
+    } else if(o instanceof Date) {
+      return DATE;
+    } else if(o instanceof Timestamp) {
+      return TIMESTAMP;
     } else {
       return ERROR;
     }
@@ -170,8 +196,17 @@ public abstract class DataType {
           return 0;
         }
       }
-
-      default:
+      case CHAR:
+        return ((HiveChar)o1).compareTo((HiveChar)o2);  
+      case VARCHAR:
+        return ((HiveVarchar)o1).compareTo((HiveVarchar)o2);
+      case DECIMAL:
+        return ((HiveDecimal)o1).compareTo((HiveDecimal)o2);
+      case DATE:
+        return ((Date)o1).compareTo((Date)o2);
+      case TIMESTAMP:
+        return ((Timestamp)o1).compareTo((Timestamp)o2);
+     default:
         throw new RuntimeException("Unkown type " + dt1 +
           " in compare");
       }

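With the new tags in place, DataType.findType() recognizes the Hive value classes, and for two
values of the same type DataType.compare() delegates to that type's own compareTo. A minimal
sketch (class name illustrative):

    import java.sql.Date;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hive.hcatalog.data.DataType;

    public class DataTypeSketch {
      public static void main(String[] args) {
        System.out.println(DataType.findType(HiveDecimal.create("123.45")) == DataType.DECIMAL); // true
        System.out.println(DataType.findType(Date.valueOf("2014-01-06")) == DataType.DATE);      // true
        // Same type tag on both sides, so this falls through to Date.compareTo(Date).
        System.out.println(DataType.compare(Date.valueOf("2014-01-05"), Date.valueOf("2014-01-06")) < 0); // true
      }
    }
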
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java Wed Feb  5 21:02:57 2014
@@ -19,11 +19,16 @@
 
 package org.apache.hive.hcatalog.data;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
@@ -149,5 +154,43 @@ public abstract class HCatRecord impleme
   public void setMap(String fieldName, HCatSchema recordSchema, Map<?, ?> value) throws HCatException {
     set(fieldName, recordSchema, value);
   }
+  public void setChar(String fieldName, HCatSchema recordSchema, HiveChar value)
+          throws HCatException {
+    set(fieldName, recordSchema, value);
+  }
+  public HiveChar getChar(String fieldName, HCatSchema recordSchema) throws HCatException {
+    return (HiveChar) get(fieldName, recordSchema, HiveChar.class);
+  }
+  public void setVarchar(String fieldName, HCatSchema recordSchema, HiveVarchar value)
+          throws HCatException {
+    set(fieldName, recordSchema, value);
+  }
+  public HiveVarchar getVarchar(String fieldName, HCatSchema recordSchema) throws HCatException {
+    return (HiveVarchar) get(fieldName, recordSchema, HiveVarchar.class);
+  }
+  public void setDecimal(String fieldName, HCatSchema recordSchema, HiveDecimal value)
+          throws HCatException {
+    set(fieldName, recordSchema, value);
+  }
+  public HiveDecimal getDecimal(String fieldName, HCatSchema recordSchema) throws HCatException {
+    return (HiveDecimal) get(fieldName, recordSchema, HiveDecimal.class);
+  }
 
+  /**
+   * Note that the proper way to construct a java.sql.Date for use with this object is 
+   * Date.valueOf("1999-12-31").  
+   */
+  public void setDate(String fieldName, HCatSchema recordSchema, Date value) throws HCatException {
+    set(fieldName, recordSchema, value);
+  }
+  public Date getDate(String fieldName, HCatSchema recordSchema) throws HCatException {
+    return (Date) get(fieldName, recordSchema, Date.class);
+  }
+  public void setTimestamp(String fieldName, HCatSchema recordSchema, Timestamp value)
+          throws HCatException {
+    set(fieldName, recordSchema, value);
+  }
+  public Timestamp getTimestamp(String fieldName, HCatSchema recordSchema) throws HCatException {
+    return (Timestamp) get(fieldName, recordSchema, Timestamp.class);
+  }
 }

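Per the javadoc note above, java.sql.Date values for the new accessors are best built with
Date.valueOf(); a minimal sketch (field name and schema are illustrative):

    import java.sql.Date;
    import org.apache.hive.hcatalog.data.DefaultHCatRecord;
    import org.apache.hive.hcatalog.data.HCatRecord;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;

    public class DateAccessorSketch {
      public static void main(String[] args) throws Exception {
        HCatSchema schema = HCatSchemaUtils.getHCatSchema("d:date");
        HCatRecord rec = new DefaultHCatRecord(1);
        // Date.valueOf() yields a date-only value, with no time-of-day component.
        rec.setDate("d", schema, Date.valueOf("1999-12-31"));
        System.out.println(rec.getDate("d", schema)); // 1999-12-31
      }
    }
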
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java Wed Feb  5 21:02:57 2014
@@ -20,6 +20,8 @@ package org.apache.hive.hcatalog.data;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -31,6 +33,9 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
@@ -46,13 +51,19 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -75,9 +86,6 @@ public class JsonSerDe implements SerDe 
 
   private static final Logger LOG = LoggerFactory.getLogger(JsonSerDe.class);
   private List<String> columnNames;
-  private List<TypeInfo> columnTypes;
-
-  private StructTypeInfo rowTypeInfo;
   private HCatSchema schema;
 
   private JsonFactory jsonFactory = null;
@@ -87,6 +95,8 @@ public class JsonSerDe implements SerDe 
   @Override
   public void initialize(Configuration conf, Properties tbl)
     throws SerDeException {
+    List<TypeInfo> columnTypes;
+    StructTypeInfo rowTypeInfo;
 
 
     LOG.debug("Initializing JsonSerDe");
@@ -195,7 +205,7 @@ public class JsonSerDe implements SerDe 
       // column mapping, and we're about to proceed.
     }
     HCatFieldSchema hcatFieldSchema = s.getFields().get(fpos);
-    Object currField = extractCurrentField(p, null, hcatFieldSchema, false);
+    Object currField = extractCurrentField(p, hcatFieldSchema, false);
     r.set(fpos, currField);
   }
 
@@ -241,17 +251,12 @@ public class JsonSerDe implements SerDe 
   /**
    * Utility method to extract current expected field from given JsonParser
    *
-   * To get the field, we need either a type or a hcatFieldSchema(necessary for complex types)
-   * It is possible that one of them can be null, and so, if so, the other is instantiated
-   * from the other
-   *
    * isTokenCurrent is a boolean variable also passed in, which determines
    * if the JsonParser is already at the token we expect to read next, or
    * needs advancing to the next before we read.
    */
-  private Object extractCurrentField(JsonParser p, Type t,
-                     HCatFieldSchema hcatFieldSchema, boolean isTokenCurrent) throws IOException, JsonParseException,
-    HCatException {
+  private Object extractCurrentField(JsonParser p, HCatFieldSchema hcatFieldSchema,
+                                     boolean isTokenCurrent) throws IOException {
     Object val = null;
     JsonToken valueToken;
     if (isTokenCurrent) {
@@ -259,11 +264,7 @@ public class JsonSerDe implements SerDe 
     } else {
       valueToken = p.nextToken();
     }
-
-    if (hcatFieldSchema != null) {
-      t = hcatFieldSchema.getType();
-    }
-    switch (t) {
+    switch (hcatFieldSchema.getType()) {
     case INT:
       val = (valueToken == JsonToken.VALUE_NULL) ? null : p.getIntValue();
       break;
@@ -295,6 +296,23 @@ public class JsonSerDe implements SerDe 
       break;
     case BINARY:
       throw new IOException("JsonSerDe does not support BINARY type");
+    case DATE:
+      val = (valueToken == JsonToken.VALUE_NULL) ? null : Date.valueOf(p.getText());
+      break;
+    case TIMESTAMP:
+      val = (valueToken == JsonToken.VALUE_NULL) ? null : Timestamp.valueOf(p.getText());
+      break;
+    case DECIMAL:
+      val = (valueToken == JsonToken.VALUE_NULL) ? null : HiveDecimal.create(p.getText());
+      break;
+    case VARCHAR:
+      int vLen = ((BaseCharTypeInfo)hcatFieldSchema.getTypeInfo()).getLength();
+      val = (valueToken == JsonToken.VALUE_NULL) ? null : new HiveVarchar(p.getText(), vLen);
+      break;
+    case CHAR:
+      int cLen = ((BaseCharTypeInfo)hcatFieldSchema.getTypeInfo()).getLength();
+      val = (valueToken == JsonToken.VALUE_NULL) ? null : new HiveChar(p.getText(), cLen);
+      break;
     case ARRAY:
       if (valueToken == JsonToken.VALUE_NULL) {
         val = null;
@@ -305,7 +323,7 @@ public class JsonSerDe implements SerDe 
       }
       List<Object> arr = new ArrayList<Object>();
       while ((valueToken = p.nextToken()) != JsonToken.END_ARRAY) {
-        arr.add(extractCurrentField(p, null, hcatFieldSchema.getArrayElementSchema().get(0), true));
+        arr.add(extractCurrentField(p, hcatFieldSchema.getArrayElementSchema().get(0), true));
       }
       val = arr;
       break;
@@ -318,15 +336,14 @@ public class JsonSerDe implements SerDe 
         throw new IOException("Start of Object expected");
       }
       Map<Object, Object> map = new LinkedHashMap<Object, Object>();
-      Type keyType = hcatFieldSchema.getMapKeyType();
       HCatFieldSchema valueSchema = hcatFieldSchema.getMapValueSchema().get(0);
       while ((valueToken = p.nextToken()) != JsonToken.END_OBJECT) {
-        Object k = getObjectOfCorrespondingPrimitiveType(p.getCurrentName(), keyType);
+        Object k = getObjectOfCorrespondingPrimitiveType(p.getCurrentName(), hcatFieldSchema.getMapKeyTypeInfo());
         Object v;
         if (valueSchema.getType() == HCatFieldSchema.Type.STRUCT) {
-          v = extractCurrentField(p, null, valueSchema, false);
+          v = extractCurrentField(p, valueSchema, false);
         } else {
-          v = extractCurrentField(p, null, valueSchema, true);
+          v = extractCurrentField(p, valueSchema, true);
         }
 
         map.put(k, v);
@@ -350,12 +367,16 @@ public class JsonSerDe implements SerDe 
       }
       val = struct;
       break;
+    default:
+      LOG.error("Unknown type found: " + hcatFieldSchema.getType());
+      return null;
     }
     return val;
   }
 
-  private Object getObjectOfCorrespondingPrimitiveType(String s, Type t) throws IOException {
-    switch (t) {
+  private Object getObjectOfCorrespondingPrimitiveType(String s, PrimitiveTypeInfo mapKeyType)
+    throws IOException {
+    switch (Type.getPrimitiveHType(mapKeyType)) {
     case INT:
       return Integer.valueOf(s);
     case TINYINT:
@@ -374,8 +395,18 @@ public class JsonSerDe implements SerDe 
       return s;
     case BINARY:
       throw new IOException("JsonSerDe does not support BINARY type");
+    case DATE:
+      return Date.valueOf(s);
+    case TIMESTAMP:
+      return Timestamp.valueOf(s);
+    case DECIMAL:
+      return HiveDecimal.create(s);
+    case VARCHAR:
+      return new HiveVarchar(s, ((BaseCharTypeInfo)mapKeyType).getLength());
+    case CHAR:
+      return new HiveChar(s, ((BaseCharTypeInfo)mapKeyType).getLength());
     }
-    throw new IOException("Could not convert from string to map type " + t);
+    throw new IOException("Could not convert from string to map type " + mapKeyType.getTypeName());
   }
 
   /**
@@ -399,9 +430,7 @@ public class JsonSerDe implements SerDe 
           if (i > 0) {
             sb.append(SerDeUtils.COMMA);
           }
-          sb.append(SerDeUtils.QUOTE);
-          sb.append(columnNames.get(i));
-          sb.append(SerDeUtils.QUOTE);
+          appendWithQuotes(sb, columnNames.get(i));
           sb.append(SerDeUtils.COLON);
           buildJSONString(sb, soi.getStructFieldData(obj, structFields.get(i)),
             structFields.get(i).getFieldObjectInspector());
@@ -415,7 +444,9 @@ public class JsonSerDe implements SerDe 
     }
     return new Text(sb.toString());
   }
-
+  private static StringBuilder appendWithQuotes(StringBuilder sb, String value) {
+    return sb == null ? null : sb.append(SerDeUtils.QUOTE).append(value).append(SerDeUtils.QUOTE);
+  }
   // TODO : code section copied over from SerDeUtils because of non-standard json production there
   // should use quotes for all field names. We should fix this there, and then remove this copy.
   // See http://jackson.codehaus.org/1.7.3/javadoc/org/codehaus/jackson/JsonParser.Feature.html#ALLOW_UNQUOTED_FIELD_NAMES
@@ -461,25 +492,37 @@ public class JsonSerDe implements SerDe 
           break;
         }
         case STRING: {
-          sb.append('"');
-          sb.append(SerDeUtils.escapeString(((StringObjectInspector) poi)
-            .getPrimitiveJavaObject(o)));
-          sb.append('"');
-          break;
-        }
-        case TIMESTAMP: {
-          sb.append('"');
-          sb.append(((TimestampObjectInspector) poi)
-            .getPrimitiveWritableObject(o));
-          sb.append('"');
+          String s = 
+                  SerDeUtils.escapeString(((StringObjectInspector) poi).getPrimitiveJavaObject(o));
+          appendWithQuotes(sb, s);
           break;
         }
         case BINARY: {
           throw new IOException("JsonSerDe does not support BINARY type");
         }
+        case DATE:
+          Date d = ((DateObjectInspector)poi).getPrimitiveJavaObject(o);
+          appendWithQuotes(sb, d.toString());
+          break;
+        case TIMESTAMP: {
+          Timestamp t = ((TimestampObjectInspector) poi).getPrimitiveJavaObject(o);
+          appendWithQuotes(sb, t.toString());
+          break;
+        }
+        case DECIMAL:
+          sb.append(((HiveDecimalObjectInspector)poi).getPrimitiveJavaObject(o));
+          break;
+        case VARCHAR:
+          appendWithQuotes(sb, 
+                  ((HiveVarcharObjectInspector)poi).getPrimitiveJavaObject(o).toString());
+          break;
+        case CHAR:
+          //this should use HiveChar.getPaddedValue() but it's protected; currently (v0.13)
+          // HiveChar.toString() returns getPaddedValue()
+          appendWithQuotes(sb, ((HiveCharObjectInspector)poi).getPrimitiveJavaObject(o).toString());
+          break;
         default:
-          throw new RuntimeException("Unknown primitive type: "
-            + poi.getPrimitiveCategory());
+          throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
         }
       }
       break;
@@ -524,13 +567,11 @@ public class JsonSerDe implements SerDe 
           StringBuilder keyBuilder = new StringBuilder();
           buildJSONString(keyBuilder, e.getKey(), mapKeyObjectInspector);
           String keyString = keyBuilder.toString().trim();
-          boolean doQuoting = (!keyString.isEmpty()) && (keyString.charAt(0) != SerDeUtils.QUOTE);
-          if (doQuoting) {
-            sb.append(SerDeUtils.QUOTE);
+          if((!keyString.isEmpty()) && (keyString.charAt(0) != SerDeUtils.QUOTE)) {
+            appendWithQuotes(sb, keyString);
           }
-          sb.append(keyString);
-          if (doQuoting) {
-            sb.append(SerDeUtils.QUOTE);
+          else {
+            sb.append(keyString);
           }
           sb.append(SerDeUtils.COLON);
           buildJSONString(sb, e.getValue(), mapValueObjectInspector);
@@ -550,9 +591,7 @@ public class JsonSerDe implements SerDe 
           if (i > 0) {
             sb.append(SerDeUtils.COMMA);
           }
-          sb.append(SerDeUtils.QUOTE);
-          sb.append(structFields.get(i).getFieldName());
-          sb.append(SerDeUtils.QUOTE);
+          appendWithQuotes(sb, structFields.get(i).getFieldName());
           sb.append(SerDeUtils.COLON);
           buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)),
             structFields.get(i).getFieldObjectInspector());

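With these changes JsonSerDe renders DATE, TIMESTAMP, CHAR and VARCHAR values as quoted JSON
strings and DECIMAL as a bare number. A minimal initialization sketch mirroring the TestJsonSerDe
properties below (column names are illustrative):

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hive.hcatalog.data.JsonSerDe;

    public class JsonSerDeInitSketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(serdeConstants.LIST_COLUMNS, "bd,hc,hvc,dt,ts");
        props.put(serdeConstants.LIST_COLUMN_TYPES, "decimal(5,2),char(10),varchar(20),date,timestamp");
        JsonSerDe serde = new JsonSerDe();
        serde.initialize(new Configuration(), props);
      }
    }
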
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java Wed Feb  5 21:02:57 2014
@@ -22,6 +22,7 @@ package org.apache.hive.hcatalog.data;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -29,10 +30,21 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.VIntWritable;
 import org.apache.hadoop.io.VLongWritable;
 
-
+/*
+ * when adding support for new types, we should try to use classes of the Hive value system to keep
+ * things more readable (though functionally it should not make a difference). 
+ */
 public abstract class ReaderWriter {
 
   private static final String UTF8 = "UTF-8";
@@ -96,7 +108,26 @@ public abstract class ReaderWriter {
         list.add(readDatum(in));
       }
       return list;
-
+    case DataType.CHAR:
+      HiveCharWritable hcw = new HiveCharWritable();
+      hcw.readFields(in);
+      return hcw.getHiveChar();
+    case DataType.VARCHAR:
+      HiveVarcharWritable hvw = new HiveVarcharWritable();
+      hvw.readFields(in);
+      return hvw.getHiveVarchar();
+    case DataType.DECIMAL:
+      HiveDecimalWritable hdw = new HiveDecimalWritable();
+      hdw.readFields(in);
+      return hdw.getHiveDecimal();
+    case DataType.DATE:
+      DateWritable dw = new DateWritable();
+      dw.readFields(in);
+      return dw.get();
+    case DataType.TIMESTAMP:
+      TimestampWritable tw = new TimestampWritable();
+      tw.readFields(in);
+      return tw.getTimestamp();
     default:
       throw new IOException("Unexpected data type " + type +
         " found in stream.");
@@ -106,9 +137,9 @@ public abstract class ReaderWriter {
   public static void writeDatum(DataOutput out, Object val) throws IOException {
     // write the data type
     byte type = DataType.findType(val);
+    out.write(type);
     switch (type) {
     case DataType.LIST:
-      out.writeByte(DataType.LIST);
       List<?> list = (List<?>) val;
       int sz = list.size();
       out.writeInt(sz);
@@ -118,7 +149,6 @@ public abstract class ReaderWriter {
       return;
 
     case DataType.MAP:
-      out.writeByte(DataType.MAP);
       Map<?, ?> m = (Map<?, ?>) val;
       out.writeInt(m.size());
       Iterator<?> i =
@@ -131,59 +161,64 @@ public abstract class ReaderWriter {
       return;
 
     case DataType.INTEGER:
-      out.writeByte(DataType.INTEGER);
       new VIntWritable((Integer) val).write(out);
       return;
 
     case DataType.LONG:
-      out.writeByte(DataType.LONG);
       new VLongWritable((Long) val).write(out);
       return;
 
     case DataType.FLOAT:
-      out.writeByte(DataType.FLOAT);
       out.writeFloat((Float) val);
       return;
 
     case DataType.DOUBLE:
-      out.writeByte(DataType.DOUBLE);
       out.writeDouble((Double) val);
       return;
 
     case DataType.BOOLEAN:
-      out.writeByte(DataType.BOOLEAN);
       out.writeBoolean((Boolean) val);
       return;
 
     case DataType.BYTE:
-      out.writeByte(DataType.BYTE);
       out.writeByte((Byte) val);
       return;
 
     case DataType.SHORT:
-      out.writeByte(DataType.SHORT);
       out.writeShort((Short) val);
       return;
 
     case DataType.STRING:
       String s = (String) val;
       byte[] utfBytes = s.getBytes(ReaderWriter.UTF8);
-      out.writeByte(DataType.STRING);
       out.writeInt(utfBytes.length);
       out.write(utfBytes);
       return;
 
     case DataType.BINARY:
       byte[] ba = (byte[]) val;
-      out.writeByte(DataType.BINARY);
       out.writeInt(ba.length);
       out.write(ba);
       return;
 
     case DataType.NULL:
-      out.writeByte(DataType.NULL);
+      //for NULL we just write out the type
+      return;
+    case DataType.CHAR:
+      new HiveCharWritable((HiveChar)val).write(out);
+      return;
+    case DataType.VARCHAR:
+      new HiveVarcharWritable((HiveVarchar)val).write(out);
+      return;
+    case DataType.DECIMAL:
+      new HiveDecimalWritable((HiveDecimal)val).write(out);
+      return;
+    case DataType.DATE:
+      new DateWritable((Date)val).write(out);
+      return;
+    case DataType.TIMESTAMP:
+      new TimestampWritable((java.sql.Timestamp)val).write(out);
       return;
-
     default:
       throw new IOException("Unexpected data type " + type +
         " found in stream.");

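ReaderWriter.writeDatum() now writes the one-byte type tag once, up front, and each branch writes
only the payload (for NULL the tag alone suffices); readDatum() reverses this. A round-trip sketch
(class name illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.sql.Date;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hive.hcatalog.data.ReaderWriter;

    public class ReaderWriterRoundTrip {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        ReaderWriter.writeDatum(out, HiveDecimal.create("123.45"));
        ReaderWriter.writeDatum(out, Date.valueOf("2014-01-06"));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(ReaderWriter.readDatum(in)); // 123.45
        System.out.println(ReaderWriter.readDatum(in)); // 2014-01-06
      }
    }
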
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java Wed Feb  5 21:02:57 2014
@@ -21,23 +21,86 @@ package org.apache.hive.hcatalog.data.sc
 import java.io.Serializable;
 
 import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hive.hcatalog.common.HCatException;
+import org.apache.hive.hcatalog.common.HCatUtil;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HCatFieldSchema implements Serializable {
-
+/*the implementation of HCatFieldSchema is a bit messy: with the addition of parametrized
+types (e.g. char(7)) we need to represent something richer than an enum, but for backwards
+compatibility (and the effort required to do a full refactoring) this class has both 'type' and
+'typeInfo'; similarly for mapKeyType/mapKeyTypeInfo */
+  
   public enum Type {
-    INT,
-    TINYINT,
-    SMALLINT,
-    BIGINT,
-    BOOLEAN,
-    FLOAT,
-    DOUBLE,
-    STRING,
-    ARRAY,
-    MAP,
-    STRUCT,
-    BINARY,
+    /*this captures the mapping of Hive type names to HCat type names; in the long run
+    * we should just use Hive types directly, but that is a larger refactoring effort.
+    * For HCat->Pig mapping see PigHCatUtil.getPigType(Type)
+    * For Pig->HCat mapping see HCatBaseStorer#validateSchema(...)*/
+    BOOLEAN(PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN),
+    TINYINT(PrimitiveObjectInspector.PrimitiveCategory.BYTE),
+    SMALLINT(PrimitiveObjectInspector.PrimitiveCategory.SHORT),
+    INT(PrimitiveObjectInspector.PrimitiveCategory.INT),
+    BIGINT(PrimitiveObjectInspector.PrimitiveCategory.LONG),
+    FLOAT(PrimitiveObjectInspector.PrimitiveCategory.FLOAT),
+    DOUBLE(PrimitiveObjectInspector.PrimitiveCategory.DOUBLE),
+    DECIMAL(PrimitiveObjectInspector.PrimitiveCategory.DECIMAL),
+    STRING(PrimitiveObjectInspector.PrimitiveCategory.STRING),
+    CHAR(PrimitiveObjectInspector.PrimitiveCategory.CHAR),
+    VARCHAR(PrimitiveObjectInspector.PrimitiveCategory.VARCHAR),
+    BINARY(PrimitiveObjectInspector.PrimitiveCategory.BINARY),
+    DATE(PrimitiveObjectInspector.PrimitiveCategory.DATE), 
+    TIMESTAMP(PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP), 
+
+    ARRAY(ObjectInspector.Category.LIST),
+    MAP(ObjectInspector.Category.MAP),
+    STRUCT(ObjectInspector.Category.STRUCT);
+
+    
+    private final ObjectInspector.Category category;
+    private final PrimitiveObjectInspector.PrimitiveCategory primitiveCategory;
+    private Type(ObjectInspector.Category cat) {
+      category = cat;
+      primitiveCategory = null;
+      assert category != ObjectInspector.Category.PRIMITIVE : 
+              "This c'tor should be used for complex category types";
+    }
+    private Type(PrimitiveObjectInspector.PrimitiveCategory primCat) {
+      category = ObjectInspector.Category.PRIMITIVE;
+      primitiveCategory = primCat;
+    }
+    public ObjectInspector.Category getCategory() {
+      return category;
+    }
+    /**
+     * May return {@code null}
+     */
+    public PrimitiveObjectInspector.PrimitiveCategory getPrimitiveCategory() {
+      return primitiveCategory;
+    }
+    public static Type getPrimitiveHType(PrimitiveTypeInfo basePrimitiveTypeInfo) {
+      for(Type t : values()) {
+        if(t.getPrimitiveCategory() == basePrimitiveTypeInfo.getPrimitiveCategory()) {
+          return t;
+        }
+      }
+      throw new TypeNotPresentException(basePrimitiveTypeInfo.getTypeName(), null);
+    }
+    //aid in testing
+    public static int numPrimitiveTypes() {
+      int numPrimitives = 0;
+      for(Type t : values()) {
+        if(t.category == ObjectInspector.Category.PRIMITIVE) {
+          numPrimitives++;
+        }
+      }
+      return numPrimitives;
+    }
   }
 
   public enum Category {
@@ -59,10 +122,8 @@ public class HCatFieldSchema implements 
     }
   }
 
-  ;
-
   public boolean isComplex() {
-    return (category == Category.PRIMITIVE) ? false : true;
+    return category != Category.PRIMITIVE;
   }
 
   /**
@@ -72,6 +133,9 @@ public class HCatFieldSchema implements 
 
   String fieldName = null;
   String comment = null;
+  /**
+   * @deprecated use {@link #typeInfo}
+   */
   Type type = null;
   Category category = null;
 
@@ -82,9 +146,18 @@ public class HCatFieldSchema implements 
   HCatSchema subSchema = null;
 
   // populated if column is Map type
+  @Deprecated
   Type mapKeyType = null;
 
   private String typeString = null;
+  /**
+   * This is needed for parametrized types such as decimal(8,9), char(7), varchar(6)
+   */
+  private PrimitiveTypeInfo typeInfo;
+  /**
+   * represents key type for a Map; currently Hive only supports primitive keys
+   */
+  private PrimitiveTypeInfo mapKeyTypeInfo;
 
   @SuppressWarnings("unused")
   private HCatFieldSchema() {
@@ -94,6 +167,7 @@ public class HCatFieldSchema implements 
   /**
    * Returns type of the field
    * @return type of the field
+   * @deprecated use {@link #getTypeInfo()}
    */
   public Type getType() {
     return type;
@@ -118,12 +192,18 @@ public class HCatFieldSchema implements 
   public String getComment() {
     return comment;
   }
-
+  /**
+   * May return {@code null}
+   */
+  public PrimitiveTypeInfo getTypeInfo() {
+    return typeInfo;
+  }
   /**
    * Constructor constructing a primitive datatype HCatFieldSchema
    * @param fieldName Name of the primitive field
    * @param type Type of the primitive field
    * @throws HCatException if call made on non-primitive types
+   * @deprecated as of Hive 0.13; use {@link #HCatFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, String)}
    */
   public HCatFieldSchema(String fieldName, Type type, String comment) throws HCatException {
     assertTypeInCategory(type, Category.PRIMITIVE, fieldName);
@@ -132,6 +212,17 @@ public class HCatFieldSchema implements 
     this.category = Category.PRIMITIVE;
     this.comment = comment;
   }
+  public HCatFieldSchema(String fieldName, PrimitiveTypeInfo typeInfo, String comment)
+          throws HCatException {
+    this.fieldName = fieldName;
+    //HCatUtil.assertNotNull(fieldName, "fieldName cannot be null");//seems sometimes it can be 
+    // null, for ARRAY types in particular (which may be a complex type)
+    this.category = Category.PRIMITIVE;
+    this.typeInfo = typeInfo;
+    HCatUtil.assertNotNull(typeInfo, "typeInfo cannot be null; fieldName=" + fieldName, null);
+    type = Type.getPrimitiveHType(typeInfo);
+    this.comment = comment;
+  }
 
   /**
    * Constructor for constructing a ARRAY type or STRUCT type HCatFieldSchema, passing type and subschema
@@ -164,9 +255,12 @@ public class HCatFieldSchema implements 
    * @param mapKeyType - key type of the Map
    * @param mapValueSchema - subschema of the value of the Map
    * @throws HCatException if call made on non-Map types
+   * @deprecated use {@link #createMapTypeFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, HCatSchema, String)}
    */
   public HCatFieldSchema(String fieldName, Type type, Type mapKeyType, HCatSchema mapValueSchema, String comment) throws HCatException {
     assertTypeInCategory(type, Category.MAP, fieldName);
+    //Hive only supports primitive map keys: 
+    //https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Types#LanguageManualTypes-ComplexTypes
     assertTypeInCategory(mapKeyType, Category.PRIMITIVE, fieldName);
     this.fieldName = fieldName;
     this.type = Type.MAP;
@@ -176,6 +270,16 @@ public class HCatFieldSchema implements 
     this.subSchema.get(0).setName(null);
     this.comment = comment;
   }
+  public static HCatFieldSchema createMapTypeFieldSchema(String fieldName, PrimitiveTypeInfo mapKeyType, 
+                                                         HCatSchema mapValueSchema, 
+                                                         String comment) throws HCatException {
+    HCatFieldSchema mapSchema = new HCatFieldSchema(fieldName, Type.MAP,  
+            Type.getPrimitiveHType(mapKeyType), 
+            mapValueSchema, comment);
+    mapSchema.mapKeyTypeInfo = mapKeyType;
+    return mapSchema;
+  }
+  
 
   public HCatSchema getStructSubSchema() throws HCatException {
     assertTypeInCategory(this.type, Category.STRUCT, this.fieldName);
@@ -186,12 +290,17 @@ public class HCatFieldSchema implements 
     assertTypeInCategory(this.type, Category.ARRAY, this.fieldName);
     return subSchema;
   }
-
+  /**
+   * @deprecated use {@link #getMapKeyTypeInfo()}
+   */
   public Type getMapKeyType() throws HCatException {
     assertTypeInCategory(this.type, Category.MAP, this.fieldName);
     return mapKeyType;
   }
-
+  public PrimitiveTypeInfo getMapKeyTypeInfo() throws HCatException {
+    assertTypeInCategory(this.type, Category.MAP, this.fieldName);
+    return mapKeyTypeInfo;
+  }
   public HCatSchema getMapValueSchema() throws HCatException {
     assertTypeInCategory(this.type, Category.MAP, this.fieldName);
     return subSchema;
@@ -227,8 +336,8 @@ public class HCatFieldSchema implements 
     }
 
     StringBuilder sb = new StringBuilder();
-    if (Category.PRIMITIVE == category) {
-      sb.append(type);
+    if (!isComplex()) {
+      sb.append(typeInfo == null ? type : typeInfo.getTypeName());
     } else if (Category.STRUCT == category) {
       sb.append("struct<");
       sb.append(subSchema.getSchemaAsTypeString());
@@ -239,7 +348,7 @@ public class HCatFieldSchema implements 
       sb.append(">");
     } else if (Category.MAP == category) {
       sb.append("map<");
-      sb.append(mapKeyType);
+      sb.append(mapKeyTypeInfo == null ? mapKeyType : mapKeyTypeInfo.getTypeName());
       sb.append(",");
       sb.append(subSchema.getSchemaAsTypeString());
       sb.append(">");

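Parametrized types need the new TypeInfo-based constructor, since the Type enum alone cannot carry
precision/scale or length. A minimal sketch, assuming TypeInfoFactory's parameterized factory
methods (field names are illustrative):

    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

    public class FieldSchemaSketch {
      public static void main(String[] args) throws Exception {
        HCatFieldSchema dec = new HCatFieldSchema("d", TypeInfoFactory.getDecimalTypeInfo(5, 2), null);
        HCatFieldSchema vc = new HCatFieldSchema("v", TypeInfoFactory.getVarcharTypeInfo(20), null);
        System.out.println(dec.getTypeInfo().getTypeName()); // decimal(5,2)
        System.out.println(vc.getType());                    // VARCHAR
      }
    }
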
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java Wed Feb  5 21:02:57 2014
@@ -79,13 +79,14 @@ public class HCatSchemaUtils {
 
   public static class MapBuilder extends HCatSchemaBuilder {
 
-    Type keyType = null;
+    PrimitiveTypeInfo keyType = null;
     HCatSchema valueSchema = null;
 
     @Override
     public HCatSchema build() throws HCatException {
       List<HCatFieldSchema> fslist = new ArrayList<HCatFieldSchema>();
-      fslist.add(new HCatFieldSchema(null, Type.MAP, keyType, valueSchema, null));
+      fslist.add(HCatFieldSchema.createMapTypeFieldSchema(null, keyType, valueSchema, null));
+      
       return new HCatSchema(fslist);
     }
 
@@ -94,7 +95,7 @@ public class HCatSchemaUtils {
       return this;
     }
 
-    public MapBuilder withKeyType(Type keyType) {
+    public MapBuilder withKeyType(PrimitiveTypeInfo keyType) {
       this.keyType = keyType;
       return this;
     }
@@ -118,7 +119,7 @@ public class HCatSchemaUtils {
     Category typeCategory = fieldTypeInfo.getCategory();
     HCatFieldSchema hCatFieldSchema;
     if (Category.PRIMITIVE == typeCategory) {
-      hCatFieldSchema = new HCatFieldSchema(fieldName, getPrimitiveHType(fieldTypeInfo), null);
+      hCatFieldSchema = new HCatFieldSchema(fieldName, (PrimitiveTypeInfo)fieldTypeInfo, null);
     } else if (Category.STRUCT == typeCategory) {
       HCatSchema subSchema = constructHCatSchema((StructTypeInfo) fieldTypeInfo);
       hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.STRUCT, subSchema, null);
@@ -126,40 +127,15 @@ public class HCatSchemaUtils {
       HCatSchema subSchema = getHCatSchema(((ListTypeInfo) fieldTypeInfo).getListElementTypeInfo());
       hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.ARRAY, subSchema, null);
     } else if (Category.MAP == typeCategory) {
-      HCatFieldSchema.Type mapKeyType = getPrimitiveHType(((MapTypeInfo) fieldTypeInfo).getMapKeyTypeInfo());
       HCatSchema subSchema = getHCatSchema(((MapTypeInfo) fieldTypeInfo).getMapValueTypeInfo());
-      hCatFieldSchema = new HCatFieldSchema(fieldName, HCatFieldSchema.Type.MAP, mapKeyType, subSchema, null);
+      hCatFieldSchema = HCatFieldSchema.createMapTypeFieldSchema(fieldName, 
+              (PrimitiveTypeInfo)((MapTypeInfo)fieldTypeInfo).getMapKeyTypeInfo(), subSchema, null);
     } else {
       throw new TypeNotPresentException(fieldTypeInfo.getTypeName(), null);
     }
     return hCatFieldSchema;
   }
 
-  private static Type getPrimitiveHType(TypeInfo basePrimitiveTypeInfo) {
-    switch (((PrimitiveTypeInfo) basePrimitiveTypeInfo).getPrimitiveCategory()) {
-    case BOOLEAN:
-      return Type.BOOLEAN;
-    case BYTE:
-      return Type.TINYINT;
-    case DOUBLE:
-      return Type.DOUBLE;
-    case FLOAT:
-      return Type.FLOAT;
-    case INT:
-      return Type.INT;
-    case LONG:
-      return Type.BIGINT;
-    case SHORT:
-      return Type.SMALLINT;
-    case STRING:
-      return Type.STRING;
-    case BINARY:
-      return Type.BINARY;
-    default:
-      throw new TypeNotPresentException(((PrimitiveTypeInfo) basePrimitiveTypeInfo).getTypeName(), null);
-    }
-  }
-
   public static HCatSchema getHCatSchema(Schema schema) throws HCatException {
     return getHCatSchema(schema.getFieldSchemas());
   }
@@ -174,8 +150,8 @@ public class HCatSchemaUtils {
 
   private static HCatSchema constructHCatSchema(StructTypeInfo stypeInfo) throws HCatException {
     CollectionBuilder builder = getStructSchemaBuilder();
-    for (String fieldName : ((StructTypeInfo) stypeInfo).getAllStructFieldNames()) {
-      builder.addField(getHCatFieldSchema(fieldName, ((StructTypeInfo) stypeInfo).getStructFieldTypeInfo(fieldName)));
+    for (String fieldName : stypeInfo.getAllStructFieldNames()) {
+      builder.addField(getHCatFieldSchema(fieldName, stypeInfo.getStructFieldTypeInfo(fieldName)));
     }
     return builder.build();
   }
@@ -184,7 +160,7 @@ public class HCatSchemaUtils {
     Category typeCategory = typeInfo.getCategory();
     HCatSchema hCatSchema;
     if (Category.PRIMITIVE == typeCategory) {
-      hCatSchema = getStructSchemaBuilder().addField(new HCatFieldSchema(null, getPrimitiveHType(typeInfo), null)).build();
+      hCatSchema = getStructSchemaBuilder().addField(new HCatFieldSchema(null, (PrimitiveTypeInfo)typeInfo, null)).build();
     } else if (Category.STRUCT == typeCategory) {
       HCatSchema subSchema = constructHCatSchema((StructTypeInfo) typeInfo);
       hCatSchema = getStructSchemaBuilder().addField(new HCatFieldSchema(null, Type.STRUCT, subSchema, null)).build();
@@ -193,10 +169,10 @@ public class HCatSchemaUtils {
       builder.addField(getHCatFieldSchema(null, ((ListTypeInfo) typeInfo).getListElementTypeInfo()));
       hCatSchema = new HCatSchema(Arrays.asList(new HCatFieldSchema("", Type.ARRAY, builder.build(), "")));
     } else if (Category.MAP == typeCategory) {
-      HCatFieldSchema.Type mapKeyType = getPrimitiveHType(((MapTypeInfo) typeInfo).getMapKeyTypeInfo());
       HCatSchema subSchema = getHCatSchema(((MapTypeInfo) typeInfo).getMapValueTypeInfo());
       MapBuilder builder = getMapSchemaBuilder();
-      hCatSchema = builder.withKeyType(mapKeyType).withValueSchema(subSchema).build();
+      hCatSchema = builder.withKeyType((PrimitiveTypeInfo)((MapTypeInfo) typeInfo).getMapKeyTypeInfo())
+              .withValueSchema(subSchema).build();
     } else {
       throw new TypeNotPresentException(typeInfo.getTypeName(), null);
     }

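MapBuilder now takes the key as a PrimitiveTypeInfo, so parametrized key types survive schema
construction. A sketch, assuming TypeInfoFactory's static type instances and that the builder is
reachable as below (names illustrative):

    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;

    public class MapBuilderSketch {
      public static void main(String[] args) throws Exception {
        HCatSchema mapSchema = HCatSchemaUtils.getMapSchemaBuilder()
            .withKeyType(TypeInfoFactory.timestampTypeInfo)  // map keys must be primitive
            .withValueSchema(HCatSchemaUtils.getHCatSchema("v:string"))
            .build();
        System.out.println(mapSchema.get(0).getTypeString()); // map<timestamp,string>
      }
    }
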
Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java Wed Feb  5 21:02:57 2014
@@ -71,10 +71,6 @@ public class OutputJobInfo implements Se
    * @param databaseName the db name
    * @param tableName the table name
    * @param partitionValues The partition values to publish to, can be null or empty Map to
-   * work with hadoop security, the kerberos principal name of the server - else null
-   * The principal name should be of the form:
-   * <servicename>/_HOST@<realm> like "hcat/_HOST@myrealm.com"
-   * The special string _HOST will be replaced automatically with the correct host name
    * indicate write to a unpartitioned table. For partitioned tables, this map should
    * contain keys for all partition columns with corresponding values.
    */

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/HcatTestUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/HcatTestUtils.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/HcatTestUtils.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/HcatTestUtils.java Wed Feb  5 21:02:57 2014
@@ -97,10 +97,4 @@ public class HcatTestUtils {
     }
 
   }
-  /**
-   * Used by various tests to make sure the path is safe for Windows
-   */
-  public static String makePathASafeFileName(String filePath) {
-    return new File(filePath).getPath().replaceAll("\\\\", "/");
-  }
 }

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/HCatDataCheckUtil.java Wed Feb  5 21:02:57 2014
@@ -19,6 +19,7 @@
 package org.apache.hive.hcatalog.data;
 
 import java.io.IOException;
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map.Entry;
@@ -87,14 +88,23 @@ public class HCatDataCheckUtil {
 
 
   public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
-    return (compareRecords(first, second) == 0);
+    return recordsEqual(first, second, null);
+  }
+  public static boolean recordsEqual(HCatRecord first, HCatRecord second, 
+                                     StringBuilder debugDetail) {
+    return (compareRecords(first, second, debugDetail) == 0);
   }
 
   public static int compareRecords(HCatRecord first, HCatRecord second) {
-    return compareRecordContents(first.getAll(), second.getAll());
+    return compareRecords(first, second, null);
+  }
+  public static int compareRecords(HCatRecord first, HCatRecord second, 
+                                   StringBuilder debugDetail) {
+    return compareRecordContents(first.getAll(), second.getAll(), debugDetail);
   }
 
-  public static int compareRecordContents(List<Object> first, List<Object> second) {
+  public static int compareRecordContents(List<Object> first, List<Object> second, 
+                                          StringBuilder debugDetail) {
     int mySz = first.size();
     int urSz = second.size();
     if (mySz != urSz) {
@@ -103,6 +113,22 @@ public class HCatDataCheckUtil {
       for (int i = 0; i < first.size(); i++) {
         int c = DataType.compare(first.get(i), second.get(i));
         if (c != 0) {
+          if(debugDetail != null) {
+            String msg = "first.get(" + i + "}='" + first.get(i) + "' second.get(" +
+                    i + ")='" + second.get(i) + "' compared as " + c + "\n" +
+            "Types 1st/2nd=" + DataType.findType(first.get(i)) + "/" +DataType.findType(
+                    second.get(i)) + '\n' + 
+                    "first='" + first.get(i) + "' second='" + second.get(i) + "'";
+            if(first.get(i) instanceof Date) {
+              msg += "\n((Date)first.get(i)).getTime()=" + ((Date)first.get(i)).getTime();
+            }
+            if(second.get(i) instanceof Date) {
+              msg += "\n((Date)second.get(i)).getTime()=" + ((Date)second.get(i)).getTime();
+            }
+
+            debugDetail.append(msg);
+            throw new RuntimeException(debugDetail.toString());
+          }
           return c;
         }
       }

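With a non-null StringBuilder, compareRecords() now appends a description of the first mismatching
field and throws a RuntimeException instead of just returning the compare code. A minimal sketch
(values illustrative):

    import java.util.Arrays;
    import org.apache.hive.hcatalog.data.DefaultHCatRecord;
    import org.apache.hive.hcatalog.data.HCatDataCheckUtil;
    import org.apache.hive.hcatalog.data.HCatRecord;

    public class RecordsEqualSketch {
      public static void main(String[] args) {
        HCatRecord a = new DefaultHCatRecord(Arrays.<Object>asList("x", 1));
        HCatRecord b = new DefaultHCatRecord(Arrays.<Object>asList("x", 2));
        StringBuilder detail = new StringBuilder("comparing a and b: ");
        // Fields differ at index 1, so this throws with the mismatch appended to detail.
        HCatDataCheckUtil.recordsEqual(a, b, detail);
      }
    }
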
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java Wed Feb  5 21:02:57 2014
@@ -28,20 +28,32 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
+import java.util.Calendar;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
+import org.apache.pig.parser.AliasMasker;
 
 public class TestDefaultHCatRecord extends TestCase {
 
+  /**
+   * Test that we properly serialize/deserialize HCatRecords.
+   * @throws IOException
+   */
   public void testRYW() throws IOException {
 
     File f = new File("binary.dat");
@@ -65,7 +77,9 @@ public class TestDefaultHCatRecord exten
     for (int i = 0; i < recs.length; i++) {
       HCatRecord rec = new DefaultHCatRecord();
       rec.readFields(inpStream);
-      Assert.assertTrue(HCatDataCheckUtil.recordsEqual(recs[i], rec));
+      StringBuilder msg = new StringBuilder("recs[" + i + "]='" + recs[i] + "' rec='" + rec + "'");
+      boolean isEqual = HCatDataCheckUtil.recordsEqual(recs[i], rec, msg);
+      Assert.assertTrue(msg.toString(), isEqual);
     }
 
     Assert.assertEquals(fInStream.available(), 0);
@@ -134,6 +148,21 @@ public class TestDefaultHCatRecord exten
     Assert.assertTrue(HCatDataCheckUtil.recordsEqual(newRec, inpRec));
   }
 
+  /**
+   * Test type specific get/set methods on HCatRecord types added in Hive 13
+   * @throws HCatException
+   */
+  public void testGetSetByType3() throws HCatException {
+    HCatRecord inpRec = getHCat13TypesRecord();
+    HCatRecord newRec = new DefaultHCatRecord(inpRec.size());
+    HCatSchema hsch = HCatSchemaUtils.getHCatSchema(
+            "a:decimal(5,2),b:char(10),c:varchar(20),d:date,e:timestamp");
+    newRec.setDecimal("a", hsch, inpRec.getDecimal("a", hsch));
+    newRec.setChar("b", hsch, inpRec.getChar("b", hsch));
+    newRec.setVarchar("c", hsch, inpRec.getVarchar("c", hsch));
+    newRec.setDate("d", hsch, inpRec.getDate("d", hsch));
+    newRec.setTimestamp("e", hsch, inpRec.getTimestamp("e", hsch));
+  }
 
   private HCatRecord getGetSet2InpRec() {
     List<Object> rlist = new ArrayList<Object>();
@@ -238,9 +267,32 @@ public class TestDefaultHCatRecord exten
     rec_6.add(getList());
     HCatRecord tup_6 = new DefaultHCatRecord(rec_6);
 
-
-    return new HCatRecord[]{tup_1, tup_2, tup_3, tup_4, tup_5, tup_6};
-
+    return new HCatRecord[]{tup_1, tup_2, tup_3, tup_4, tup_5, tup_6, getHCat13TypesRecord(), 
+            getHCat13TypesComplexRecord()};
+  }
+  private static HCatRecord getHCat13TypesRecord() {
+    List<Object> rec_hcat13types = new ArrayList<Object>(5);
+    rec_hcat13types.add(HiveDecimal.create(new BigDecimal("123.45")));//prec 5, scale 2
+    rec_hcat13types.add(new HiveChar("hive_char", 10));
+    rec_hcat13types.add(new HiveVarchar("hive_varchar", 20));
+    rec_hcat13types.add(Date.valueOf("2014-01-06"));
+    rec_hcat13types.add(new Timestamp(System.currentTimeMillis()));
+    return new DefaultHCatRecord(rec_hcat13types);
+  }
+  private static HCatRecord getHCat13TypesComplexRecord() {
+    List<Object> rec_hcat13ComplexTypes = new ArrayList<Object>();
+    Map<HiveDecimal, String> m = new HashMap<HiveDecimal, String>();
+    m.put(HiveDecimal.create(new BigDecimal("1234.12")), "1234.12");
+    m.put(HiveDecimal.create(new BigDecimal("1234.13")), "1234.13");
+    rec_hcat13ComplexTypes.add(m);
+
+    Map<Timestamp, List<Object>> m2 = new HashMap<Timestamp, List<Object>>();
+    List<Object> list = new ArrayList<Object>();
+    list.add(Date.valueOf("2014-01-05"));
+    list.add(new HashMap<HiveDecimal, String>(m));
+    m2.put(new Timestamp(System.currentTimeMillis()), list);
+    rec_hcat13ComplexTypes.add(m2);
+    return new DefaultHCatRecord(rec_hcat13ComplexTypes);
   }
 
   private Object getList() {

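For reference, a minimal sketch (not part of this patch) of the typed accessor API exercised above; the class name, field name, and schema string are illustrative:

    import java.math.BigDecimal;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hive.hcatalog.data.DefaultHCatRecord;
    import org.apache.hive.hcatalog.data.HCatRecord;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;

    public class TypedAccessorSketch {
      public static void main(String[] args) throws Exception {
        // Build a single-column schema using the same syntax as the test above.
        HCatSchema schema = HCatSchemaUtils.getHCatSchema("a:decimal(5,2)");
        // DefaultHCatRecord(int) pre-allocates that many null slots.
        HCatRecord rec = new DefaultHCatRecord(1);
        rec.setDecimal("a", schema, HiveDecimal.create(new BigDecimal("123.45")));
        System.out.println(rec.getDecimal("a", schema)); // prints 123.45
      }
    }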
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java Wed Feb  5 21:02:57 2014
@@ -18,6 +18,9 @@
  */
 package org.apache.hive.hcatalog.data;
 
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -27,6 +30,9 @@ import java.util.Properties;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.Text;
@@ -85,6 +91,11 @@ public class TestJsonSerDe extends TestC
     c1_1.add(i2);
     c1.add(c1_1);
     rlist.add(c1);
+    rlist.add(HiveDecimal.create(new BigDecimal("123.45")));//prec 5, scale 2
+    rlist.add(new HiveChar("hive_char", 10));
+    rlist.add(new HiveVarchar("hive_varchar", 20));
+    rlist.add(Date.valueOf("2014-01-07"));
+    rlist.add(new Timestamp(System.currentTimeMillis()));
 
     List<Object> nlist = new ArrayList<Object>(13);
     nlist.add(null); // tinyint
@@ -100,20 +111,26 @@ public class TestJsonSerDe extends TestC
     nlist.add(null); // map
     nlist.add(null); // bool
     nlist.add(null); // complex
+    nlist.add(null); //decimal(5,2)
+    nlist.add(null); //char(10)
+    nlist.add(null); //varchar(20)
+    nlist.add(null); //date
+    nlist.add(null); //timestamp
 
     String typeString =
         "tinyint,smallint,int,bigint,double,float,string,string,"
             + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
-            + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>";
+            + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>," +
+                "decimal(5,2),char(10),varchar(20),date,timestamp";
     Properties props = new Properties();
 
-    props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1");
+    props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,bd,hc,hvc,dt,ts");
     props.put(serdeConstants.LIST_COLUMN_TYPES, typeString);
 //    props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
 //    props.put(Constants.SERIALIZATION_FORMAT, "1");
 
-    data.add(new Pair(props, new DefaultHCatRecord(rlist)));
-    data.add(new Pair(props, new DefaultHCatRecord(nlist)));
+    data.add(new Pair<Properties, HCatRecord>(props, new DefaultHCatRecord(rlist)));
+    data.add(new Pair<Properties, HCatRecord>(props, new DefaultHCatRecord(nlist)));
     return data;
   }
 
@@ -137,14 +154,17 @@ public class TestJsonSerDe extends TestC
       LOG.info("ONE:{}", s);
 
       Object o1 = hrsd.deserialize(s);
-      assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o1));
+      StringBuilder msg = new StringBuilder();
+      boolean isEqual = HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o1, msg);
+      assertTrue(msg.toString(), isEqual);
 
       Writable s2 = jsde.serialize(o1, hrsd.getObjectInspector());
       LOG.info("TWO:{}", s2);
       Object o2 = jsde.deserialize(s2);
       LOG.info("deserialized TWO : {} ", o2);
-
-      assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2));
+      msg.setLength(0);
+      isEqual = HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2, msg);
+      assertTrue(msg.toString(), isEqual);
     }
 
   }
@@ -153,7 +173,7 @@ public class TestJsonSerDe extends TestC
     /**
      *  This test has been added to account for HCATALOG-436
      *  We write out columns with "internal column names" such
-     *  as "_col0", but try to read with retular column names.
+     *  as "_col0", but try to read with regular column names.
      */
 
     Configuration conf = new Configuration();
@@ -190,7 +210,9 @@ public class TestJsonSerDe extends TestC
 
       Object o2 = rjsd.deserialize(s);
       LOG.info("deserialized TWO : {} ", o2);
-      assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2));
+      StringBuilder msg = new StringBuilder();
+      boolean isEqual = HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2, msg);
+      assertTrue(msg.toString(), isEqual);
     }
 
   }

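The assertions above use the new three-argument HCatDataCheckUtil.recordsEqual overload, which appends a description of any mismatch to the caller's StringBuilder so a failed assertTrue reports what differed. A minimal sketch, assuming two HCatRecord variables named expected and actual:

    StringBuilder msg = new StringBuilder();
    // The three-argument overload fills msg when the records differ.
    boolean isEqual = HCatDataCheckUtil.recordsEqual(expected, actual, msg);
    // On failure JUnit prints msg instead of a bare AssertionError.
    assertTrue(msg.toString(), isEqual);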
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/schema/TestHCatSchema.java Wed Feb  5 21:02:57 2014
@@ -19,6 +19,7 @@
 package org.apache.hive.hcatalog.data.schema;
 
 import junit.framework.TestCase;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hive.hcatalog.common.HCatException;
 
 import java.util.ArrayList;
@@ -62,6 +63,14 @@ public class TestHCatSchema extends Test
     HCatFieldSchema memberID2 = new HCatFieldSchema("memberID", HCatFieldSchema.Type.INT, "as a number");
     assertTrue("Expected objects to be equal", memberID1.equals(memberID2));
     assertTrue("Expected hash codes to be equal", memberID1.hashCode() == memberID2.hashCode());
+    memberID1 = new HCatFieldSchema("memberID", TypeInfoFactory.getDecimalTypeInfo(5,2), "decimal(5,2)");
+    memberID2 = new HCatFieldSchema("memberID", TypeInfoFactory.getDecimalTypeInfo(5,3), "decimal(5)");
+    assertFalse("Expected objects to be unequal", memberID1.equals(memberID2));
+    assertFalse("Expected hash codes to be unequal", memberID1.hashCode() == memberID2.hashCode());
+    memberID1 = new HCatFieldSchema("memberID", TypeInfoFactory.getVarcharTypeInfo(5), "varchar(5)");
+    memberID2 = new HCatFieldSchema("memberID", TypeInfoFactory.getVarcharTypeInfo(5), "varchar(5)");
+    assertTrue("Expected objects to be equal", memberID1.equals(memberID2));
+    assertTrue("Expected hash codes to be equal", memberID1.hashCode() == memberID2.hashCode());
   }
 
   public void testCannotInstantiateSchemaWithRepeatedFieldNames() throws HCatException {

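The new assertions verify that type parameters take part in HCatFieldSchema equals/hashCode, so two decimal fields that differ only in scale are unequal. A minimal sketch of the checked behavior, inside a test method that throws HCatException; the field name and comment strings are illustrative:

    HCatFieldSchema d1 = new HCatFieldSchema("amount",
        TypeInfoFactory.getDecimalTypeInfo(5, 2), "decimal(5,2)");
    HCatFieldSchema d2 = new HCatFieldSchema("amount",
        TypeInfoFactory.getDecimalTypeInfo(5, 3), "decimal(5,3)");
    // Same name and base type, different scale => must not compare equal.
    assertFalse("scale is part of the type", d1.equals(d2));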
Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java Wed Feb  5 21:02:57 2014
@@ -26,7 +26,9 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
+import org.apache.pig.backend.executionengine.ExecException;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -35,6 +37,7 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Properties;
 
 /**
  * Simplify writing HCatalog tests that require a HiveMetaStore.
@@ -81,7 +84,26 @@ public class HCatBaseTest {
   }
 
   protected void logAndRegister(PigServer server, String query) throws IOException {
+    logAndRegister(server, query, 1);
+  }
+  protected void logAndRegister(PigServer server, String query, int lineNumber) throws IOException {
+    assert lineNumber > 0 : "(lineNumber > 0) is false";
     LOG.info("Registering pig query: " + query);
-    server.registerQuery(query);
+    server.registerQuery(query, lineNumber);
+  }
+
+  /**
+   * Creates a PigServer in LOCAL mode.
+   * See http://pig.apache.org/docs/r0.12.0/perf.html#error-handling
+   * @param stopOnFailure equivalent of the "-stop_on_failure" command line arg; setting it to
+   *                      'true' makes debugging easier
+   */
+  public static PigServer createPigServer(boolean stopOnFailure) throws ExecException {
+    if(stopOnFailure) {
+      Properties p = new Properties();
+      p.put("stop.on.failure", Boolean.TRUE.toString());
+      return new PigServer(ExecType.LOCAL, p);
+    }
+    return new PigServer(ExecType.LOCAL);
   }
 }

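Typical use of the new helper in a test; the Pig script line and table name below are placeholders, and the second registerQuery argument is the 1-based line number of the statement within the script:

    // stop.on.failure=true aborts the script at the first failed job, so the
    // test failure points at the offending statement.
    PigServer server = HCatBaseTest.createPigServer(true);
    server.registerQuery(
        "A = LOAD 'default.my_table' USING org.apache.hive.hcatalog.pig.HCatLoader();", 1);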
Modified: hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml?rev=1564924&r1=1564923&r2=1564924&view=diff
==============================================================================
--- hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml (original)
+++ hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml Wed Feb  5 21:02:57 2014
@@ -70,6 +70,13 @@
           <artifactId>pig</artifactId>
           <version>${pig.version}</version>
         </dependency>
+        <dependency>
+          <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
+              in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
+          <groupId>joda-time</groupId>
+          <artifactId>joda-time</artifactId>
+          <version>2.2</version>
+        </dependency>
       </dependencies>
     </profile>
    <profile>
@@ -91,6 +98,13 @@
           <version>${pig.version}</version>
           <classifier>h2</classifier>
         </dependency>
+        <dependency>
+          <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
+              in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
+          <groupId>joda-time</groupId>
+          <artifactId>joda-time</artifactId>
+          <version>2.2</version>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>