You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2013/10/07 08:39:53 UTC

svn commit: r1529771 [1/3] - in /hive/trunk: contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/orc/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/...

Author: hashutosh
Date: Mon Oct  7 06:39:52 2013
New Revision: 1529771

URL: http://svn.apache.org/r1529771
Log:
HIVE-5372 : Refactor TypeInfo and PrimitiveTypeEntry class hierarchy to eliminate info repetition (Xuefu Zhang via Ashutosh Chauhan)

Added:
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/BaseCharTypeInfo.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharUtils.java
Removed:
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/BaseTypeParams.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/ParameterizedPrimitiveTypeUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/PrimitiveTypeSpec.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeParams.java
Modified:
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/SettableUDF.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java
    hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_1.q.out
    hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_2.q.out
    hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_3.q.out
    hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyByteObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyIntObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyLongObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyShortObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyStringObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveVarchar.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveJavaObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveWritableObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBooleanObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaByteObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDoubleObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaFloatObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveDecimalObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaIntObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaLongObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaShortObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaStringObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaVoidObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableBinaryObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableBooleanObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableByteObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveVarcharObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDoubleObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableFloatObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableIntObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableLongObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableShortObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableStringObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableVoidObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/PrimitiveTypeInfo.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/TypeQualifiers.java

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java Mon Oct  7 06:39:52 2013
@@ -39,9 +39,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -59,16 +58,16 @@ public class TypedBytesRecordReader impl
   private DataInputStream din;
   private TypedBytesWritableInput tbIn;
 
-  private NonSyncDataOutputBuffer barrStr = new NonSyncDataOutputBuffer();
+  private final NonSyncDataOutputBuffer barrStr = new NonSyncDataOutputBuffer();
   private TypedBytesWritableOutput tbOut;
 
-  private ArrayList<Writable> row = new ArrayList<Writable>(0);
-  private ArrayList<String> rowTypeName = new ArrayList<String>(0);
+  private final ArrayList<Writable> row = new ArrayList<Writable>(0);
+  private final ArrayList<String> rowTypeName = new ArrayList<String>(0);
   private List<String> columnTypes;
 
-  private ArrayList<ObjectInspector> srcOIns = new ArrayList<ObjectInspector>();
-  private ArrayList<ObjectInspector> dstOIns = new ArrayList<ObjectInspector>();
-  private ArrayList<Converter> converters = new ArrayList<Converter>();
+  private final ArrayList<ObjectInspector> srcOIns = new ArrayList<ObjectInspector>();
+  private final ArrayList<ObjectInspector> dstOIns = new ArrayList<ObjectInspector>();
+  private final ArrayList<Converter> converters = new ArrayList<Converter>();
 
   private static Map<Type, String> typedBytesToTypeName = new HashMap<Type, String>();
   static {
@@ -89,10 +88,9 @@ public class TypedBytesRecordReader impl
     String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     columnTypes = Arrays.asList(columnTypeProperty.split(","));
     for (String columnType : columnTypes) {
-      PrimitiveTypeEntry dstTypeEntry = PrimitiveObjectInspectorUtils
-          .getTypeEntryFromTypeName(columnType);
+      PrimitiveTypeInfo dstTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(columnType);
       dstOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-          dstTypeEntry));
+          dstTypeInfo));
     }
   }
 
@@ -152,11 +150,10 @@ public class TypedBytesRecordReader impl
         row.add(wrt);
         rowTypeName.add(type.name());
         String typeName = typedBytesToTypeName.get(type);
-        PrimitiveTypeEntry srcTypeEntry = PrimitiveObjectInspectorUtils
-            .getTypeEntryFromTypeName(typeName);
+        PrimitiveTypeInfo srcTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
         srcOIns
             .add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-                srcTypeEntry));
+                srcTypeInfo));
         converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos),
             dstOIns.get(pos)));
       } else {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Mon Oct  7 06:39:52 2013
@@ -39,7 +39,6 @@ import javax.xml.parsers.DocumentBuilder
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -156,7 +155,6 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.w3c.dom.Document;
@@ -663,12 +661,7 @@ public final class FunctionRegistry {
         int maxLength = getCommonLength(
             TypeInfoUtils.getCharacterLengthForType(a),
             TypeInfoUtils.getCharacterLengthForType(b));
-        VarcharTypeParams varcharParams = new VarcharTypeParams();
-        varcharParams.setLength(maxLength);
-        // Generate type name so that we can retrieve the TypeInfo for that type.
-        String typeName = PrimitiveObjectInspectorUtils
-            .getTypeEntryFromTypeSpecs(typeCategory, varcharParams).toString();
-        return TypeInfoFactory.getPrimitiveTypeInfo(typeName);
+        return TypeInfoFactory.getVarcharTypeInfo(maxLength);
 
       default:
         // Type doesn't require any qualifiers.
@@ -1329,9 +1322,9 @@ public final class FunctionRegistry {
       // The original may have settable info that needs to be added to the new copy.
       if (genericUDF instanceof SettableUDF) {
         try {
-          Object settableData = ((SettableUDF)genericUDF).getParams();
-          if (settableData != null) {
-            ((SettableUDF)clonedUDF).setParams(settableData);
+          TypeInfo typeInfo = ((SettableUDF)genericUDF).getTypeInfo();
+          if (typeInfo != null) {
+            ((SettableUDF)clonedUDF).setTypeInfo(typeInfo);
           }
         } catch (UDFArgumentException err) {
           // In theory this should not happen - if the original copy of the UDF had this
@@ -1761,7 +1754,9 @@ public final class FunctionRegistry {
   }
 
   private static void registerNativeStatus(FunctionInfo fi) {
-    if (!fi.isNative()) return;
+    if (!fi.isNative()) {
+      return;
+    }
     nativeUdfs.add(fi.getFunctionClass());
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java Mon Oct  7 06:39:52 2013
@@ -32,18 +32,14 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.SettableMapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.io.Writable;
 
 final class OrcStruct implements Writable {
@@ -487,12 +483,6 @@ final class OrcStruct implements Writabl
           case STRING:
             return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
           case VARCHAR:
-            // For varchar we need to retrieve the string length from the TypeInfo.
-            VarcharTypeParams varcharParams = (VarcharTypeParams)
-                ParameterizedPrimitiveTypeUtils.getTypeParamsFromTypeInfo(info);
-            if (varcharParams == null) {
-              throw new IllegalArgumentException("varchar type used without type params");
-            }
             return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                 (PrimitiveTypeInfo) info);
           case TIMESTAMP:
@@ -546,11 +536,8 @@ final class OrcStruct implements Writabl
           throw new UnsupportedOperationException(
               "Illegal use of varchar type without length in ORC type definition.");
         }
-        VarcharTypeParams varcharParams = new VarcharTypeParams();
-        varcharParams.setLength(type.getMaximumLength());
         return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-            PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
-                PrimitiveCategory.VARCHAR, varcharParams));
+            TypeInfoFactory.getVarcharTypeInfo(type.getMaximumLength()));
       case TIMESTAMP:
         return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
       case DATE:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java Mon Oct  7 06:39:52 2013
@@ -59,8 +59,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
@@ -1609,14 +1608,9 @@ class WriterImpl implements Writer, Memo
           case VARCHAR:
             // The varchar length needs to be written to file and should be available
             // from the object inspector
-            VarcharTypeParams varcharParams = (VarcharTypeParams)
-                ParameterizedPrimitiveTypeUtils.getTypeParamsFromPrimitiveObjectInspector(
-                    (PrimitiveObjectInspector) treeWriter.inspector);
-            if (varcharParams == null) {
-              throw new IllegalArgumentException("No varchar length specified in ORC type");
-            }
+            VarcharTypeInfo typeInfo = (VarcharTypeInfo) ((PrimitiveObjectInspector) treeWriter.inspector).getTypeInfo();
             type.setKind(Type.Kind.VARCHAR);
-            type.setMaximumLength(varcharParams.getLength());
+            type.setMaximumLength(typeInfo.getLength());
             break;
           case BINARY:
             type.setKind(OrcProto.Type.Kind.BINARY);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Oct  7 06:39:52 2013
@@ -129,7 +129,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
@@ -172,9 +172,8 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_VARCHAR:
       PrimitiveCategory primitiveCategory = PrimitiveCategory.VARCHAR;
       typeName = TokenToTypeName.get(token);
-      VarcharTypeParams varcharParams = ParseUtils.getVarcharParams(typeName, node);
-      typeName = PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
-          primitiveCategory, varcharParams).toString();
+      VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(typeName, node);
+      typeName = varcharTypeInfo.getQualifiedName();
       break;
     default:
       typeName = TokenToTypeName.get(token);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Mon Oct  7 06:39:52 2013
@@ -20,17 +20,15 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 
 /**
@@ -110,49 +108,24 @@ public final class ParseUtils {
    */
   static ExprNodeDesc createConversionCast(ExprNodeDesc column, PrimitiveTypeInfo tableFieldTypeInfo)
       throws SemanticException {
-    ExprNodeDesc ret;
-
     // Get base type, since type string may be parameterized
     String baseType = TypeInfoUtils.getBaseName(tableFieldTypeInfo.getTypeName());
-    BaseTypeParams typeParams = null;
-    // If TypeInfo is parameterized, provide the params to the UDF factory method.
-    typeParams = tableFieldTypeInfo.getTypeParams();
-    if (typeParams != null) {
-      switch (tableFieldTypeInfo.getPrimitiveCategory()) {
-        case VARCHAR:
-          // Nothing to do here - the parameter will be passed to the UDF factory method below
-          break;
-        default:
-          throw new SemanticException("Type cast for " + tableFieldTypeInfo.getPrimitiveCategory() +
-              " does not take type parameters");
-      }
-    }
 
     // If the type cast UDF is for a parameterized type, then it should implement
     // the SettableUDF interface so that we can pass in the params.
     // Not sure if this is the cleanest solution, but there does need to be a way
     // to provide the type params to the type cast.
-    ret = TypeCheckProcFactory.DefaultExprProcessor
-        .getFuncExprNodeDescWithUdfData(baseType, typeParams, column);
-
-    return ret;
+    return TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDescWithUdfData(baseType,
+        tableFieldTypeInfo, column);
   }
 
-  public static VarcharTypeParams getVarcharParams(String typeName, ASTNode node)
+  public static VarcharTypeInfo getVarcharTypeInfo(String typeName, ASTNode node)
       throws SemanticException {
     if (node.getChildCount() != 1) {
       throw new SemanticException("Bad params for type " + typeName);
     }
 
-    try {
-      VarcharTypeParams typeParams = new VarcharTypeParams();
-      String lengthStr = node.getChild(0).getText();
-      Integer length = Integer.valueOf(lengthStr);
-      typeParams.setLength(length.intValue());
-      typeParams.validateParams();
-      return typeParams;
-    } catch (SerDeException err) {
-      throw new SemanticException(err);
-    }
+    String lengthStr = node.getChild(0).getText();
+    return TypeInfoFactory.getVarcharTypeInfo(Integer.valueOf(lengthStr));
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Mon Oct  7 06:39:52 2013
@@ -62,13 +62,13 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 /**
  * The Factory for creating typecheck processors. The typecheck processors are
@@ -652,7 +652,7 @@ public final class TypeCheckProcFactory 
      *
      * @throws UDFArgumentException
      */
-    static ExprNodeDesc getFuncExprNodeDescWithUdfData(String udfName, Object udfData,
+    static ExprNodeDesc getFuncExprNodeDescWithUdfData(String udfName, TypeInfo typeInfo,
         ExprNodeDesc... children) throws UDFArgumentException {
 
       FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName);
@@ -667,9 +667,9 @@ public final class TypeCheckProcFactory 
       }
 
       // Add udfData to UDF if necessary
-      if (udfData != null) {
+      if (typeInfo != null) {
         if (genericUDF instanceof SettableUDF) {
-          ((SettableUDF)genericUDF).setParams(udfData);
+          ((SettableUDF)genericUDF).setTypeInfo(typeInfo);
         }
       }
 
@@ -793,10 +793,10 @@ public final class TypeCheckProcFactory 
           switch (funcNameNode.getType()) {
             case HiveParser.TOK_VARCHAR:
               // Add type params
-              VarcharTypeParams varcharTypeParams = new VarcharTypeParams();
-              varcharTypeParams.length = Integer.valueOf((funcNameNode.getChild(0).getText()));
+              VarcharTypeInfo varcharTypeInfo = TypeInfoFactory.getVarcharTypeInfo(
+                  Integer.valueOf((funcNameNode.getChild(0).getText())));
               if (genericUDF != null) {
-                ((SettableUDF)genericUDF).setParams(varcharTypeParams);
+                ((SettableUDF)genericUDF).setTypeInfo(varcharTypeInfo);
               }
               break;
             default:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java Mon Oct  7 06:39:52 2013
@@ -66,8 +66,7 @@ public class ExprNodeConstantDesc extend
         .getPrimitiveJavaObjectInspector(pc).getPrimitiveWritableObject(
           getValue());
     return PrimitiveObjectInspectorFactory
-        .getPrimitiveWritableConstantObjectInspector(
-            (PrimitiveTypeInfo) getTypeInfo(), writableValue);
+        .getPrimitiveWritableConstantObjectInspector((PrimitiveTypeInfo) getTypeInfo(), writableValue);
   }
 
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/SettableUDF.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/SettableUDF.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/SettableUDF.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/SettableUDF.java Mon Oct  7 06:39:52 2013
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
  * THIS INTERFACE IS UNSTABLE AND SHOULD NOT BE USED BY 3RD PARTY UDFS.
@@ -31,8 +32,8 @@ public interface SettableUDF {
    * An exception may be thrown if the UDF doesn't know what to do with this data.
-   * @param params UDF-specific data to add to the UDF
+   * @param typeInfo the TypeInfo to set on the UDF
    */
-  void setParams(Object params) throws UDFArgumentException;
+  void setTypeInfo(TypeInfo typeInfo) throws UDFArgumentException;
 
-  Object getParams();
+  TypeInfo getTypeInfo();
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java Mon Oct  7 06:39:52 2013
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.StringConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BytesWritable;
 
 /**
@@ -123,10 +123,8 @@ public class GenericUDFConcat extends Ge
         case STRING:
           return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
         case VARCHAR:
-          VarcharTypeParams varcharParams = new VarcharTypeParams();
-          varcharParams.setLength(returnLength);
-          return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-              PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(returnType, varcharParams));
+          VarcharTypeInfo typeInfo = TypeInfoFactory.getVarcharTypeInfo(returnLength);
+          return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
         default:
           throw new UDFArgumentException("Unexpected CONCAT return type of " + returnType);
       }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLower.java Mon Oct  7 06:39:52 2013
@@ -29,7 +29,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.StringConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 /**
  * UDFLower.
@@ -65,11 +66,9 @@ public class GenericUDFLower extends Gen
       case VARCHAR:
         // return type should have same length as the input.
         returnType = inputType;
-        VarcharTypeParams varcharParams = new VarcharTypeParams();
-        varcharParams.setLength(
+        VarcharTypeInfo typeInfo = TypeInfoFactory.getVarcharTypeInfo(
             GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
-        outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-            argumentOI);
+        outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
         break;
       default:
         returnType = PrimitiveCategory.STRING;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java Mon Oct  7 06:39:52 2013
@@ -94,12 +94,10 @@ public class GenericUDFReflect2 extends 
 
     try {
       method = findMethod(targetClass, methodName.toString(), null, true);
-      // While getTypeFor() returns a TypeEntry, we won't actually be able to get any
-      // type parameter information from this since the TypeEntry is derived from a return type.
+      // Note: type param is not available here.
       PrimitiveTypeEntry typeEntry = getTypeFor(method.getReturnType());
       returnOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-          PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
-              typeEntry.primitiveCategory, typeEntry.typeParams));
+          typeEntry.primitiveCategory);
       returnObj = (Writable) returnOI.getPrimitiveWritableClass().newInstance();
     } catch (Exception e) {
       throw new UDFArgumentException(e);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java Mon Oct  7 06:39:52 2013
@@ -27,12 +27,11 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveVarcharConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 @Description(name = "varchar",
 value = "CAST(<value> as VARCHAR(length)) - Converts the argument to a varchar value.",
@@ -41,15 +40,15 @@ extended = "Values will be truncated if 
 + "Example:\n "
 + "  > SELECT CAST(1234 AS varchar(10)) FROM src LIMIT 1;\n"
 + "  '1234'")
-public class GenericUDFToVarchar extends GenericUDF
-    implements SettableUDF, Serializable {
+public class GenericUDFToVarchar extends GenericUDF implements SettableUDF, Serializable {
   private static final Log LOG = LogFactory.getLog(GenericUDFToVarchar.class.getName());
+
   private transient PrimitiveObjectInspector argumentOI;
   private transient HiveVarcharConverter converter;
 
-  // The varchar type parameters need to be set prior to initialization,
+  // The varchar type info needs to be set prior to initialization,
   // and must be preserved when the plan serialized to other processes.
-  private VarcharTypeParams typeParams;
+  private VarcharTypeInfo typeInfo;
 
   public GenericUDFToVarchar() {
   }
@@ -68,14 +67,8 @@ public class GenericUDFToVarchar extends
 
-    // Check if this UDF has been provided with type params for the output varchar type
+    // The output OI is derived from the varchar type info set on this UDF via setTypeInfo().
     SettableHiveVarcharObjectInspector outputOI;
-    if (typeParams != null) {
-      outputOI = (SettableHiveVarcharObjectInspector)
-          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-              PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
-                  PrimitiveCategory.VARCHAR, typeParams));
-    } else {
-      outputOI = PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector;
-    }
+    outputOI = (SettableHiveVarcharObjectInspector)
+          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
 
     converter = new HiveVarcharConverter(argumentOI, outputOI);
     return outputOI;
@@ -98,11 +91,7 @@ public class GenericUDFToVarchar extends
     sb.append("CAST( ");
     sb.append(children[0]);
     sb.append(" AS VARCHAR(");
-    String paramsStr = "";
-    if (typeParams != null) {
-      paramsStr = typeParams.toString();
-    }
-    sb.append(paramsStr);
+    sb.append("" + typeInfo.getLength());
     sb.append(")");
     return sb.toString();
   }
@@ -112,21 +101,13 @@ public class GenericUDFToVarchar extends
   * This should be done before the UDF is initialized.
  */
   @Override
-  public void setParams(Object typeParams) throws UDFArgumentException {
-    if (converter != null) {
-      LOG.warn("Type converter already initialized, setting type params now will not be useful");
-    }
-    if (typeParams instanceof VarcharTypeParams) {
-      this.typeParams = (VarcharTypeParams)typeParams;
-    } else {
-      throw new UDFArgumentException(
-          "Was expecting VarcharTypeParams, instead got " + typeParams.getClass().getName());
-    }
+  public void setTypeInfo(TypeInfo typeInfo) throws UDFArgumentException {
+    this.typeInfo = (VarcharTypeInfo) typeInfo;
   }
 
   @Override
-  public Object getParams() {
-    return typeParams;
+  public TypeInfo getTypeInfo() {
+    return typeInfo;
   }
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUpper.java Mon Oct  7 06:39:52 2013
@@ -29,7 +29,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.StringConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 /**
  * UDFUpper.
@@ -65,11 +66,10 @@ public class GenericUDFUpper extends Gen
       case VARCHAR:
         // return type should have same length as the input.
         returnType = inputType;
-        VarcharTypeParams varcharParams = new VarcharTypeParams();
-        varcharParams.setLength(
+        VarcharTypeInfo varcharTypeInfo = TypeInfoFactory.getVarcharTypeInfo(
             GenericUDFUtils.StringHelper.getFixedStringSizeForType(argumentOI));
         outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-            argumentOI);
+            varcharTypeInfo);
         break;
       default:
         returnType = PrimitiveCategory.STRING;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java Mon Oct  7 06:39:52 2013
@@ -32,18 +32,18 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.IdentityConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -52,7 +52,7 @@ import org.apache.hadoop.io.Text;
 public final class GenericUDFUtils {
   /**
    * Checks if b is the first byte of a UTF-8 character.
-   * 
+   *
    */
   public static boolean isUtfStartByte(byte b) {
     return (b & 0xC0) != 0x80;
@@ -60,15 +60,15 @@ public final class GenericUDFUtils {
 
   /**
    * This class helps to find the return ObjectInspector for a GenericUDF.
-   * 
+   *
    * In many cases like CASE and IF, the GenericUDF is returning a value out of
    * several possibilities. However these possibilities may not always have the
    * same ObjectInspector.
-   * 
+   *
    * This class will help detect whether all possibilities have exactly the same
    * ObjectInspector. If not, then we need to convert the Objects to the same
    * ObjectInspector.
-   * 
+   *
    * A special case is when some values are constant NULL. In this case we can
    * use the same ObjectInspector.
    */
@@ -92,7 +92,7 @@ public final class GenericUDFUtils {
     /**
      * Update returnObjectInspector and valueInspectorsAreTheSame based on the
      * ObjectInspector seen.
-     * 
+     *
      * @return false if there is a type mismatch
      */
     public boolean update(ObjectInspector oi) throws UDFArgumentTypeException {
@@ -403,12 +403,8 @@ public final class GenericUDFUtils {
       // TODO: we can support date, int, .. any types which would have a fixed length value
       switch (poi.getPrimitiveCategory()) {
         case VARCHAR:
-          VarcharTypeParams varcharParams = null;
-          varcharParams = (VarcharTypeParams) poi.getTypeParams();
-          if (varcharParams == null || varcharParams.length < 0) {
-            throw new UDFArgumentException("varchar type used without type params");
-          }
-          return varcharParams.length;
+          VarcharTypeInfo typeInfo = (VarcharTypeInfo) poi.getTypeInfo();
+          return typeInfo.getLength();
         default:
           throw new UDFArgumentException("No fixed size for type " + poi.getTypeName());
       }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java Mon Oct  7 06:39:52 2013
@@ -62,10 +62,10 @@ public class TestGenericUDFMacro {
     inspectors = new ObjectInspector[] {
         PrimitiveObjectInspectorFactory.
           getPrimitiveWritableConstantObjectInspector(
-            PrimitiveObjectInspector.PrimitiveCategory.INT, x),
+              TypeInfoFactory.intTypeInfo, x),
         PrimitiveObjectInspectorFactory.
           getPrimitiveWritableConstantObjectInspector(
-            PrimitiveObjectInspector.PrimitiveCategory.INT, y),
+              TypeInfoFactory.intTypeInfo, y),
     };
     arguments = new DeferredObject[] {
         new DeferredJavaObject(x),

Modified: hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_1.q.out?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_1.q.out Mon Oct  7 06:39:52 2013
@@ -2,4 +2,4 @@ PREHOOK: query: drop table if exists inv
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table if exists invalid_varchar_length_1
 POSTHOOK: type: DROPTABLE
-FAILED: SemanticException org.apache.hadoop.hive.serde2.SerDeException: Length 1000000 exceeds max varchar length of 65535
+FAILED: RuntimeException Varchar length 1000000 out of allowed range [1, 65535]

Modified: hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_2.q.out?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_2.q.out Mon Oct  7 06:39:52 2013
@@ -1 +1 @@
-FAILED: RuntimeException Error creating type parameters for varchar(100000): org.apache.hadoop.hive.serde2.SerDeException: Error creating type params for varchar: org.apache.hadoop.hive.serde2.SerDeException: Length 100000 exceeds max varchar length of 65535
+FAILED: RuntimeException Varchar length 100000 out of allowed range [1, 65535]

Modified: hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_3.q.out?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/invalid_varchar_length_3.q.out Mon Oct  7 06:39:52 2013
@@ -2,4 +2,4 @@ PREHOOK: query: drop table if exists inv
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table if exists invalid_varchar_length_3
 POSTHOOK: type: DROPTABLE
-FAILED: SemanticException org.apache.hadoop.hive.serde2.SerDeException: VARCHAR length must be positive
+FAILED: RuntimeException Varchar length 0 out of allowed range [1, 65535]

Modified: hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml Mon Oct  7 06:39:52 2013
@@ -843,7 +843,7 @@
                        <string>_col0</string> 
                       </void> 
                       <void property="type"> 
-                       <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                       <object class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
                         <void property="typeName"> 
                          <string>void</string> 
                         </void> 
@@ -885,18 +885,10 @@
                 <void property="childExprs"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <object class="org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc"> 
-                    <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
-                    </void> 
-                   </object> 
+                   <object class="org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc"/> 
                   </void> 
                   <void method="add"> 
-                   <object class="org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc"> 
-                    <void property="typeInfo"> 
-                     <object idref="PrimitiveTypeInfo1"/> 
-                    </void> 
-                   </object> 
+                   <object class="org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc"/> 
                   </void> 
                  </object> 
                 </void> 

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java Mon Oct  7 06:39:52 2013
@@ -35,14 +35,12 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
@@ -126,41 +124,16 @@ public class RegexSerDe extends Abstract
     List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
     for (int c = 0; c < numColumns; c++) {
       TypeInfo typeInfo = columnTypes.get(c);
-      String typeName = typeInfo.getTypeName();
-      if (typeName.equals(serdeConstants.STRING_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
-      } else if (typeName.equals(serdeConstants.TINYINT_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaByteObjectInspector);
-      } else if (typeName.equals(serdeConstants.SMALLINT_TYPE_NAME)) {
-       columnOIs.add(PrimitiveObjectInspectorFactory.javaShortObjectInspector);
-      } else if (typeName.equals(serdeConstants.INT_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
-      } else if (typeName.equals(serdeConstants.BIGINT_TYPE_NAME)) {
-       columnOIs.add(PrimitiveObjectInspectorFactory.javaLongObjectInspector);
-      } else if (typeName.equals(serdeConstants.FLOAT_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaFloatObjectInspector);
-      } else if (typeName.equals(serdeConstants.DOUBLE_TYPE_NAME)) {
-       columnOIs.add(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
-      } else if (typeName.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
-      } else if (typeName.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaTimestampObjectInspector);
-      } else if (typeName.equals(serdeConstants.DATE_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaDateObjectInspector);
-      } else if (typeName.equals(serdeConstants.DECIMAL_TYPE_NAME)) {
-        columnOIs.add(PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector);
-      }  else if (typeInfo instanceof PrimitiveTypeInfo
-          &&
-          ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.VARCHAR) {
-        VarcharTypeParams varcharParams = (VarcharTypeParams)
-            ParameterizedPrimitiveTypeUtils.getTypeParamsFromTypeInfo(typeInfo);
-        columnOIs.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
-            (PrimitiveTypeInfo) typeInfo));
+      if (typeInfo instanceof PrimitiveTypeInfo) {
+        PrimitiveTypeInfo pti = (PrimitiveTypeInfo) columnTypes.get(c);
+        AbstractPrimitiveJavaObjectInspector oi = 
+            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
+        columnOIs.add(oi);
       } else {
-         throw new SerDeException(getClass().getName()
-         + " doesn't allow column [" + c + "] named "
-         + columnNames.get(c) + " with type " + columnTypes.get(c));
-       }
+        throw new SerDeException(getClass().getName()
+            + " doesn't allow column [" + c + "] named "
+            + columnNames.get(c) + " with type " + columnTypes.get(c));
+      }
      }
 
     // StandardStruct uses ArrayList to store the row.
@@ -262,12 +235,8 @@ public class RegexSerDe extends Abstract
           HiveDecimal bd;
           bd = new HiveDecimal(t);
           row.set(c, bd);
-        } else if (typeInfo instanceof PrimitiveTypeInfo
-            &&
-            ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.VARCHAR) {
-          VarcharTypeParams varcharParams = (VarcharTypeParams)
-              ParameterizedPrimitiveTypeUtils.getTypeParamsFromTypeInfo(typeInfo);
-          HiveVarchar hv = new HiveVarchar(t, varcharParams != null ? varcharParams.length : -1);
+        } else if (typeInfo instanceof VarcharTypeInfo) {
+          HiveVarchar hv = new HiveVarchar(t, ((VarcharTypeInfo)typeInfo).getLength());
           row.set(c, hv);
         }
       } catch (RuntimeException e) {

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java Mon Oct  7 06:39:52 2013
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.avro.Schema;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -25,15 +28,11 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * An AvroObjectInspectorGenerator takes an Avro schema and creates the three
  * data structures Hive needs to work with Avro-encoded data:
@@ -56,9 +55,10 @@ class AvroObjectInspectorGenerator {
   }
 
   private void verifySchemaIsARecord(Schema schema) throws SerDeException {
-    if(!schema.getType().equals(Schema.Type.RECORD))
+    if(!schema.getType().equals(Schema.Type.RECORD)) {
       throw new AvroSerdeException("Schema for table must be of type RECORD. " +
           "Received type: " + schema.getType());
+    }
   }
 
   public List<String> getColumnNames() {
@@ -86,8 +86,9 @@ class AvroObjectInspectorGenerator {
   private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
     // We don't need to do the check for U[T,Null] here because we'll give the real type
     // at deserialization and the object inspector will never see the actual union.
-    if(!supportedCategories(ti))
+    if(!supportedCategories(ti)) {
       throw new AvroSerdeException("Don't yet support this type: " + ti);
+    }
     ObjectInspector result;
     switch(ti.getCategory()) {
       case PRIMITIVE:

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Mon Oct  7 06:39:52 2013
@@ -20,8 +20,6 @@ package org.apache.hadoop.hive.serde2.bi
 
 import java.io.IOException;
 import java.math.BigInteger;
-import java.nio.ByteBuffer;
-import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -33,7 +31,6 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
@@ -42,8 +39,8 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -60,24 +57,22 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils.HiveVarcharSerDeHelper;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -531,11 +526,7 @@ public class BinarySortableSerDe extends
   }
 
   static int getVarcharMaxLength(TypeInfo type) {
-    VarcharTypeParams typeParams = (VarcharTypeParams) ((PrimitiveTypeInfo) type).getTypeParams();
-    if (typeParams != null ) {
-      return typeParams.length;
-    }
-    return -1;
+    return ((VarcharTypeInfo)type).getLength();
   }
 
   static Text deserializeText(InputByteBuffer buffer, boolean invert, Text r)

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Mon Oct  7 06:39:52 2013
@@ -38,7 +38,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
 import org.apache.hadoop.hive.serde2.thrift.ConfigurableTProtocol;
 import org.apache.hadoop.hive.serde2.thrift.TReflectionUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -74,6 +73,7 @@ public class DynamicSerDe extends Abstra
 
   TIOStreamTransport tios;
 
+  @Override
   public void initialize(Configuration job, Properties tbl) throws SerDeException {
     try {
 
@@ -146,6 +146,7 @@ public class DynamicSerDe extends Abstra
 
   Object deserializeReuse = null;
 
+  @Override
   public Object deserialize(Writable field) throws SerDeException {
     try {
       if (field instanceof Text) {
@@ -177,7 +178,7 @@ public class DynamicSerDe extends Abstra
     } else if (bt.isPrimitive()) {
       PrimitiveTypeEntry pte = PrimitiveObjectInspectorUtils
           .getTypeEntryFromPrimitiveJavaClass(bt.getRealType());
-      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pte);
+      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pte.primitiveCategory);
     } else {
       // Must be a struct
       DynamicSerDeStructBase btStruct = (DynamicSerDeStructBase) bt;
@@ -196,16 +197,19 @@ public class DynamicSerDe extends Abstra
     }
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return dynamicSerDeStructBaseToObjectInspector(bt);
   }
 
+  @Override
   public Class<? extends Writable> getSerializedClass() {
     return BytesWritable.class;
   }
 
   BytesWritable ret = new BytesWritable();
 
+  @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
     try {
       bos_.reset();
@@ -220,6 +224,7 @@ public class DynamicSerDe extends Abstra
   }
 
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java Mon Oct  7 06:39:52 2013
@@ -54,7 +54,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -219,7 +218,6 @@ public final class LazyFactory {
     ObjectInspector.Category c = typeInfo.getCategory();
     switch (c) {
     case PRIMITIVE:
-      BaseTypeParams typeParams = ((PrimitiveTypeInfo)typeInfo).getTypeParams();
       return LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(
           (PrimitiveTypeInfo) typeInfo, escaped, escapeChar);
     case MAP:

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java Mon Oct  7 06:39:52 2013
@@ -23,7 +23,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -39,11 +39,7 @@ public class LazyHiveVarchar extends
 
   public LazyHiveVarchar(LazyHiveVarcharObjectInspector oi) {
     super(oi);
-    VarcharTypeParams typeParams = (VarcharTypeParams)oi.getTypeParams();
-    if (typeParams == null) {
-      throw new RuntimeException("varchar type used without type params");
-    }
-    maxLength = typeParams.getLength();
+    maxLength = ((VarcharTypeInfo)oi.getTypeInfo()).getLength();
     data = new HiveVarcharWritable();
   }
 

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Mon Oct  7 06:39:52 2013
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
@@ -45,7 +46,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.BytesWritable;

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.la
 
 import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -31,8 +31,8 @@ public abstract class AbstractPrimitiveL
   protected AbstractPrimitiveLazyObjectInspector() {
     super();
   }
-  protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeEntry typeEntry) {
-    super(typeEntry);
+  protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeInfo typeInfo) {
+    super(typeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.serde2.la
 import org.apache.hadoop.hive.serde2.lazy.LazyBinary;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BytesWritable;
 
 public class LazyBinaryObjectInspector extends
@@ -29,7 +29,7 @@ public class LazyBinaryObjectInspector e
     BinaryObjectInspector {
 
   public LazyBinaryObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.binaryTypeEntry);
+    super(TypeInfoFactory.binaryTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.la
 
 import org.apache.hadoop.hive.serde2.lazy.LazyBoolean;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BooleanWritable;
 
 /**
@@ -30,7 +30,7 @@ public class LazyBooleanObjectInspector 
     BooleanObjectInspector {
 
   LazyBooleanObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.booleanTypeEntry);
+    super(TypeInfoFactory.booleanTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyByteObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyByteObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyByteObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyByteObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.serde2.la
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
  * A WritableByteObjectInspector inspects a ByteWritable Object.
@@ -30,7 +30,7 @@ public class LazyByteObjectInspector ext
     ByteObjectInspector {
 
   LazyByteObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.byteTypeEntry);
+    super(TypeInfoFactory.byteTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -22,7 +22,7 @@ import java.sql.Date;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyDate;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
  * A WritableDateObjectInspector inspects a DateWritable Object.
@@ -32,7 +32,7 @@ public class LazyDateObjectInspector
     implements DateObjectInspector {
 
   protected LazyDateObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.dateTypeEntry);
+    super(TypeInfoFactory.dateTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.serde2.la
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyDouble;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
  * A WritableDoubleObjectInspector inspects a DoubleWritable Object.
@@ -30,7 +30,7 @@ public class LazyDoubleObjectInspector e
     DoubleObjectInspector {
 
   LazyDoubleObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.doubleTypeEntry);
+    super(TypeInfoFactory.doubleTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyFloatObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyFloatObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyFloatObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.serde2.la
 
 import org.apache.hadoop.hive.serde2.lazy.LazyFloat;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.FloatWritable;
 
 /**
@@ -30,7 +30,7 @@ public class LazyFloatObjectInspector ex
     FloatObjectInspector {
 
   LazyFloatObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.floatTypeEntry);
+    super(TypeInfoFactory.floatTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveDecimalObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -22,14 +22,14 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyHiveDecimal;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 public class LazyHiveDecimalObjectInspector
     extends AbstractPrimitiveLazyObjectInspector<HiveDecimalWritable>
     implements HiveDecimalObjectInspector {
 
   protected LazyHiveDecimalObjectInspector() {
-    super(PrimitiveObjectInspectorUtils.decimalTypeEntry);
+    super(TypeInfoFactory.decimalTypeInfo);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java?rev=1529771&r1=1529770&r2=1529771&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java Mon Oct  7 06:39:52 2013
@@ -22,9 +22,8 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharUtils;
 
 public class LazyHiveVarcharObjectInspector
     extends AbstractPrimitiveLazyObjectInspector<HiveVarcharWritable>
@@ -34,12 +33,8 @@ public class LazyHiveVarcharObjectInspec
   public LazyHiveVarcharObjectInspector() {
   }
 
-  public LazyHiveVarcharObjectInspector(PrimitiveTypeEntry typeEntry) {
-    super(typeEntry);
-    if (typeEntry.primitiveCategory != PrimitiveCategory.VARCHAR) {
-      throw new RuntimeException(
-          "TypeEntry of type varchar expected, got " + typeEntry.primitiveCategory);
-    }
+  public LazyHiveVarcharObjectInspector(VarcharTypeInfo typeInfo) {
+    super(typeInfo);
   }
 
   @Override
@@ -60,10 +55,9 @@ public class LazyHiveVarcharObjectInspec
     }
 
     HiveVarchar ret = ((LazyHiveVarchar) o).getWritableObject().getHiveVarchar();
-    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
-    if (!ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        ret, typeParams)) {
-      HiveVarchar newValue = new HiveVarchar(ret, typeParams.length);
+    if (!VarcharUtils.doesPrimitiveMatchTypeParams(
+        ret, (VarcharTypeInfo)typeInfo)) {
+      HiveVarchar newValue = new HiveVarchar(ret, ((VarcharTypeInfo)typeInfo).getLength());
       return newValue;
     }
     return ret;