You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by vi...@apache.org on 2018/03/12 03:52:26 UTC

[5/5] hive git commit: HIVE-17580 : Remove dependency of get_fields_with_environment_context API on serde (Vihang Karajgaonkar, reviewed by Alan Gates)

HIVE-17580 : Remove dependency of get_fields_with_environment_context API on serde (Vihang Karajgaonkar, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/40ee74eb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/40ee74eb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/40ee74eb

Branch: refs/heads/standalone-metastore
Commit: 40ee74ebc9a8b06bd8f3b2d7f625b4da23dbff87
Parents: c4d2285
Author: Vihang Karajgaonkar <vi...@cloudera.com>
Authored: Tue Feb 27 21:32:47 2018 -0800
Committer: Vihang Karajgaonkar <vi...@cloudera.com>
Committed: Sun Mar 11 20:49:37 2018 -0700

----------------------------------------------------------------------
 .../accumulo/serde/AccumuloRowSerializer.java   |   2 +-
 .../hive/contrib/serde2/TypedBytesSerDe.java    |   2 +-
 .../hadoop/hive/hbase/ColumnMappings.java       |  26 +-
 .../hadoop/hive/hbase/HBaseStorageHandler.java  |   2 +-
 .../hcatalog/data/schema/HCatSchemaUtils.java   |   4 +-
 .../hive/llap/io/api/impl/LlapRecordReader.java |   2 +-
 .../metastore/SerDeStorageSchemaReader.java     |   4 +
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  26 +-
 .../hive/ql/exec/persistence/MapJoinKey.java    |   2 +-
 .../hive/ql/exec/vector/VectorAssignRow.java    |   7 +-
 .../ql/exec/vector/VectorDeserializeRow.java    |   2 +-
 .../hive/ql/exec/vector/VectorExtractRow.java   |   2 +-
 .../hive/ql/exec/vector/VectorSerializeRow.java |   2 +-
 .../ql/exec/vector/VectorizationContext.java    |  16 +-
 .../VectorMapJoinGenerateResultOperator.java    |   2 +-
 .../ql/exec/vector/udf/VectorUDFArgDesc.java    |   2 +-
 .../io/parquet/convert/HiveSchemaConverter.java |  10 +-
 .../io/parquet/convert/HiveStructConverter.java |   7 +-
 .../parquet/read/DataWritableReadSupport.java   |   2 +-
 .../serde/ArrayWritableObjectInspector.java     |   6 +-
 .../ql/io/parquet/serde/ParquetHiveSerDe.java   |   2 +-
 .../hive/ql/io/sarg/ConvertAstToSearchArg.java  |   2 +-
 .../hive/ql/optimizer/ColumnPrunerProcCtx.java  |   4 +-
 .../optimizer/ConstantPropagateProcFactory.java |   4 +-
 ...tedDynPartitionTimeGranularityOptimizer.java |   2 +-
 .../calcite/translator/ExprNodeConverter.java   |   2 +-
 .../calcite/translator/RexNodeConverter.java    |   2 +-
 .../ql/optimizer/pcr/PcrExprProcFactory.java    |   2 +-
 .../hive/ql/optimizer/physical/Vectorizer.java  |  14 +-
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |   2 +-
 .../apache/hadoop/hive/ql/parse/ParseUtils.java |   3 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   4 +-
 .../hive/ql/parse/TypeCheckProcFactory.java     |   8 +-
 .../hive/ql/plan/ExprNodeConstantDesc.java      |   4 +-
 .../hive/ql/plan/VectorPartitionConversion.java |   4 +-
 .../apache/hadoop/hive/ql/stats/StatsUtils.java |   2 +-
 .../hive/ql/udf/generic/GenericUDAFAverage.java |   2 +-
 .../generic/GenericUDAFBinarySetFunctions.java  |   4 +-
 .../ql/udf/generic/GenericUDAFComputeStats.java |   2 +-
 .../udf/generic/GenericUDAFContextNGrams.java   |  10 +-
 .../ql/udf/generic/GenericUDAFCorrelation.java  |   4 +-
 .../ql/udf/generic/GenericUDAFCovariance.java   |   4 +-
 .../generic/GenericUDAFCovarianceSample.java    |   4 +-
 .../generic/GenericUDAFHistogramNumeric.java    |   4 +-
 .../hive/ql/udf/generic/GenericUDAFStd.java     |   2 +-
 .../ql/udf/generic/GenericUDAFStdSample.java    |   2 +-
 .../hive/ql/udf/generic/GenericUDAFSum.java     |   4 +-
 .../udf/generic/GenericUDAFSumEmptyIsZero.java  |   2 +-
 .../ql/udf/generic/GenericUDAFVariance.java     |   2 +-
 .../udf/generic/GenericUDAFVarianceSample.java  |   2 +-
 .../hive/ql/udf/generic/GenericUDAFnGrams.java  |   8 +-
 .../hive/ql/udf/generic/GenericUDFUtils.java    |   2 +-
 .../hive/ql/exec/vector/TestVectorSerDeRow.java |   2 +-
 .../ql/exec/vector/VectorRandomRowSource.java   |   4 +-
 .../mapjoin/fast/CheckFastRowHashMap.java       |   2 +-
 .../results/clientnegative/avro_decimal.q.out   |   2 +-
 serde/pom.xml                                   |   5 +
 .../apache/hadoop/hive/serde2/SerDeUtils.java   |   1 -
 .../avro/AvroObjectInspectorGenerator.java      |   9 +-
 .../hadoop/hive/serde2/avro/AvroSerializer.java |   6 +-
 .../hadoop/hive/serde2/avro/InstanceCache.java  |  72 ---
 .../serde2/avro/SchemaResolutionProblem.java    |  59 --
 .../hive/serde2/avro/SchemaToHiveTypeInfo.java  |  18 +
 .../hive/serde2/avro/SchemaToTypeInfo.java      | 283 ---------
 .../hive/serde2/avro/TypeInfoToSchema.java      | 283 ---------
 .../fast/BinarySortableDeserializeRead.java     |   2 +-
 .../hive/serde2/fast/DeserializeRead.java       |   2 +-
 .../hadoop/hive/serde2/lazy/LazyFactory.java    |   2 +-
 .../lazy/fast/LazySimpleDeserializeRead.java    |   2 +-
 .../fast/LazyBinaryDeserializeRead.java         |   2 +-
 .../serde2/objectinspector/ObjectInspector.java |  87 ++-
 .../hive/serde2/typeinfo/BaseCharTypeInfo.java  |   5 +
 .../hive/serde2/typeinfo/DecimalTypeInfo.java   |   5 +
 .../hive/serde2/typeinfo/ListTypeInfo.java      |  93 ---
 .../hive/serde2/typeinfo/MapTypeInfo.java       | 109 ----
 .../hive/serde2/typeinfo/PrimitiveTypeInfo.java |  48 +-
 .../hive/serde2/typeinfo/StructTypeInfo.java    | 151 -----
 .../typeinfo/TimestampLocalTZTypeInfo.java      |   5 +
 .../hadoop/hive/serde2/typeinfo/TypeInfo.java   |  85 ---
 .../hive/serde2/typeinfo/TypeInfoFactory.java   |  52 +-
 .../hive/serde2/typeinfo/TypeInfoUtils.java     | 344 +----------
 .../hive/serde2/typeinfo/UnionTypeInfo.java     | 108 ----
 .../hive/serde2/typeinfo/VarcharTypeInfo.java   |   1 -
 .../hive/serde2/SerdeRandomRowSource.java       |   4 +-
 .../avro/TestAvroObjectInspectorGenerator.java  |   7 +-
 .../hive/serde2/avro/TestInstanceCache.java     |  95 ---
 .../hive/serde2/avro/TestSchemaToTypeInfo.java  |   7 +-
 .../binarysortable/TestBinarySortableFast.java  |   2 +-
 .../hive/serde2/lazy/TestLazySimpleFast.java    |   4 +-
 .../serde2/lazybinary/TestLazyBinaryFast.java   |   2 +-
 .../TestStandardObjectInspectors.java           |   2 +-
 .../hive/metastore/AvroStorageSchemaReader.java |  48 ++
 .../hadoop/hive/metastore/ColumnType.java       |   5 +-
 .../metastore/DefaultStorageSchemaReader.java   | 108 +++-
 .../hive/metastore/StorageSchemaReader.java     |   2 +-
 .../utils/AvroFieldSchemaGenerator.java         |  97 +++
 .../hive/metastore/utils/AvroSchemaUtils.java   | 366 ++++++++++++
 .../metastore/utils/StorageSchemaUtils.java     |  37 ++
 .../hive/serde2/avro/AvroSerDeConstants.java    |  45 ++
 .../hadoop/hive/serde2/avro/InstanceCache.java  |  72 +++
 .../serde2/avro/SchemaResolutionProblem.java    |  62 ++
 .../serde2/avro/SchemaToMetastoreTypeInfo.java  |  31 +
 .../hive/serde2/avro/SchemaToTypeInfo.java      | 294 +++++++++
 .../hive/serde2/avro/TypeInfoToSchema.java      | 277 +++++++++
 .../hive/serde2/typeinfo/ITypeInfoFactory.java  |  71 +++
 .../hive/serde2/typeinfo/ListTypeInfo.java      |  94 +++
 .../hive/serde2/typeinfo/MapTypeInfo.java       | 110 ++++
 .../typeinfo/MetastorePrimitiveTypeInfo.java    |  91 +++
 .../serde2/typeinfo/MetastoreTypeCategory.java  |  37 ++
 .../typeinfo/MetastoreTypeInfoFactory.java      | 128 ++++
 .../serde2/typeinfo/MetastoreTypeInfoUtils.java |  59 ++
 .../hive/serde2/typeinfo/StructTypeInfo.java    | 150 +++++
 .../hadoop/hive/serde2/typeinfo/TypeInfo.java   |  82 +++
 .../hive/serde2/typeinfo/TypeInfoParser.java    | 343 +++++++++++
 .../hive/serde2/typeinfo/UnionTypeInfo.java     | 107 ++++
 .../reader/TestDefaultStorageSchemaReader.java  | 598 +++++++++++++++++++
 .../hive/serde2/avro/TestInstanceCache.java     |  99 +++
 117 files changed, 3747 insertions(+), 1885 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
index 7ad6a45..9072c2e 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
@@ -208,7 +208,7 @@ public class AccumuloRowSerializer {
     TypeInfo rowIdTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(rowIdMappingType);
 
     if (!rowIdFieldOI.getCategory().equals(ObjectInspector.Category.PRIMITIVE)
-        && rowIdTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE) {
+        && rowIdTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       // we always serialize the String type using the escaped algorithm for LazyString
       writeString(output, SerDeUtils.getJSONString(rowId, rowIdFieldOI),
           PrimitiveObjectInspectorFactory.javaStringObjectInspector);

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
----------------------------------------------------------------------
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
index bb6d779..3d2774f 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
@@ -125,7 +125,7 @@ public class TypedBytesSerDe extends AbstractSerDe {
 
     // All columns have to be primitive.
     for (int c = 0; c < numColumns; c++) {
-      if (columnTypes.get(c).getCategory() != Category.PRIMITIVE) {
+      if (columnTypes.get(c).getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         throw new SerDeException(getClass().getName()
             + " only accepts primitive columns, but column[" + c + "] named "
             + columnNames.get(c) + " has category "

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/hbase-handler/src/java/org/apache/hadoop/hive/hbase/ColumnMappings.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/ColumnMappings.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/ColumnMappings.java
index f1887b5..e4a4f92 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/ColumnMappings.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/ColumnMappings.java
@@ -121,9 +121,9 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
       colMap.columnType = columnTypes.get(i);
       if (colMap.qualifierName == null && !colMap.hbaseRowKey && !colMap.hbaseTimestamp) {
         TypeInfo typeInfo = columnTypes.get(i);
-        if ((typeInfo.getCategory() != ObjectInspector.Category.MAP) ||
+        if ((typeInfo.getCategory() != ObjectInspector.Category.MAP.toMetastoreTypeCategory()) ||
             (((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getCategory()
-                != ObjectInspector.Category.PRIMITIVE)) {
+                != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory())) {
 
           throw new SerDeException(
               serdeName + ": hbase column family '" + colMap.familyName
@@ -192,24 +192,24 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
       if (storageInfo == null) {
 
         // use the table default storage specification
-        if (colType.getCategory() == ObjectInspector.Category.PRIMITIVE) {
+        if (colType.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
           if (!colType.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
             colMap.binaryStorage.add(tableBinaryStorage);
           } else {
             colMap.binaryStorage.add(false);
           }
-        } else if (colType.getCategory() == ObjectInspector.Category.MAP) {
+        } else if (colType.getCategory() == ObjectInspector.Category.MAP.toMetastoreTypeCategory()) {
           TypeInfo keyTypeInfo = ((MapTypeInfo) colType).getMapKeyTypeInfo();
           TypeInfo valueTypeInfo = ((MapTypeInfo) colType).getMapValueTypeInfo();
 
-          if (keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE &&
+          if (keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory() &&
               !keyTypeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
             colMap.binaryStorage.add(tableBinaryStorage);
           } else {
             colMap.binaryStorage.add(false);
           }
 
-          if (valueTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE &&
+          if (valueTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory() &&
               !valueTypeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
             colMap.binaryStorage.add(tableBinaryStorage);
           } else {
@@ -223,7 +223,7 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
         // we have a storage specification for a primitive column type
         String storageOption = storageInfo[0];
 
-        if ((colType.getCategory() == ObjectInspector.Category.MAP) ||
+        if ((colType.getCategory() == ObjectInspector.Category.MAP.toMetastoreTypeCategory()) ||
             !(storageOption.equals("-") || "string".startsWith(storageOption) ||
                 "binary".startsWith(storageOption))) {
           throw new SerDeException("Error: A column storage specification is one of the following:"
@@ -232,7 +232,7 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
               + colMap.columnName);
         }
 
-        if (colType.getCategory() == ObjectInspector.Category.PRIMITIVE &&
+        if (colType.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory() &&
             !colType.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
 
           if ("-".equals(storageOption)) {
@@ -252,7 +252,7 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
         String keyStorage = storageInfo[0];
         String valStorage = storageInfo[1];
 
-        if ((colType.getCategory() != ObjectInspector.Category.MAP) ||
+        if ((colType.getCategory() != ObjectInspector.Category.MAP.toMetastoreTypeCategory()) ||
             !(keyStorage.equals("-") || "string".startsWith(keyStorage) ||
                 "binary".startsWith(keyStorage)) ||
             !(valStorage.equals("-") || "string".startsWith(valStorage) ||
@@ -270,7 +270,7 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
         TypeInfo keyTypeInfo = ((MapTypeInfo) colType).getMapKeyTypeInfo();
         TypeInfo valueTypeInfo = ((MapTypeInfo) colType).getMapValueTypeInfo();
 
-        if (keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE &&
+        if (keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory() &&
             !keyTypeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
 
           if (keyStorage.equals("-")) {
@@ -284,7 +284,7 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
           colMap.binaryStorage.add(false);
         }
 
-        if (valueTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE &&
+        if (valueTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory() &&
             !valueTypeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
           if (valStorage.equals("-")) {
             colMap.binaryStorage.add(tableBinaryStorage);
@@ -405,11 +405,11 @@ public class ColumnMappings implements Iterable<ColumnMappings.ColumnMapping> {
     }
 
     public boolean isCategory(ObjectInspector.Category category) {
-      return columnType.getCategory() == category;
+      return columnType.getCategory() == category.toMetastoreTypeCategory();
     }
 
     public boolean isCategory(PrimitiveCategory category) {
-      return columnType.getCategory() == ObjectInspector.Category.PRIMITIVE &&
+      return columnType.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory() &&
           ((PrimitiveTypeInfo)columnType).getPrimitiveCategory() == category;
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index 8c9271c..94b4cdb 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -425,7 +425,7 @@ public class HBaseStorageHandler extends DefaultStorageHandler
         continue;
       }
       TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
-      if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
+      if (typeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory() && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
               ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
         // If the predicate is on a numeric column, and it specifies an
         // open range e.g. key < 20 , we do not support conversion, as negative

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java
index 999abcb..63bbada 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatSchemaUtils.java
@@ -116,7 +116,7 @@ public class HCatSchemaUtils {
   }
 
   private static HCatFieldSchema getHCatFieldSchema(String fieldName, TypeInfo fieldTypeInfo, String comment) throws HCatException {
-    Category typeCategory = fieldTypeInfo.getCategory();
+    Category typeCategory = Category.fromMetastoreTypeCategory(fieldTypeInfo.getCategory());
     HCatFieldSchema hCatFieldSchema;
     if (Category.PRIMITIVE == typeCategory) {
       hCatFieldSchema = new HCatFieldSchema(fieldName, (PrimitiveTypeInfo)fieldTypeInfo, comment);
@@ -157,7 +157,7 @@ public class HCatSchemaUtils {
   }
 
   public static HCatSchema getHCatSchema(TypeInfo typeInfo) throws HCatException {
-    Category typeCategory = typeInfo.getCategory();
+    Category typeCategory = Category.fromMetastoreTypeCategory(typeInfo.getCategory());
     HCatSchema hCatSchema;
     if (Category.PRIMITIVE == typeCategory) {
       hCatSchema = getStructSchemaBuilder().addField(new HCatFieldSchema(null, (PrimitiveTypeInfo)typeInfo, null)).build();

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
index d252279..1c4c418 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapRecordReader.java
@@ -219,7 +219,7 @@ class LlapRecordReader
     double totalWeight = 0;
     for (TypeInfo ti : typeInfos) {
       int colWeight = 1;
-      if (ti.getCategory() != Category.PRIMITIVE) {
+      if (ti.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         colWeight = COL_WEIGHT_COMPLEX;
       } else {
         PrimitiveTypeInfo pti = (PrimitiveTypeInfo)ti;

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java b/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
index 59bcd5c..05e1078 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/SerDeStorageSchemaReader.java
@@ -27,6 +27,10 @@ import org.apache.hadoop.hive.metastore.utils.StringUtils;
 
 import java.util.List;
 
+/**
+ * In order to use this Storage schema reader you should add the hive-serde jar in the classpath
+ * of the metastore.
+ */
 public class SerDeStorageSchemaReader implements StorageSchemaReader {
   @Override
   public List<FieldSchema> readSchema(Table tbl, EnvironmentContext envContext, Configuration conf)

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index f7801bb..d24803c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -741,7 +741,8 @@ public final class FunctionRegistry {
     if (a.equals(b)) {
       return a;
     }
-    if (a.getCategory() != Category.PRIMITIVE || b.getCategory() != Category.PRIMITIVE) {
+    if (a.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()
+        || b.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
       return null;
     }
     PrimitiveCategory pcA = ((PrimitiveTypeInfo)a).getPrimitiveCategory();
@@ -794,7 +795,8 @@ public final class FunctionRegistry {
     if (a.equals(b)) {
       return a;
     }
-    if (a.getCategory() != Category.PRIMITIVE || b.getCategory() != Category.PRIMITIVE) {
+    if (a.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()
+        || b.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
       return null;
     }
     PrimitiveCategory pcA = ((PrimitiveTypeInfo)a).getPrimitiveCategory();
@@ -849,7 +851,8 @@ public final class FunctionRegistry {
   }
 
   public static PrimitiveCategory getPrimitiveCommonCategory(TypeInfo a, TypeInfo b) {
-    if (a.getCategory() != Category.PRIMITIVE || b.getCategory() != Category.PRIMITIVE) {
+    if (a.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()
+        || b.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
       return null;
     }
 
@@ -928,7 +931,8 @@ public final class FunctionRegistry {
       return getTypeInfoForPrimitiveCategory((PrimitiveTypeInfo)a, (PrimitiveTypeInfo)b, commonCat);
     }
     // It is not primitive; check if it is a struct and we can infer a common class
-    if (a.getCategory() == Category.STRUCT && b.getCategory() == Category.STRUCT) {
+    if (a.getCategory() == Category.STRUCT.toMetastoreTypeCategory()
+        && b.getCategory() == Category.STRUCT.toMetastoreTypeCategory()) {
       return getCommonClassForStruct((StructTypeInfo)a, (StructTypeInfo)b);
     }
     return null;
@@ -1109,8 +1113,8 @@ public final class FunctionRegistry {
       // passing null matches everything
       return 0;
     }
-    if (argumentPassed.getCategory().equals(Category.LIST)
-        && argumentAccepted.getCategory().equals(Category.LIST)) {
+    if (argumentPassed.getCategory().equals(Category.LIST.toMetastoreTypeCategory())
+        && argumentAccepted.getCategory().equals(Category.LIST.toMetastoreTypeCategory())) {
       // lists are compatible if and only-if the elements are compatible
       TypeInfo argumentPassedElement = ((ListTypeInfo) argumentPassed)
           .getListElementTypeInfo();
@@ -1118,8 +1122,8 @@ public final class FunctionRegistry {
           .getListElementTypeInfo();
       return matchCost(argumentPassedElement, argumentAcceptedElement, exact);
     }
-    if (argumentPassed.getCategory().equals(Category.MAP)
-        && argumentAccepted.getCategory().equals(Category.MAP)) {
+    if (argumentPassed.getCategory().equals(Category.MAP.toMetastoreTypeCategory())
+        && argumentAccepted.getCategory().equals(Category.MAP.toMetastoreTypeCategory())) {
       // lists are compatible if and only-if the elements are compatible
       TypeInfo argumentPassedKey = ((MapTypeInfo) argumentPassed)
           .getMapKeyTypeInfo();
@@ -1173,8 +1177,8 @@ public final class FunctionRegistry {
           // Check the affinity of the argument passed in with the accepted argument,
           // based on the PrimitiveGrouping
           TypeInfo passedType = argsPassedIter.next();
-          if (acceptedType.getCategory() == Category.PRIMITIVE
-              && passedType.getCategory() == Category.PRIMITIVE) {
+          if (acceptedType.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()
+              && passedType.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
             PrimitiveGrouping acceptedPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                 ((PrimitiveTypeInfo) acceptedType).getPrimitiveCategory());
             PrimitiveGrouping passedPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
@@ -1306,7 +1310,7 @@ public final class FunctionRegistry {
 
           boolean acceptedIsPrimitive = false;
           PrimitiveCategory acceptedPrimCat = PrimitiveCategory.UNKNOWN;
-          if (accepted.getCategory() == Category.PRIMITIVE) {
+          if (accepted.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
             acceptedIsPrimitive = true;
             acceptedPrimCat = ((PrimitiveTypeInfo) accepted).getPrimitiveCategory();
           }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
index 6504a5f..8685b8f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
@@ -94,7 +94,7 @@ public abstract class MapJoinKey {
   }
 
   public static boolean isSupportedField(TypeInfo typeInfo) {
-    if (typeInfo.getCategory() != Category.PRIMITIVE) return false; // not supported
+    if (typeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) return false; // not supported
     PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
     PrimitiveCategory pc = primitiveTypeInfo.getPrimitiveCategory();
     if (!SUPPORTED_PRIMITIVES.contains(pc)) return false; // not supported

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
index e96619c..dd0a2aa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspecto
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.MetastoreTypeCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.slf4j.Logger;
@@ -141,7 +142,7 @@ public class VectorAssignRow {
     isConvert[logicalColumnIndex] = false;
     projectionColumnNums[logicalColumnIndex] = projectionColumnNum;
     targetTypeInfos[logicalColumnIndex] = typeInfo;
-    if (typeInfo.getCategory() == Category.PRIMITIVE) {
+    if (typeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
       final PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
       final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory();
       switch (primitiveCategory) {
@@ -164,7 +165,7 @@ public class VectorAssignRow {
    */
   private void initConvertSourceEntry(int logicalColumnIndex, TypeInfo convertSourceTypeInfo) {
     isConvert[logicalColumnIndex] = true;
-    final Category convertSourceCategory = convertSourceTypeInfo.getCategory();
+    final Category convertSourceCategory = Category.fromMetastoreTypeCategory(convertSourceTypeInfo.getCategory());
     convertSourceOI[logicalColumnIndex] =
         TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(convertSourceTypeInfo);
 
@@ -654,7 +655,7 @@ public class VectorAssignRow {
       TypeInfo targetTypeInfo, ObjectInspector sourceObjectInspector,
       Writable convertTargetWritable, Object object) {
 
-    final Category targetCategory = targetTypeInfo.getCategory();
+    final Category targetCategory = Category.fromMetastoreTypeCategory(targetTypeInfo.getCategory());
     if (targetCategory == null) {
       /*
        * This is a column that we don't want (i.e. not included) -- we are done.

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 8ea625e..06d9913 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -265,7 +265,7 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
   }
 
   private Field allocateComplexField(TypeInfo sourceTypeInfo) {
-    final Category category = sourceTypeInfo.getCategory();
+    final Category category = Category.fromMetastoreTypeCategory(sourceTypeInfo.getCategory());
     switch (category) {
     case LIST:
       {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index 681d9ca..b296f63 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -201,7 +201,7 @@ public class VectorExtractRow {
       return null;
     }
 
-    final Category category = typeInfo.getCategory();
+    final Category category = Category.fromMetastoreTypeCategory(typeInfo.getCategory());
     switch (category) {
     case PRIMITIVE:
       {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index cb2efb7..22c8ad5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -106,7 +106,7 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
 
   private Field createField(TypeInfo typeInfo) {
     final Field field = new Field();
-    final Category category = typeInfo.getCategory();
+    final Category category = Category.fromMetastoreTypeCategory(typeInfo.getCategory());
     field.category = category;
     field.typeInfo = typeInfo;
     if (category == Category.PRIMITIVE) {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 8264e8a..b2e0ba6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -634,7 +634,7 @@ public class VectorizationContext {
       case FILTER:
         // Evaluate the column as a boolean, converting if necessary.
         TypeInfo typeInfo = exprDesc.getTypeInfo();
-        if (typeInfo.getCategory() == Category.PRIMITIVE &&
+        if (typeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory() &&
             ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.BOOLEAN) {
           expr = new SelectColumnIsTrue(columnNum);
         } else {
@@ -882,11 +882,11 @@ public class VectorizationContext {
       }
     } else if (genericUdf instanceof GenericUDFIn) {
       TypeInfo colTi = children.get(0).getTypeInfo();
-      if (colTi.getCategory() != Category.PRIMITIVE) {
+      if (colTi.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         return colTi; // Handled later, only struct will be supported.
       }
       TypeInfo opTi = GenericUDFUtils.deriveInType(children);
-      if (opTi == null || opTi.getCategory() != Category.PRIMITIVE) {
+      if (opTi == null || opTi.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         throw new HiveException("Cannot vectorize IN() - common type is " + opTi);
       }
       if (((PrimitiveTypeInfo)colTi).getPrimitiveCategory() !=
@@ -2162,7 +2162,7 @@ public class VectorizationContext {
     for (int f = 0; f < fieldCount; f++) {
       TypeInfo fieldTypeInfo = fieldTypeInfos.get(f);
       // Only primitive fields supports for now.
-      if (fieldTypeInfo.getCategory() != Category.PRIMITIVE) {
+      if (fieldTypeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         return null;
       }
 
@@ -2293,7 +2293,7 @@ public class VectorizationContext {
     String colType = colExpr.getTypeString();
     colType = VectorizationContext.mapTypeNameSynonyms(colType);
     TypeInfo colTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(colType);
-    Category category = colTypeInfo.getCategory();
+    Category category = Category.fromMetastoreTypeCategory(colTypeInfo.getCategory());
     if (category == Category.STRUCT) {
       return getStructInExpression(childExpr, colExpr, colTypeInfo, inChildren, mode, returnType);
     } else if (category != Category.PRIMITIVE) {
@@ -3010,7 +3010,7 @@ public class VectorizationContext {
 
       // Is output type a BOOLEAN?
       TypeInfo resultTypeInfo = expr.getTypeInfo();
-      if (resultTypeInfo.getCategory() == Category.PRIMITIVE &&
+      if (resultTypeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory() &&
           ((PrimitiveTypeInfo) resultTypeInfo).getPrimitiveCategory() == PrimitiveCategory.BOOLEAN) {
         isFilter = true;
       } else {
@@ -3285,13 +3285,13 @@ public class VectorizationContext {
   static String getScratchName(TypeInfo typeInfo) throws HiveException {
     // For now, leave DECIMAL precision/scale in the name so DecimalColumnVector scratch columns
     // don't need their precision/scale adjusted...
-    if (typeInfo.getCategory() == Category.PRIMITIVE &&
+    if (typeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory() &&
         ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.DECIMAL) {
       return typeInfo.getTypeName();
     }
 
     // And, for Complex Types, also leave the children types in place...
-    if (typeInfo.getCategory() != Category.PRIMITIVE) {
+    if (typeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
       return typeInfo.getTypeName();
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java
index 92ec1ee..fbba720 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java
@@ -447,7 +447,7 @@ public abstract class VectorMapJoinGenerateResultOperator extends VectorMapJoinC
     for (int i = 0; i < projectionSize; i++) {
       int projectedColumn = projectedColumns.get(i);
       if (batch.cols[projectedColumn] != null &&
-          inputObjInspectorsTypeInfos[i].getCategory() == Category.PRIMITIVE) {
+          inputObjInspectorsTypeInfos[i].getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
         // Only columns present in the batch and non-complex types.
         typeInfoList.add(inputObjInspectorsTypeInfos[i]);
         noNullsProjectionList.add(projectedColumn);

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
index 69a2bef..8ea5f27 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
@@ -54,7 +54,7 @@ public class VectorUDFArgDesc implements Serializable {
   public void setConstant(ExprNodeConstantDesc expr) {
     isConstant = true;
     if (expr != null) {
-      if (expr.getTypeInfo().getCategory() == Category.PRIMITIVE) {
+      if (expr.getTypeInfo().getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
         PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) expr.getTypeInfo())
             .getPrimitiveCategory();
         if (primitiveCategory == PrimitiveCategory.VOID) {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
index 302321c..7111534 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveSchemaConverter.java
@@ -58,7 +58,7 @@ public class HiveSchemaConverter {
 
   private static Type convertType(final String name, final TypeInfo typeInfo,
                                   final Repetition repetition) {
-    if (typeInfo.getCategory().equals(Category.PRIMITIVE)) {
+    if (typeInfo.getCategory().equals(Category.PRIMITIVE.toMetastoreTypeCategory())) {
       if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
         return Types.primitive(PrimitiveTypeName.BINARY, repetition).as(OriginalType.UTF8)
           .named(name);
@@ -107,13 +107,13 @@ public class HiveSchemaConverter {
       } else {
         throw new IllegalArgumentException("Unknown type: " + typeInfo);
       }
-    } else if (typeInfo.getCategory().equals(Category.LIST)) {
+    } else if (typeInfo.getCategory().equals(Category.LIST.toMetastoreTypeCategory())) {
       return convertArrayType(name, (ListTypeInfo) typeInfo);
-    } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
+    } else if (typeInfo.getCategory().equals(Category.STRUCT.toMetastoreTypeCategory())) {
       return convertStructType(name, (StructTypeInfo) typeInfo);
-    } else if (typeInfo.getCategory().equals(Category.MAP)) {
+    } else if (typeInfo.getCategory().equals(Category.MAP.toMetastoreTypeCategory())) {
       return convertMapType(name, (MapTypeInfo) typeInfo);
-    } else if (typeInfo.getCategory().equals(Category.UNION)) {
+    } else if (typeInfo.getCategory().equals(Category.UNION.toMetastoreTypeCategory())) {
       throw new UnsupportedOperationException("Union type not implemented");
     } else {
       throw new IllegalArgumentException("Unknown type: " + typeInfo);

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
index bdffdf4..beb75be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
@@ -77,7 +77,8 @@ public class HiveStructConverter extends HiveGroupConverter {
     converters = new Converter[selectedFieldCount];
     this.repeatedConverters = new ArrayList<Repeated>();
 
-    if (hiveTypeInfo != null && hiveTypeInfo.getCategory().equals(ObjectInspector.Category.STRUCT)) {
+    if (hiveTypeInfo != null && hiveTypeInfo.getCategory()
+        .equals(ObjectInspector.Category.STRUCT.toMetastoreTypeCategory())) {
       this.hiveFieldNames = ((StructTypeInfo) hiveTypeInfo).getAllStructFieldNames();
       this.hiveFieldTypeInfos = ((StructTypeInfo) hiveTypeInfo).getAllStructFieldTypeInfos();
     }
@@ -126,9 +127,9 @@ public class HiveStructConverter extends HiveGroupConverter {
   private TypeInfo getFieldTypeIgnoreCase(TypeInfo hiveTypeInfo, String fieldName, int fieldIndex) {
     if (hiveTypeInfo == null) {
       return null;
-    } else if (hiveTypeInfo.getCategory().equals(ObjectInspector.Category.STRUCT)) {
+    } else if (hiveTypeInfo.getCategory().equals(ObjectInspector.Category.STRUCT.toMetastoreTypeCategory())) {
       return getStructFieldTypeInfo(fieldName, fieldIndex);
-    } else if (hiveTypeInfo.getCategory().equals(ObjectInspector.Category.MAP)) {
+    } else if (hiveTypeInfo.getCategory().equals(ObjectInspector.Category.MAP.toMetastoreTypeCategory())) {
       //This cover the case where hive table may have map<key, value> but the data file is
       // of type array<struct<value1, value2>>
       //Using index in place of type name.

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
index 7f2a684..f69559c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
@@ -146,7 +146,7 @@ public class DataWritableReadSupport extends ReadSupport<ArrayWritable> {
           .named(fieldType.getName());
       case LIST:
         TypeInfo elemType = ((ListTypeInfo) colType).getListElementTypeInfo();
-        if (elemType.getCategory() == ObjectInspector.Category.STRUCT) {
+        if (elemType.getCategory() == ObjectInspector.Category.STRUCT.toMetastoreTypeCategory()) {
           Type subFieldType = fieldType.asGroupType().getType(0);
           if (!subFieldType.isPrimitive()) {
             String subFieldName = subFieldType.getName();

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
index d83376d..9f915a2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ArrayWritableObjectInspector.java
@@ -107,12 +107,12 @@ public class ArrayWritableObjectInspector extends SettableStructObjectInspector
       return ParquetPrimitiveInspectorFactory.parquetStringInspector;
     }  else if (typeInfo instanceof DecimalTypeInfo) {
       return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector((DecimalTypeInfo) typeInfo);
-    } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
+    } else if (typeInfo.getCategory().equals(Category.STRUCT.toMetastoreTypeCategory())) {
       return new ArrayWritableObjectInspector(false, (StructTypeInfo) typeInfo, (StructTypeInfo) prunedTypeInfo);
-    } else if (typeInfo.getCategory().equals(Category.LIST)) {
+    } else if (typeInfo.getCategory().equals(Category.LIST.toMetastoreTypeCategory())) {
       final TypeInfo subTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
       return new ParquetHiveArrayInspector(getObjectInspector(subTypeInfo, null));
-    } else if (typeInfo.getCategory().equals(Category.MAP)) {
+    } else if (typeInfo.getCategory().equals(Category.MAP.toMetastoreTypeCategory())) {
       final TypeInfo keyTypeInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
       final TypeInfo valueTypeInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
       if (keyTypeInfo.equals(TypeInfoFactory.stringTypeInfo) || keyTypeInfo.equals(TypeInfoFactory.byteTypeInfo)

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index e0018a5..86de2b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -254,7 +254,7 @@ public class ParquetHiveSerDe extends AbstractSerDe {
       this.selected = new boolean[typeInfo.getAllStructFieldTypeInfos().size()];
       for (int i = 0; i < typeInfo.getAllStructFieldTypeInfos().size(); ++i) {
         TypeInfo ti = typeInfo.getAllStructFieldTypeInfos().get(i);
-        if (ti.getCategory() == Category.STRUCT) {
+        if (ti.getCategory() == Category.STRUCT.toMetastoreTypeCategory()) {
           this.children.put(typeInfo.getAllStructFieldNames().get(i).toLowerCase(),
               new PrunedStructTypeInfo((StructTypeInfo) ti));
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
index 27fe828..0164085 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
@@ -112,7 +112,7 @@ public class ConvertAstToSearchArg {
    */
   private static BoxType getType(ExprNodeDesc expr) {
     TypeInfo type = expr.getTypeInfo();
-    if (type.getCategory() == ObjectInspector.Category.PRIMITIVE) {
+    if (type.getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       switch (((PrimitiveTypeInfo) type).getPrimitiveCategory()) {
         case BYTE:
         case SHORT:

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
index c2a2fb1..7eae2a7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
@@ -265,8 +265,8 @@ public class ColumnPrunerProcCtx implements NodeProcessorCtx {
     // Check cases for arr[i].f and map[key].v
     // For these we should not generate paths like arr.f or map.v
     // Otherwise we would have a mismatch between type info and path
-    if (ti.getCategory() != ObjectInspector.Category.LIST
-        && ti.getCategory() != ObjectInspector.Category.MAP) {
+    if (ti.getCategory() != ObjectInspector.Category.LIST.toMetastoreTypeCategory()
+        && ti.getCategory() != ObjectInspector.Category.MAP.toMetastoreTypeCategory()) {
       fn.addFieldNodes(pathToRoot);
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
index 54d52f8..40a3c39 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
@@ -912,7 +912,7 @@ public final class ConstantPropagateProcFactory {
             return null;
           }
         }
-        if (constant.getTypeInfo().getCategory() != Category.PRIMITIVE) {
+        if (constant.getTypeInfo().getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
           // nested complex types cannot be folded cleanly
           return null;
         }
@@ -932,7 +932,7 @@ public final class ConstantPropagateProcFactory {
           return null;
         }
         ExprNodeConstantDesc constant = (ExprNodeConstantDesc) evaluatedFn;
-        if (constant.getTypeInfo().getCategory() != Category.PRIMITIVE) {
+        if (constant.getTypeInfo().getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
           // nested complex types cannot be folded cleanly
           return null;
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java
index e3dee93..ab5edde 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionTimeGranularityOptimizer.java
@@ -229,7 +229,7 @@ public class SortedDynPartitionTimeGranularityOptimizer extends Transform {
         ExprNodeColumnDesc columnDesc = new ExprNodeColumnDesc(ci);
         descs.add(columnDesc);
         colNames.add(columnDesc.getExprString());
-        if (columnDesc.getTypeInfo().getCategory() == ObjectInspector.Category.PRIMITIVE
+        if (columnDesc.getTypeInfo().getCategory() == ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()
                 && ((PrimitiveTypeInfo) columnDesc.getTypeInfo()).getPrimitiveCategory() == PrimitiveCategory.TIMESTAMPLOCALTZ) {
           if (timestampPos != -1) {
             throw new SemanticException("Multiple columns with timestamp with local time-zone type on query result; "

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index 12af94e..872fc41 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -151,7 +151,7 @@ public class ExprNodeConverter extends RexVisitorImpl<ExprNodeDesc> {
     String child = fieldAccess.getField().getName();
     TypeInfo parentType = parent.getTypeInfo();
     // Allow accessing a field of list element structs directly from a list
-    boolean isList = (parentType.getCategory() == ObjectInspector.Category.LIST);
+    boolean isList = (parentType.getCategory() == ObjectInspector.Category.LIST.toMetastoreTypeCategory());
     if (isList) {
       parentType = ((ListTypeInfo) parentType).getListElementTypeInfo();
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 39ff591..e04b357 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -248,7 +248,7 @@ public class RexNodeConverter {
     GenericUDF tgtUdf = func.getGenericUDF();
 
     boolean isNumeric = (tgtUdf instanceof GenericUDFBaseBinary
-        && func.getTypeInfo().getCategory() == Category.PRIMITIVE
+        && func.getTypeInfo().getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()
         && (PrimitiveGrouping.NUMERIC_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
             ((PrimitiveTypeInfo) func.getTypeInfo()).getPrimitiveCategory())));
     boolean isCompare = !isNumeric && tgtUdf instanceof GenericUDFBaseCompare;

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
index ea042bf..7c8d40d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
@@ -306,7 +306,7 @@ public final class PcrExprProcFactory {
         return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
       }
 
-      if (has_part_col && fd.getTypeInfo().getCategory() == Category.PRIMITIVE) {
+      if (has_part_col && fd.getTypeInfo().getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
         //  we need to evaluate result for every pruned partition
         if (fd.getTypeInfo().equals(TypeInfoFactory.booleanTypeInfo)) {
           // if the return type of the GenericUDF is boolean and all partitions agree on

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 190771e..c72b2ac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -2557,7 +2557,7 @@ public class Vectorizer implements PhysicalPlanResolver {
 
           // COUNT, DENSE_RANK, and RANK do not care about column types.  The rest do.
           TypeInfo typeInfo = exprNodeDesc.getTypeInfo();
-          Category category = typeInfo.getCategory();
+          Category category = Category.fromMetastoreTypeCategory(typeInfo.getCategory());
           boolean isSupportedType;
           if (category != Category.PRIMITIVE) {
             isSupportedType = false;
@@ -2664,7 +2664,7 @@ public class Vectorizer implements PhysicalPlanResolver {
     }
     if (desc.getChildren() != null) {
       if (isInExpression
-          && desc.getChildren().get(0).getTypeInfo().getCategory() == Category.STRUCT) {
+          && desc.getChildren().get(0).getTypeInfo().getCategory() == Category.STRUCT.toMetastoreTypeCategory()) {
         // Don't restrict child expressions for projection.
         // Always use loose FILTER mode.
         if (!validateStructInExpression(desc, expressionTitle, VectorExpressionDescriptor.Mode.FILTER)) {
@@ -2688,7 +2688,7 @@ public class Vectorizer implements PhysicalPlanResolver {
       String expressionTitle, VectorExpressionDescriptor.Mode mode) {
     for (ExprNodeDesc d : desc.getChildren()) {
       TypeInfo typeInfo = d.getTypeInfo();
-      if (typeInfo.getCategory() != Category.STRUCT) {
+      if (typeInfo.getCategory() != Category.STRUCT.toMetastoreTypeCategory()) {
         return false;
       }
       StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
@@ -2699,7 +2699,7 @@ public class Vectorizer implements PhysicalPlanResolver {
       final int fieldCount = fieldTypeInfos.size();
       for (int f = 0; f < fieldCount; f++) {
         TypeInfo fieldTypeInfo = fieldTypeInfos.get(f);
-        Category category = fieldTypeInfo.getCategory();
+        Category category = Category.fromMetastoreTypeCategory(fieldTypeInfo.getCategory());
         if (category != Category.PRIMITIVE) {
           setExpressionIssue(expressionTitle,
               "Cannot vectorize struct field " + fieldNames.get(f)
@@ -2788,7 +2788,7 @@ public class Vectorizer implements PhysicalPlanResolver {
 
     if (!result) {
       TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type);
-      if (typeInfo.getCategory() != Category.PRIMITIVE) {
+      if (typeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         if (allowComplex) {
           return true;
         }
@@ -2808,7 +2808,7 @@ public class Vectorizer implements PhysicalPlanResolver {
 
     if (!result) {
       TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type);
-      if (typeInfo.getCategory() != Category.PRIMITIVE) {
+      if (typeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
         if (allowComplex && isVectorizationComplexTypesEnabled) {
           return null;
         } else if (!allowComplex) {
@@ -3131,7 +3131,7 @@ public class Vectorizer implements PhysicalPlanResolver {
       // same check used in HashTableLoader.
       if (!MapJoinKey.isSupportedField(typeInfo)) {
         supportsKeyTypes = false;
-        Category category = typeInfo.getCategory();
+        Category category = Category.fromMetastoreTypeCategory(typeInfo.getCategory());
         notSupportedKeyTypes.add(
             (category != Category.PRIMITIVE ? category.toString() :
               ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory().toString()));

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index c97e2a9..2dfcc16 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -228,7 +228,7 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
         if (colName.equalsIgnoreCase(col.getName())) {
           String type = col.getType();
           TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type);
-          if (typeInfo.getCategory() != ObjectInspector.Category.PRIMITIVE) {
+          if (typeInfo.getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
             logTypeWarning(colName, type);
             colNames.remove(colName);
           } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index 89e8412..e1710fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.ql.parse.CalcitePlanner.ASTSearcher;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.MetastoreTypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -149,7 +150,7 @@ public final class ParseUtils {
   public static ExprNodeDesc createConversionCast(ExprNodeDesc column, PrimitiveTypeInfo tableFieldTypeInfo)
       throws SemanticException {
     // Get base type, since type string may be parameterized
-    String baseType = TypeInfoUtils.getBaseName(tableFieldTypeInfo.getTypeName());
+    String baseType = MetastoreTypeInfoUtils.getBaseName(tableFieldTypeInfo.getTypeName());
 
     // If the type cast UDF is for a parameterized type, then it should implement
     // the SettableUDF interface so that we can pass in the params.

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 5c96653..ee860ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7585,7 +7585,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
           // need to do some conversions here
           converted = true;
-          if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
+          if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
             // cannot convert to complex types
             column = null;
           } else {
@@ -7850,7 +7850,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
       if (convert && !tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
         // need to do some conversions here
-        if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
+        if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE.toMetastoreTypeCategory()) {
           // cannot convert to complex types
           column = null;
         } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 14217e3..b20028f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -935,11 +935,11 @@ public class TypeCheckProcFactory {
         TypeInfo objectTypeInfo = object.getTypeInfo();
 
         // Allow accessing a field of list element structs directly from a list
-        boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST);
+        boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST.toMetastoreTypeCategory());
         if (isList) {
           objectTypeInfo = ((ListTypeInfo) objectTypeInfo).getListElementTypeInfo();
         }
-        if (objectTypeInfo.getCategory() != Category.STRUCT) {
+        if (objectTypeInfo.getCategory() != Category.STRUCT.toMetastoreTypeCategory()) {
           throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr));
         }
         TypeInfo t = ((StructTypeInfo) objectTypeInfo).getStructFieldTypeInfo(fieldNameString);
@@ -958,7 +958,7 @@ public class TypeCheckProcFactory {
         // Check whether this is a list or a map
         TypeInfo myt = children.get(0).getTypeInfo();
 
-        if (myt.getCategory() == Category.LIST) {
+        if (myt.getCategory() == Category.LIST.toMetastoreTypeCategory()) {
           // Only allow integer index for now
           if (!TypeInfoUtils.implicitConvertible(children.get(1).getTypeInfo(),
               TypeInfoFactory.intTypeInfo)) {
@@ -969,7 +969,7 @@ public class TypeCheckProcFactory {
           // Calculate TypeInfo
           TypeInfo t = ((ListTypeInfo) myt).getListElementTypeInfo();
           desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry.getGenericUDFForIndex(), children);
-        } else if (myt.getCategory() == Category.MAP) {
+        } else if (myt.getCategory() == Category.MAP.toMetastoreTypeCategory()) {
           if (!TypeInfoUtils.implicitConvertible(children.get(1).getTypeInfo(),
               ((MapTypeInfo) myt).getMapKeyTypeInfo())) {
             throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
index 73f449f..6d02542 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
@@ -122,9 +122,9 @@ public class ExprNodeConstantDesc extends ExprNodeDesc implements Serializable {
 
   @Override
   public String getExprString() {
-    if (typeInfo.getCategory() == Category.PRIMITIVE) {
+    if (typeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
       return getFormatted(typeInfo, value);
-    } else if (typeInfo.getCategory() == Category.STRUCT) {
+    } else if (typeInfo.getCategory() == Category.STRUCT.toMetastoreTypeCategory()) {
       StringBuilder sb = new StringBuilder();
       sb.append("const struct(");
       List<?> items = (List<?>) getWritableObjectInspector().getWritableConstantValue();

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionConversion.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionConversion.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionConversion.java
index 0cfa646..3a4efd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionConversion.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorPartitionConversion.java
@@ -82,8 +82,8 @@ public class VectorPartitionConversion  {
   public static boolean isImplicitVectorColumnConversion(TypeInfo fromTypeInfo,
       TypeInfo toTypeInfo) {
 
-    if (fromTypeInfo.getCategory() == Category.PRIMITIVE &&
-        toTypeInfo.getCategory() == Category.PRIMITIVE) {
+    if (fromTypeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory() &&
+        toTypeInfo.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
 
       PrimitiveCategory fromPrimitiveCategory =
           ((PrimitiveTypeInfo) fromTypeInfo).getPrimitiveCategory();

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index cef87f5..69c3371 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -1952,7 +1952,7 @@ public class StatsUtils {
   public static boolean hasDiscreteRange(ColStatistics colStat) {
     if (colStat.getRange() != null) {
       TypeInfo colType = TypeInfoUtils.getTypeInfoFromTypeString(colStat.getColumnType());
-      if (colType.getCategory() == Category.PRIMITIVE) {
+      if (colType.getCategory() == Category.PRIMITIVE.toMetastoreTypeCategory()) {
         PrimitiveTypeInfo pti = (PrimitiveTypeInfo) colType;
         switch (pti.getPrimitiveCategory()) {
           case BOOLEAN:

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
index d170d86..7950840 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
@@ -76,7 +76,7 @@ public class GenericUDAFAverage extends AbstractGenericUDAFResolver {
           "Exactly one argument is expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
               + parameters[0].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBinarySetFunctions.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBinarySetFunctions.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBinarySetFunctions.java
index 397ec92..063a5c0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBinarySetFunctions.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBinarySetFunctions.java
@@ -412,12 +412,12 @@ public class GenericUDAFBinarySetFunctions extends AbstractGenericUDAFResolver {
           "Exactly two arguments are expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");
     }
 
-    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
           + parameters[1].getTypeName() + " is passed.");
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
index 2267589..59ef5d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
@@ -71,7 +71,7 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
           "Exactly 2 (col + hll) or 3 (col + fm + #bitvectors) arguments are expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
index 2e7a559..9ac3ea3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java
@@ -69,7 +69,7 @@ public class GenericUDAFContextNGrams implements GenericUDAFResolver {
     // Validate the first parameter, which is the expression to compute over. This should be an
     // array of strings type, or an array of arrays of strings.
     PrimitiveTypeInfo pti;
-    if (parameters[0].getCategory() != ObjectInspector.Category.LIST) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.LIST.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only list type arguments are accepted but "
           + parameters[0].getTypeName() + " was passed as parameter 1.");
@@ -100,9 +100,9 @@ public class GenericUDAFContextNGrams implements GenericUDAFResolver {
     }
 
     // Validate the second parameter, which should be an array of strings
-    if(parameters[1].getCategory() != ObjectInspector.Category.LIST ||
+    if(parameters[1].getCategory() != ObjectInspector.Category.LIST.toMetastoreTypeCategory() ||
        ((ListTypeInfo) parameters[1]).getListElementTypeInfo().getCategory() !=
-         ObjectInspector.Category.PRIMITIVE) {
+         ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(1, "Only arrays of strings are accepted but "
           + parameters[1].getTypeName() + " was passed as parameter 2.");
     }
@@ -113,7 +113,7 @@ public class GenericUDAFContextNGrams implements GenericUDAFResolver {
     }
 
     // Validate the third parameter, which should be an integer to represent 'k'
-    if(parameters[2].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if(parameters[2].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(2, "Only integers are accepted but "
             + parameters[2].getTypeName() + " was passed as parameter 3.");
     }
@@ -133,7 +133,7 @@ public class GenericUDAFContextNGrams implements GenericUDAFResolver {
     // If the fourth parameter -- precision factor 'pf' -- has been specified, make sure it's
     // an integer.
     if(parameters.length == 4) {
-      if(parameters[3].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+      if(parameters[3].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
         throw new UDFArgumentTypeException(3, "Only integers are accepted but "
             + parameters[3].getTypeName() + " was passed as parameter 4.");
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
index d1517ab..8140724 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
@@ -82,13 +82,13 @@ public class GenericUDAFCorrelation extends AbstractGenericUDAFResolver {
           "Exactly two arguments are expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");
     }
 
-    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
         throw new UDFArgumentTypeException(1,
             "Only primitive type arguments are accepted but "
             + parameters[1].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
index 8b088f8..fb9fc5b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
@@ -69,13 +69,13 @@ public class GenericUDAFCovariance extends AbstractGenericUDAFResolver {
           "Exactly two arguments are expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");
     }
 
-    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
         throw new UDFArgumentTypeException(1,
             "Only primitive type arguments are accepted but "
             + parameters[1].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
index 0dc3918..db8a066 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovarianceSample.java
@@ -50,13 +50,13 @@ public class GenericUDAFCovarianceSample extends GenericUDAFCovariance {
           "Exactly two arguments are expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");
     }
 
-    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
         throw new UDFArgumentTypeException(1,
             "Only primitive type arguments are accepted but "
             + parameters[1].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
index 4910fe1..a8f369f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
@@ -69,7 +69,7 @@ public class GenericUDAFHistogramNumeric extends AbstractGenericUDAFResolver {
     }
 
     // validate the first parameter, which is the expression to compute over
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " was passed as parameter 1.");
@@ -94,7 +94,7 @@ public class GenericUDAFHistogramNumeric extends AbstractGenericUDAFResolver {
     }
 
     // validate the second parameter, which is the number of histogram bins
-    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(1,
           "Only primitive type arguments are accepted but "
           + parameters[1].getTypeName() + " was passed as parameter 2.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
index 79b519c..478db4b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
@@ -44,7 +44,7 @@ public class GenericUDAFStd extends GenericUDAFVariance {
           "Exactly one argument is expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
index c68d77c..7e33892 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
@@ -46,7 +46,7 @@ public class GenericUDAFStdSample extends GenericUDAFVariance {
           "Exactly one argument is expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
index 1439b64..f755c52 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
@@ -69,7 +69,7 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
           "Exactly one argument is expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
               + parameters[0].getTypeName() + " is passed.");
@@ -111,7 +111,7 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
   }
 
   public static PrimitiveObjectInspector.PrimitiveCategory getReturnType(TypeInfo type) {
-    if (type.getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (type.getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       return null;
     }
     switch (((PrimitiveTypeInfo) type).getPrimitiveCategory()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumEmptyIsZero.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumEmptyIsZero.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumEmptyIsZero.java
index 01c933c..725b361 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumEmptyIsZero.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSumEmptyIsZero.java
@@ -35,7 +35,7 @@ public class GenericUDAFSumEmptyIsZero extends GenericUDAFSum {
           "Exactly one argument is expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
               + parameters[0].getTypeName() + " is passed.");

http://git-wip-us.apache.org/repos/asf/hive/blob/40ee74eb/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
index c9fb3df..a89d2a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
@@ -158,7 +158,7 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
           "Exactly one argument is expected.");
     }
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE.toMetastoreTypeCategory()) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
           + parameters[0].getTypeName() + " is passed.");