Posted to commits@hive.apache.org by ha...@apache.org on 2012/11/08 10:44:29 UTC

svn commit: r1406984 [22/29] - in /hive/trunk: contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/ contrib/src/test...

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Thu Nov  8 09:44:19 2012
@@ -19,14 +19,14 @@
 package org.apache.hadoop.hive.ql.metadata;
 
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-import static org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE;
-import static org.apache.hadoop.hive.serde.Constants.COLLECTION_DELIM;
-import static org.apache.hadoop.hive.serde.Constants.ESCAPE_CHAR;
-import static org.apache.hadoop.hive.serde.Constants.FIELD_DELIM;
-import static org.apache.hadoop.hive.serde.Constants.LINE_DELIM;
-import static org.apache.hadoop.hive.serde.Constants.MAPKEY_DELIM;
-import static org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT;
-import static org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME;
+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
+import static org.apache.hadoop.hive.serde.serdeConstants.COLLECTION_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.ESCAPE_CHAR;
+import static org.apache.hadoop.hive.serde.serdeConstants.FIELD_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.LINE_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.MAPKEY_DELIM;
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
+import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
-import org.apache.hadoop.hive.metastore.api.Constants;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
@@ -370,7 +370,7 @@ public class Hive {
     try {
       // Remove the DDL_TIME so it gets refreshed
       if (newTbl.getParameters() != null) {
-        newTbl.getParameters().remove(Constants.DDL_TIME);
+        newTbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
       }
       getMSC().alter_table(t.getDbName(), t.getTableName(), newTbl.getTTable());
     } catch (MetaException e) {
@@ -419,7 +419,7 @@ public class Hive {
     try {
       // Remove the DDL time so that it gets refreshed
       if (newPart.getParameters() != null) {
-        newPart.getParameters().remove(Constants.DDL_TIME);
+        newPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
       }
       getMSC().alter_partition(t.getDbName(), t.getTableName(),
           newPart.getTPartition());
@@ -451,7 +451,7 @@ public class Hive {
       // Remove the DDL time so that it gets refreshed
       for (Partition tmpPart: newParts) {
         if (tmpPart.getParameters() != null) {
-          tmpPart.getParameters().remove(Constants.DDL_TIME);
+          tmpPart.getParameters().remove(hive_metastoreConstants.DDL_TIME);
         }
         newTParts.add(tmpPart.getTPartition());
       }
@@ -552,7 +552,7 @@ public class Hive {
       }
       tbl.checkValidity();
       if (tbl.getParameters() != null) {
-        tbl.getParameters().remove(Constants.DDL_TIME);
+        tbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
       }
       org.apache.hadoop.hive.metastore.api.Table tTbl = tbl.getTTable();
       PrincipalPrivilegeSet principalPrivs = new PrincipalPrivilegeSet();
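
Every hunk in this part applies the same mechanical rename of the Thrift-generated
constants classes: org.apache.hadoop.hive.serde.Constants becomes serdeConstants, and
org.apache.hadoop.hive.metastore.api.Constants becomes hive_metastoreConstants. For
downstream code the migration is a one-line import swap; a minimal sketch (hypothetical
user code, not part of this commit):

    // Before (against the old generated class):
    //   import org.apache.hadoop.hive.metastore.api.Constants;
    //   params.remove(Constants.DDL_TIME);

    // After (against this revision):
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;

    void clearDdlTime(Map<String, String> params) {
      if (params != null) {
        params.remove(hive_metastoreConstants.DDL_TIME);  // metastore refreshes DDL_TIME itself
      }
    }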

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Thu Nov  8 09:44:19 2012
@@ -47,7 +47,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -136,7 +136,7 @@ public class Table implements Serializab
       // We have to use MetadataTypedColumnsetSerDe because LazySimpleSerDe does
       // not support a table with no columns.
       sd.getSerdeInfo().setSerializationLib(MetadataTypedColumnsetSerDe.class.getName());
-      sd.getSerdeInfo().getParameters().put(Constants.SERIALIZATION_FORMAT, "1");
+      sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
       sd.setInputFormat(SequenceFileInputFormat.class.getName());
       sd.setOutputFormat(HiveSequenceFileOutputFormat.class.getName());
       SkewedInfo skewInfo = new SkewedInfo();
@@ -279,7 +279,7 @@ public class Table implements Serializab
       storageHandler = HiveUtils.getStorageHandler(
         Hive.get().getConf(),
         getProperty(
-          org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE));
+          org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
@@ -845,7 +845,7 @@ public class Table implements Serializab
 
   public boolean isNonNative() {
     return getProperty(
-      org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE)
+      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE)
       != null;
   }
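
Table.isNonNative() above reduces to a single property test; a standalone sketch of the
same check (the parameter map is hypothetical):

    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;

    // A table is non-native exactly when a storage handler is registered for it.
    static boolean isNonNative(Map<String, String> tableProps) {
      return tableProps.get(hive_metastoreConstants.META_TABLE_STORAGE) != null;
    }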
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java Thu Nov  8 09:44:19 2012
@@ -316,7 +316,7 @@ public class GenMRFileSink1 implements N
 
     TableDesc ts = (TableDesc) fsConf.getTableInfo().clone();
     fsConf.getTableInfo().getProperties().remove(
-        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
 
     FileSinkDesc newFSD = new FileSinkDesc(finalName, ts, parseCtx.getConf()
         .getBoolVar(HiveConf.ConfVars.COMPRESSRESULT));
@@ -443,12 +443,12 @@ public class GenMRFileSink1 implements N
 
       // update the FileSinkOperator to include partition columns
       fsInputDesc.getTableInfo().getProperties().setProperty(
-        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS,
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS,
         partCols.toString()); // list of dynamic partition column names
     } else {
       // non-partitioned table
       fsInputDesc.getTableInfo().getProperties().remove(
-        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
     }
 
     //

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java Thu Nov  8 09:44:19 2012
@@ -75,7 +75,7 @@ import org.apache.hadoop.hive.ql.plan.Pl
 import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
@@ -422,7 +422,7 @@ public class MapJoinProcessor implements
       }
       if (filterMap != null && filterMap[pos] != null && pos != mapJoinPos) {
         ExprNodeColumnDesc isFilterDesc = new ExprNodeColumnDesc(TypeInfoFactory
-            .getPrimitiveTypeInfo(Constants.TINYINT_TYPE_NAME), "filter", "filter", false);
+            .getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME), "filter", "filter", false);
         valueFilteredCols.add(isFilterDesc);
       }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Thu Nov  8 09:44:19 2012
@@ -57,7 +57,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -576,10 +576,10 @@ public abstract class BaseSemanticAnalyz
       throws SemanticException {
     switch (typeNode.getType()) {
     case HiveParser.TOK_LIST:
-      return Constants.LIST_TYPE_NAME + "<"
+      return serdeConstants.LIST_TYPE_NAME + "<"
           + getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ">";
     case HiveParser.TOK_MAP:
-      return Constants.MAP_TYPE_NAME + "<"
+      return serdeConstants.MAP_TYPE_NAME + "<"
           + getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ","
           + getTypeStringFromAST((ASTNode) typeNode.getChild(1)) + ">";
     case HiveParser.TOK_STRUCT:
@@ -593,7 +593,7 @@ public abstract class BaseSemanticAnalyz
 
   private static String getStructTypeStringFromAST(ASTNode typeNode)
       throws SemanticException {
-    String typeStr = Constants.STRUCT_TYPE_NAME + "<";
+    String typeStr = serdeConstants.STRUCT_TYPE_NAME + "<";
     typeNode = (ASTNode) typeNode.getChild(0);
     int children = typeNode.getChildCount();
     if (children <= 0) {
@@ -615,7 +615,7 @@ public abstract class BaseSemanticAnalyz
 
   private static String getUnionTypeStringFromAST(ASTNode typeNode)
       throws SemanticException {
-    String typeStr = Constants.UNION_TYPE_NAME + "<";
+    String typeStr = serdeConstants.UNION_TYPE_NAME + "<";
     typeNode = (ASTNode) typeNode.getChild(0);
     int children = typeNode.getChildCount();
     if (children <= 0) {
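
getTypeStringFromAST and its helpers above build Hive type strings by concatenating the
serdeConstants names; assuming the usual values ("array", "map", "struct", "uniontype"),
the recursion yields strings like these:

    import org.apache.hadoop.hive.serde.serdeConstants;

    String listType = serdeConstants.LIST_TYPE_NAME + "<"
        + serdeConstants.STRING_TYPE_NAME + ">";            // "array<string>"
    String mapType = serdeConstants.MAP_TYPE_NAME + "<"
        + serdeConstants.INT_TYPE_NAME + ","
        + serdeConstants.STRING_TYPE_NAME + ">";            // "map<int,string>"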

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Thu Nov  8 09:44:19 2012
@@ -125,7 +125,7 @@ import org.apache.hadoop.hive.ql.plan.Un
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -140,18 +140,18 @@ public class DDLSemanticAnalyzer extends
 
   private final Set<String> reservedPartitionValues;
   static {
-    TokenToTypeName.put(HiveParser.TOK_BOOLEAN, Constants.BOOLEAN_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_TINYINT, Constants.TINYINT_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_SMALLINT, Constants.SMALLINT_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_INT, Constants.INT_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_BIGINT, Constants.BIGINT_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_FLOAT, Constants.FLOAT_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_DOUBLE, Constants.DOUBLE_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_STRING, Constants.STRING_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_BINARY, Constants.BINARY_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_DATE, Constants.DATE_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_DATETIME, Constants.DATETIME_TYPE_NAME);
-    TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_BOOLEAN, serdeConstants.BOOLEAN_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_TINYINT, serdeConstants.TINYINT_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_SMALLINT, serdeConstants.SMALLINT_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_INT, serdeConstants.INT_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_BIGINT, serdeConstants.BIGINT_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_FLOAT, serdeConstants.FLOAT_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_DOUBLE, serdeConstants.DOUBLE_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_STRING, serdeConstants.STRING_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_BINARY, serdeConstants.BINARY_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_DATE, serdeConstants.DATE_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_DATETIME, serdeConstants.DATETIME_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME);
   }
 
   public static String getTypeName(int token) throws SemanticException {
@@ -1617,8 +1617,8 @@ public class DDLSemanticAnalyzer extends
   private FetchTask createFetchTask(String schema) {
     Properties prop = new Properties();
 
-    prop.setProperty(Constants.SERIALIZATION_FORMAT, "9");
-    prop.setProperty(Constants.SERIALIZATION_NULL_FORMAT, " ");
+    prop.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
+    prop.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, " ");
     String[] colTypes = schema.split("#");
     prop.setProperty("columns", colTypes[0]);
     prop.setProperty("columns.types", colTypes[1]);
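
createFetchTask above receives the fetch schema packed as one "names#types" string; a
minimal sketch of the convention it unpacks (column names and types are illustrative):

    import java.util.Properties;

    String schema = "key,value" + "#" + "string,int";   // names '#' types
    String[] colTypes = schema.split("#");
    Properties prop = new Properties();
    prop.setProperty("columns", colTypes[0]);           // "key,value"
    prop.setProperty("columns.types", colTypes[1]);     // "string,int"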

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Thu Nov  8 09:44:19 2012
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.ql.plan.Cr
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 /**
  * ImportSemanticAnalyzer.
@@ -457,9 +457,9 @@ public class ImportSemanticAnalyzer exte
                 .getMsg(" Table Serde class does not match"));
       }
       String existingSerdeFormat = table
-          .getSerdeParam(Constants.SERIALIZATION_FORMAT);
+          .getSerdeParam(serdeConstants.SERIALIZATION_FORMAT);
       String importedSerdeFormat = tableDesc.getSerdeProps().get(
-          Constants.SERIALIZATION_FORMAT);
+          serdeConstants.SERIALIZATION_FORMAT);
       if (!ObjectUtils.equals(existingSerdeFormat, importedSerdeFormat)) {
         throw new SemanticException(
             ErrorMsg.INCOMPATIBLE_SCHEMA

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Nov  8 09:44:19 2012
@@ -164,7 +164,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.util.ObjectPair;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -1742,28 +1742,28 @@ public class SemanticAnalyzer extends Ba
         case HiveParser.TOK_TABLEROWFORMATFIELD:
           String fieldDelim = unescapeSQLString(rowChild.getChild(0).getText());
           tblDesc.getProperties()
-              .setProperty(Constants.FIELD_DELIM, fieldDelim);
-          tblDesc.getProperties().setProperty(Constants.SERIALIZATION_FORMAT,
+              .setProperty(serdeConstants.FIELD_DELIM, fieldDelim);
+          tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_FORMAT,
               fieldDelim);
 
           if (rowChild.getChildCount() >= 2) {
             String fieldEscape = unescapeSQLString(rowChild.getChild(1)
                 .getText());
-            tblDesc.getProperties().setProperty(Constants.ESCAPE_CHAR,
+            tblDesc.getProperties().setProperty(serdeConstants.ESCAPE_CHAR,
                 fieldEscape);
           }
           break;
         case HiveParser.TOK_TABLEROWFORMATCOLLITEMS:
-          tblDesc.getProperties().setProperty(Constants.COLLECTION_DELIM,
+          tblDesc.getProperties().setProperty(serdeConstants.COLLECTION_DELIM,
               unescapeSQLString(rowChild.getChild(0).getText()));
           break;
         case HiveParser.TOK_TABLEROWFORMATMAPKEYS:
-          tblDesc.getProperties().setProperty(Constants.MAPKEY_DELIM,
+          tblDesc.getProperties().setProperty(serdeConstants.MAPKEY_DELIM,
               unescapeSQLString(rowChild.getChild(0).getText()));
           break;
         case HiveParser.TOK_TABLEROWFORMATLINES:
           String lineDelim = unescapeSQLString(rowChild.getChild(0).getText());
-          tblDesc.getProperties().setProperty(Constants.LINE_DELIM, lineDelim);
+          tblDesc.getProperties().setProperty(serdeConstants.LINE_DELIM, lineDelim);
           if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
             throw new SemanticException(generateErrorMessage(rowChild,
                     ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
@@ -4694,8 +4694,8 @@ public class SemanticAnalyzer extends Ba
         // converted.
         //
         String tName = colInfo.getType().getTypeName();
-        if (tName.equals(Constants.VOID_TYPE_NAME)) {
-          colTypes = colTypes.concat(Constants.STRING_TYPE_NAME);
+        if (tName.equals(serdeConstants.VOID_TYPE_NAME)) {
+          colTypes = colTypes.concat(serdeConstants.STRING_TYPE_NAME);
         } else {
           colTypes = colTypes.concat(tName);
         }
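
Each TOK_TABLEROWFORMAT* branch above maps one ROW FORMAT clause onto a serde property;
a minimal sketch of the property set a typical DELIMITED clause produces (delimiter
choices are illustrative):

    import java.util.Properties;
    import org.apache.hadoop.hive.serde.serdeConstants;

    // ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':'
    //   MAP KEYS TERMINATED BY '=' LINES TERMINATED BY '\n'
    Properties props = new Properties();
    props.setProperty(serdeConstants.FIELD_DELIM, ",");
    props.setProperty(serdeConstants.SERIALIZATION_FORMAT, ",");  // kept in sync with FIELD_DELIM
    props.setProperty(serdeConstants.COLLECTION_DELIM, ":");
    props.setProperty(serdeConstants.MAPKEY_DELIM, "=");
    props.setProperty(serdeConstants.LINE_DELIM, "\n");           // only "\n" (or "10") is accepted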

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Thu Nov  8 09:44:19 2012
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -482,25 +482,25 @@ public final class TypeCheckProcFactory 
       specialFunctionTextHashMap.put(HiveParser.TOK_ISNOTNULL, "isnotnull");
       conversionFunctionTextHashMap = new HashMap<Integer, String>();
       conversionFunctionTextHashMap.put(HiveParser.TOK_BOOLEAN,
-          Constants.BOOLEAN_TYPE_NAME);
+          serdeConstants.BOOLEAN_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_TINYINT,
-          Constants.TINYINT_TYPE_NAME);
+          serdeConstants.TINYINT_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_SMALLINT,
-          Constants.SMALLINT_TYPE_NAME);
+          serdeConstants.SMALLINT_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_INT,
-          Constants.INT_TYPE_NAME);
+          serdeConstants.INT_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_BIGINT,
-          Constants.BIGINT_TYPE_NAME);
+          serdeConstants.BIGINT_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_FLOAT,
-          Constants.FLOAT_TYPE_NAME);
+          serdeConstants.FLOAT_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_DOUBLE,
-          Constants.DOUBLE_TYPE_NAME);
+          serdeConstants.DOUBLE_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_STRING,
-          Constants.STRING_TYPE_NAME);
+          serdeConstants.STRING_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_BINARY,
-          Constants.BINARY_TYPE_NAME);
+          serdeConstants.BINARY_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
-          Constants.TIMESTAMP_TYPE_NAME);
+          serdeConstants.TIMESTAMP_TYPE_NAME);
     }
 
     public static boolean isRedundantConversionFunction(ASTNode expr,
@@ -715,13 +715,13 @@ public final class TypeCheckProcFactory 
               children.get(0) instanceof ExprNodeConstantDesc ? 0 : 1;
 
           Set<String> inferTypes = new HashSet<String>(Arrays.asList(
-              Constants.TINYINT_TYPE_NAME.toLowerCase(),
-              Constants.SMALLINT_TYPE_NAME.toLowerCase(),
-              Constants.INT_TYPE_NAME.toLowerCase(),
-              Constants.BIGINT_TYPE_NAME.toLowerCase(),
-              Constants.FLOAT_TYPE_NAME.toLowerCase(),
-              Constants.DOUBLE_TYPE_NAME.toLowerCase(),
-              Constants.STRING_TYPE_NAME.toLowerCase()
+              serdeConstants.TINYINT_TYPE_NAME.toLowerCase(),
+              serdeConstants.SMALLINT_TYPE_NAME.toLowerCase(),
+              serdeConstants.INT_TYPE_NAME.toLowerCase(),
+              serdeConstants.BIGINT_TYPE_NAME.toLowerCase(),
+              serdeConstants.FLOAT_TYPE_NAME.toLowerCase(),
+              serdeConstants.DOUBLE_TYPE_NAME.toLowerCase(),
+              serdeConstants.STRING_TYPE_NAME.toLowerCase()
               ));
 
           String constType = children.get(constIdx).getTypeString().toLowerCase();
@@ -735,19 +735,19 @@ public final class TypeCheckProcFactory 
 
             Number value = null;
             try {
-              if (columnType.equalsIgnoreCase(Constants.TINYINT_TYPE_NAME)) {
+              if (columnType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)) {
                 value = new Byte(constValue);
-              } else if (columnType.equalsIgnoreCase(Constants.SMALLINT_TYPE_NAME)) {
+              } else if (columnType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)) {
                 value = new Short(constValue);
-              } else if (columnType.equalsIgnoreCase(Constants.INT_TYPE_NAME)) {
+              } else if (columnType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)) {
                 value = new Integer(constValue);
-              } else if (columnType.equalsIgnoreCase(Constants.BIGINT_TYPE_NAME)) {
+              } else if (columnType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)) {
                 value = new Long(constValue);
-              } else if (columnType.equalsIgnoreCase(Constants.FLOAT_TYPE_NAME)) {
+              } else if (columnType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME)) {
                 value = new Float(constValue);
-              } else if (columnType.equalsIgnoreCase(Constants.DOUBLE_TYPE_NAME)
-                  || (columnType.equalsIgnoreCase(Constants.STRING_TYPE_NAME)
-                     && !constType.equalsIgnoreCase(Constants.BIGINT_TYPE_NAME))) {
+              } else if (columnType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)
+                  || (columnType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)
+                     && !constType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME))) {
                 // no smart inference for queries like "str_col = bigint_const"
                 triedDouble = true;
                 value = new Double(constValue);
@@ -759,7 +759,7 @@ public final class TypeCheckProcFactory 
               // the operator is EQUAL, return false due to the type mismatch
               if (triedDouble ||
                   (fi.getGenericUDF() instanceof GenericUDFOPEqual
-                  && !columnType.equals(Constants.STRING_TYPE_NAME))) {
+                  && !columnType.equals(serdeConstants.STRING_TYPE_NAME))) {
                 return new ExprNodeConstantDesc(false);
               }
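
The constant-folding block above narrows a string literal to the column's numeric type
purely by type name; a standalone sketch of that dispatch:

    import org.apache.hadoop.hive.serde.serdeConstants;

    static Number parseForColumn(String columnType, String constValue) {
      if (columnType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)) {
        return Byte.valueOf(constValue);
      } else if (columnType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)) {
        return Short.valueOf(constValue);
      } else if (columnType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)) {
        return Integer.valueOf(constValue);
      } else if (columnType.equalsIgnoreCase(serdeConstants.BIGINT_TYPE_NAME)) {
        return Long.valueOf(constValue);
      }
      return Double.valueOf(constValue);  // float/double (and the guarded string case) use double
    }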
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java Thu Nov  8 09:44:19 2012
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -82,7 +82,7 @@ public class ExprNodeConstantDesc extend
       return "null";
     }
 
-    if (typeInfo.getTypeName().equals(Constants.STRING_TYPE_NAME)) {
+    if (typeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
       return "'" + value.toString() + "'";
     } else {
       return value.toString();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Thu Nov  8 09:44:19 2012
@@ -80,7 +80,7 @@ public class PartitionDesc implements Se
       this.serdeClassName = serdeClassName;
     } else if (properties != null) {
       this.serdeClassName = properties
-          .getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
+          .getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
     }
   }
 
@@ -93,7 +93,7 @@ public class PartitionDesc implements Se
     inputFileFormatClass = part.getInputFormatClass();
     outputFileFormatClass = part.getOutputFormatClass();
     serdeClassName = properties
-        .getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
+        .getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
     ;
   }
 
@@ -106,7 +106,7 @@ public class PartitionDesc implements Se
     Deserializer deserializer;
     try {
       deserializer = SerDeUtils.lookupDeserializer(
-          properties.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB));
+          properties.getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB));
     } catch (SerDeException e) {
       throw new HiveException(e);
     }
@@ -114,7 +114,7 @@ public class PartitionDesc implements Se
     inputFileFormatClass = part.getInputFormatClass();
     outputFileFormatClass = part.getOutputFormatClass();
     serdeClassName = properties.getProperty(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
+        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
   }
 
   @Explain(displayName = "")
@@ -214,7 +214,7 @@ public class PartitionDesc implements Se
   @Explain(displayName = "name")
   public String getTableName() {
     return getProperties().getProperty(
-        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME);
   }
 
   @Explain(displayName = "input format")
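
All three PartitionDesc constructors above recover the serde class from the same table
property; a standalone sketch (the serde class value is illustrative):

    import java.util.Properties;
    import org.apache.hadoop.hive.serde.serdeConstants;

    Properties partProps = new Properties();
    partProps.setProperty(serdeConstants.SERIALIZATION_LIB,
        "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    String serdeClassName = partProps.getProperty(serdeConstants.SERIALIZATION_LIB);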

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Thu Nov  8 09:44:19 2012
@@ -45,7 +45,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -171,19 +171,19 @@ public final class PlanUtils {
       boolean useDelimitedJSON, String fileFormat) {
 
     Properties properties = Utilities.makeProperties(
-        Constants.SERIALIZATION_FORMAT, separatorCode, Constants.LIST_COLUMNS,
+        serdeConstants.SERIALIZATION_FORMAT, separatorCode, serdeConstants.LIST_COLUMNS,
         columns);
 
     if (!separatorCode.equals(Integer.toString(Utilities.ctrlaCode))) {
-      properties.setProperty(Constants.FIELD_DELIM, separatorCode);
+      properties.setProperty(serdeConstants.FIELD_DELIM, separatorCode);
     }
 
     if (columnTypes != null) {
-      properties.setProperty(Constants.LIST_COLUMN_TYPES, columnTypes);
+      properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, columnTypes);
     }
 
     if (lastColumnTakesRestOfTheLine) {
-      properties.setProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+      properties.setProperty(serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
           "true");
     }
 
@@ -216,7 +216,7 @@ public final class PlanUtils {
       String fileFormat) {
     TableDesc tblDesc = getTableDesc(LazySimpleSerDe.class, "" + Utilities.ctrlaCode, cols, colTypes,
         false, false, fileFormat);
-    tblDesc.getProperties().setProperty(Constants.ESCAPE_CHAR, "\\");
+    tblDesc.getProperties().setProperty(serdeConstants.ESCAPE_CHAR, "\\");
     return tblDesc;
   }
 
@@ -250,26 +250,26 @@ public final class PlanUtils {
       Properties properties = ret.getProperties();
 
       if (crtTblDesc.getCollItemDelim() != null) {
-        properties.setProperty(Constants.COLLECTION_DELIM, crtTblDesc
+        properties.setProperty(serdeConstants.COLLECTION_DELIM, crtTblDesc
             .getCollItemDelim());
       }
 
       if (crtTblDesc.getMapKeyDelim() != null) {
-        properties.setProperty(Constants.MAPKEY_DELIM, crtTblDesc
+        properties.setProperty(serdeConstants.MAPKEY_DELIM, crtTblDesc
             .getMapKeyDelim());
       }
 
       if (crtTblDesc.getFieldEscape() != null) {
-        properties.setProperty(Constants.ESCAPE_CHAR, crtTblDesc
+        properties.setProperty(serdeConstants.ESCAPE_CHAR, crtTblDesc
             .getFieldEscape());
       }
 
       if (crtTblDesc.getLineDelim() != null) {
-        properties.setProperty(Constants.LINE_DELIM, crtTblDesc.getLineDelim());
+        properties.setProperty(serdeConstants.LINE_DELIM, crtTblDesc.getLineDelim());
       }
 
       if (crtTblDesc.getTableName() != null && crtTblDesc.getDatabaseName() != null) {
-        properties.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME,
+        properties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
             crtTblDesc.getDatabaseName() + "." + crtTblDesc.getTableName());
       }
 
@@ -299,7 +299,7 @@ public final class PlanUtils {
     return new TableDesc(MetadataTypedColumnsetSerDe.class,
         TextInputFormat.class, IgnoreKeyTextOutputFormat.class, Utilities
         .makeProperties(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT,
+        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT,
         separatorCode));
   }
 
@@ -310,11 +310,11 @@ public final class PlanUtils {
       String order) {
     return new TableDesc(BinarySortableSerDe.class,
         SequenceFileInputFormat.class, SequenceFileOutputFormat.class,
-        Utilities.makeProperties(Constants.LIST_COLUMNS, MetaStoreUtils
+        Utilities.makeProperties(serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
-        Constants.LIST_COLUMN_TYPES, MetaStoreUtils
+        serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        Constants.SERIALIZATION_SORT_ORDER, order));
+        serdeConstants.SERIALIZATION_SORT_ORDER, order));
   }
 
   /**
@@ -326,7 +326,7 @@ public final class PlanUtils {
         MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
         "columns.types", MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        Constants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\"));
   }
 
   /**
@@ -339,7 +339,7 @@ public final class PlanUtils {
         MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
         "columns.types", MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        Constants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\"));
   }
 
   /**
@@ -349,11 +349,11 @@ public final class PlanUtils {
       List<FieldSchema> fieldSchemas) {
     return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties(
-        Constants.LIST_COLUMNS, MetaStoreUtils
+        serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
-        Constants.LIST_COLUMN_TYPES, MetaStoreUtils
+        serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        Constants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\"));
   }
 
   /**
@@ -362,11 +362,11 @@ public final class PlanUtils {
   public static TableDesc getReduceValueTableDesc(List<FieldSchema> fieldSchemas) {
     return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties(
-        Constants.LIST_COLUMNS, MetaStoreUtils
+        serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
-        Constants.LIST_COLUMN_TYPES, MetaStoreUtils
+        serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        Constants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\"));
   }
 
   /**
@@ -686,7 +686,7 @@ public final class PlanUtils {
         HiveUtils.getStorageHandler(
           Hive.get().getConf(),
           tableDesc.getProperties().getProperty(
-            org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE));
+            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
       if (storageHandler != null) {
         Map<String, String> jobProperties = new LinkedHashMap<String, String>();
         if(input) {
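
The Utilities.makeProperties calls above take alternating key/value strings; a minimal
equivalent with plain java.util.Properties (column lists are illustrative):

    import java.util.Properties;
    import org.apache.hadoop.hive.serde.serdeConstants;

    Properties p = new Properties();
    p.setProperty(serdeConstants.LIST_COLUMNS, "key,value");        // column names
    p.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string,int");  // matching types
    p.setProperty(serdeConstants.ESCAPE_CHAR, "\\");                // backslash escaping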

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Thu Nov  8 09:44:19 2012
@@ -218,12 +218,12 @@ public class ReduceSinkDesc extends Abst
   @Explain(displayName = "sort order")
   public String getOrder() {
     return keySerializeInfo.getProperties().getProperty(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER);
+        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_SORT_ORDER);
   }
 
   public void setOrder(String orderStr) {
     keySerializeInfo.getProperties().setProperty(
-        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER,
+        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_SORT_ORDER,
         orderStr);
   }
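
SERIALIZATION_SORT_ORDER above carries one character per reduce key column; a minimal
sketch, assuming the usual '+'/'-' encoding for ascending/descending:

    import java.util.Properties;
    import org.apache.hadoop.hive.serde.serdeConstants;

    Properties keyProps = new Properties();
    keyProps.setProperty(serdeConstants.SERIALIZATION_SORT_ORDER, "++-");  // asc, asc, desc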
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Thu Nov  8 09:44:19 2012
@@ -54,7 +54,7 @@ public class TableDesc implements Serial
         .getOutputFormatSubstitute(class1);
     this.properties = properties;
     serdeClassName = properties
-        .getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
+        .getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
     ;
   }
 
@@ -131,7 +131,7 @@ public class TableDesc implements Serial
   @Explain(displayName = "name")
   public String getTableName() {
     return properties
-        .getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
+        .getProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME);
   }
 
   @Explain(displayName = "input format")
@@ -146,7 +146,7 @@ public class TableDesc implements Serial
 
   public boolean isNonNative() {
     return (properties.getProperty(
-        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE)
+        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE)
       != null);
   }
   

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Thu Nov  8 09:44:19 2012
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
-import static org.apache.hadoop.hive.serde.Constants.SERIALIZATION_NULL_FORMAT;
-import static org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME;
+import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT;
+import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
 import static org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString;
 
 import java.util.Map;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArrayContains.java Thu Nov  8 09:44:19 2012
@@ -63,7 +63,7 @@ public class GenericUDFArrayContains ext
     // Check if ARRAY_IDX argument is of category LIST
     if (!arguments[ARRAY_IDX].getCategory().equals(Category.LIST)) {
       throw new UDFArgumentTypeException(ARRAY_IDX,
-          "\"" + org.apache.hadoop.hive.serde.Constants.LIST_TYPE_NAME + "\" "
+          "\"" + org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "\" "
           + "expected at function ARRAY_CONTAINS, but "
           + "\"" + arguments[ARRAY_IDX].getTypeName() + "\" "
           + "is found");

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java Thu Nov  8 09:44:19 2012
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -62,18 +62,18 @@ public class GenericUDFConcatWS extends 
       switch(arguments[i].getCategory()) {
         case LIST:
           if (((ListObjectInspector)arguments[i]).getListElementObjectInspector()
-            .getTypeName().equals(Constants.STRING_TYPE_NAME)
+            .getTypeName().equals(serdeConstants.STRING_TYPE_NAME)
             || ((ListObjectInspector)arguments[i]).getListElementObjectInspector()
-            .getTypeName().equals(Constants.VOID_TYPE_NAME))
+            .getTypeName().equals(serdeConstants.VOID_TYPE_NAME))
           break;
         case PRIMITIVE:
-          if (arguments[i].getTypeName().equals(Constants.STRING_TYPE_NAME)
-            || arguments[i].getTypeName().equals(Constants.VOID_TYPE_NAME))
+          if (arguments[i].getTypeName().equals(serdeConstants.STRING_TYPE_NAME)
+            || arguments[i].getTypeName().equals(serdeConstants.VOID_TYPE_NAME))
           break;
         default:
           throw new UDFArgumentTypeException(i, "Argument " + (i + 1)
-            + " of function CONCAT_WS must be \"" + Constants.STRING_TYPE_NAME
-            + " or " + Constants.LIST_TYPE_NAME + "<" + Constants.STRING_TYPE_NAME
+            + " of function CONCAT_WS must be \"" + serdeConstants.STRING_TYPE_NAME
+            + " or " + serdeConstants.LIST_TYPE_NAME + "<" + serdeConstants.STRING_TYPE_NAME
             + ">\", but \"" + arguments[i].getTypeName() + "\" was found.");
       }
     }
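
The UDF initializers touched by this commit all validate argument ObjectInspectors
against the serde type names; a minimal sketch of the shared pattern (hypothetical
helper, not in this commit):

    import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    // Reject anything but a string (or void, i.e. a NULL literal) first argument.
    static void checkStringArg(ObjectInspector[] arguments) throws UDFArgumentTypeException {
      String typeName = arguments[0].getTypeName();
      if (!typeName.equals(serdeConstants.STRING_TYPE_NAME)
          && !typeName.equals(serdeConstants.VOID_TYPE_NAME)) {
        throw new UDFArgumentTypeException(0, "Argument 1 must be \""
            + serdeConstants.STRING_TYPE_NAME + "\", but \"" + typeName + "\" was found.");
      }
    }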

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java Thu Nov  8 09:44:19 2012
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -77,12 +77,12 @@ public class GenericUDFFormatNumber exte
       default:
         throw new UDFArgumentTypeException(0, "Argument 1"
           + " of function FORMAT_NUMBER must be \""
-          + Constants.TINYINT_TYPE_NAME + "\""
-          + " or \"" + Constants.SMALLINT_TYPE_NAME + "\""
-          + " or \"" + Constants.INT_TYPE_NAME + "\""
-          + " or \"" + Constants.BIGINT_TYPE_NAME + "\""
-          + " or \"" + Constants.DOUBLE_TYPE_NAME + "\""
-          + " or \"" + Constants.FLOAT_TYPE_NAME + "\", but \""
+          + serdeConstants.TINYINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.SMALLINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\", but \""
           + arguments[0].getTypeName() + "\" was found.");
     }
 
@@ -92,10 +92,10 @@ public class GenericUDFFormatNumber exte
       default:
         throw new UDFArgumentTypeException(1, "Argument 2"
           + " of function FORMAT_NUMBER must be \""
-          + Constants.TINYINT_TYPE_NAME + "\""
-          + " or \"" + Constants.SMALLINT_TYPE_NAME + "\""
-          + " or \"" + Constants.INT_TYPE_NAME + "\""
-          + " or \"" + Constants.BIGINT_TYPE_NAME + "\", but \""
+          + serdeConstants.TINYINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.SMALLINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\", but \""
           + arguments[1].getTypeName() + "\" was found.");
     }
 
@@ -114,12 +114,12 @@ public class GenericUDFFormatNumber exte
       default:
         throw new UDFArgumentTypeException(0, "Argument 1"
           + " of function FORMAT_NUMBER must be \""
-          + Constants.TINYINT_TYPE_NAME + "\""
-          + " or \"" + Constants.SMALLINT_TYPE_NAME + "\""
-          + " or \"" + Constants.INT_TYPE_NAME + "\""
-          + " or \"" + Constants.BIGINT_TYPE_NAME + "\""
-          + " or \"" + Constants.DOUBLE_TYPE_NAME + "\""
-          + " or \"" + Constants.FLOAT_TYPE_NAME + "\", but \""
+          + serdeConstants.TINYINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.SMALLINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\", but \""
           + arguments[0].getTypeName() + "\" was found.");
     }
 
@@ -133,10 +133,10 @@ public class GenericUDFFormatNumber exte
       default:
         throw new UDFArgumentTypeException(1, "Argument 2"
           + " of function FORMAT_NUMBER must be \""
-          + Constants.TINYINT_TYPE_NAME + "\""
-          + " or \"" + Constants.SMALLINT_TYPE_NAME + "\""
-          + " or \"" + Constants.INT_TYPE_NAME + "\""
-          + " or \"" + Constants.BIGINT_TYPE_NAME + "\", but \""
+          + serdeConstants.TINYINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.SMALLINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\", but \""
           + arguments[1].getTypeName() + "\" was found.");
     }
 
@@ -194,12 +194,12 @@ public class GenericUDFFormatNumber exte
         break;
       default:
         throw new HiveException("Argument 1 of function FORMAT_NUMBER must be "
-          + Constants.TINYINT_TYPE_NAME + "\""
-          + " or \"" + Constants.SMALLINT_TYPE_NAME + "\""
-          + " or \"" + Constants.INT_TYPE_NAME + "\""
-          + " or \"" + Constants.BIGINT_TYPE_NAME + "\""
-          + " or \"" + Constants.DOUBLE_TYPE_NAME + "\""
-          + " or \"" + Constants.FLOAT_TYPE_NAME + "\", but \""
+          + serdeConstants.TINYINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.SMALLINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\", but \""
           + argumentOIs[0].getTypeName() + "\" was found.");
     }
     return resultText;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java Thu Nov  8 09:44:19 2012
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
@@ -56,7 +56,7 @@ public class GenericUDFIf extends Generi
     if (!conditionTypeIsOk) {
       throw new UDFArgumentTypeException(0,
           "The first argument of function IF should be \""
-          + Constants.BOOLEAN_TYPE_NAME + "\", but \""
+          + serdeConstants.BOOLEAN_TYPE_NAME + "\", but \""
           + arguments[0].getTypeName() + "\" is found");
     }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java Thu Nov  8 09:44:19 2012
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -70,10 +70,10 @@ public class GenericUDFPrintf extends Ge
           "The function PRINTF(String format, Obj... args) needs at least one arguments.");
     }
 
-    if (arguments[0].getTypeName() != Constants.STRING_TYPE_NAME
-      && arguments[0].getTypeName() != Constants.VOID_TYPE_NAME) {
+    if (arguments[0].getTypeName() != serdeConstants.STRING_TYPE_NAME
+      && arguments[0].getTypeName() != serdeConstants.VOID_TYPE_NAME) {
         throw new UDFArgumentTypeException(0, "Argument 1"
-        + " of function PRINTF must be \"" + Constants.STRING_TYPE_NAME
+        + " of function PRINTF must be \"" + serdeConstants.STRING_TYPE_NAME
         + "\", but \"" + arguments[0].getTypeName() + "\" was found.");
       }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java Thu Nov  8 09:44:19 2012
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -50,7 +50,7 @@ public class GenericUDFSize extends Gene
     Category category = arguments[0].getCategory();
     String typeName = arguments[0].getTypeName();
     if (category != Category.MAP && category != Category.LIST
-        && !typeName.equals(Constants.VOID_TYPE_NAME)) {
+        && !typeName.equals(serdeConstants.VOID_TYPE_NAME)) {
       throw new UDFArgumentTypeException(0, "\""
           + Category.MAP.toString().toLowerCase() + "\" or \""
           + Category.LIST.toString().toLowerCase()
@@ -69,7 +69,7 @@ public class GenericUDFSize extends Gene
       result.set(((MapObjectInspector) returnOI).getMapSize(data));
     } else if (returnOI.getCategory() == Category.LIST) {
       result.set(((ListObjectInspector) returnOI).getListLength(data));
-    } else if (returnOI.getTypeName().equals(Constants.VOID_TYPE_NAME)) {
+    } else if (returnOI.getTypeName().equals(serdeConstants.VOID_TYPE_NAME)) {
       // null
       result.set(-1);
     }

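For context, size() returns the map or list cardinality and falls back to -1 for a void (null) argument, as the hunk above shows. A rough stand-alone restatement of that contract using java.util collections in place of Hive object inspectors (illustrative names, not the Hive API):

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class SizeSketch {
  // Mimics GenericUDFSize: cardinality of a map or list, -1 for a null input.
  static int size(Object data) {
    if (data instanceof Map) {
      return ((Map<?, ?>) data).size();
    }
    if (data instanceof List) {
      return ((List<?>) data).size();
    }
    return -1; // the UDF's sentinel for a void/null argument
  }

  public static void main(String[] args) {
    System.out.println(size(Arrays.asList("a", "b", "c"))); // 3
    System.out.println(size(null));                         // -1
  }
}
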
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSortArray.java Thu Nov  8 09:44:19 2012
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -73,7 +73,7 @@ public class GenericUDFSortArray extends
         }
       default:
         throw new UDFArgumentTypeException(0, "Argument 1"
-          + " of function SORT_ARRAY must be " + Constants.LIST_TYPE_NAME
+          + " of function SORT_ARRAY must be " + serdeConstants.LIST_TYPE_NAME
           + "<" + Category.PRIMITIVE + ">, but " + arguments[0].getTypeName()
           + " was found.");
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java Thu Nov  8 09:44:19 2012
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.udf.ge
 
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 
@@ -42,9 +42,9 @@ public class GenericUDFWhen extends Gene
     returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
 
     for (int i = 0; i + 1 < arguments.length; i += 2) {
-      if (!arguments[i].getTypeName().equals(Constants.BOOLEAN_TYPE_NAME)) {
+      if (!arguments[i].getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
         throw new UDFArgumentTypeException(i, "\""
-            + Constants.BOOLEAN_TYPE_NAME + "\" is expected after WHEN, "
+            + serdeConstants.BOOLEAN_TYPE_NAME + "\" is expected after WHEN, "
             + "but \"" + arguments[i].getTypeName() + "\" is found");
       }
       if (!returnOIResolver.update(arguments[i + 1])) {

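The initializer above walks the arguments as (condition, value) pairs, so every even-indexed argument must be boolean. A toy restatement of that validation outside Hive (hypothetical helper, not the UDF API):

public class WhenCheckSketch {
  // Even-indexed arguments are WHEN conditions and must be boolean;
  // the following odd-indexed argument is the matching THEN value.
  static void validate(String[] typeNames) {
    for (int i = 0; i + 1 < typeNames.length; i += 2) {
      if (!"boolean".equals(typeNames[i])) {
        throw new IllegalArgumentException("\"boolean\" is expected after WHEN, but \""
            + typeNames[i] + "\" is found");
      }
    }
  }

  public static void main(String[] args) {
    validate(new String[] {"boolean", "int", "boolean", "int"}); // passes
    try {
      validate(new String[] {"string", "int"});
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // mirrors the UDF's error text
    }
  }
}
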
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java Thu Nov  8 09:44:19 2012
@@ -28,7 +28,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -110,7 +110,7 @@ public class GenericUDTFJSONTuple extend
 
     for (int i = 0; i < args.length; ++i) {
       if (args[i].getCategory() != ObjectInspector.Category.PRIMITIVE ||
-          !args[i].getTypeName().equals(Constants.STRING_TYPE_NAME)) {
+          !args[i].getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
         throw new UDFArgumentException("json_tuple()'s arguments have to be string type");
       }
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java Thu Nov  8 09:44:19 2012
@@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -91,7 +91,7 @@ public class GenericUDTFParseUrlTuple ex
 
     for (int i = 0; i < args.length; ++i) {
       if (args[i].getCategory() != ObjectInspector.Category.PRIMITIVE ||
-          !args[i].getTypeName().equals(Constants.STRING_TYPE_NAME)) {
+          !args[i].getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
         throw new UDFArgumentException("parse_url_tuple()'s arguments have to be string type");
       }
     }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Thu Nov  8 09:44:19 2012
@@ -71,7 +71,7 @@ import org.apache.hadoop.hive.ql.parse.P
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
 import org.apache.hadoop.hive.shims.HadoopShims;
@@ -593,9 +593,9 @@ public class QTestUtil {
     srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
     srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
     srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
-    srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class
+    srcThrift.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class
         .getName());
-    srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT,
+    srcThrift.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT,
         TBinaryProtocol.class.getName());
     db.createTable(srcThrift);
 

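The renamed keys are ordinary string-valued table properties. A hedged sketch of what the setSerdeParam calls above end up recording, expressed with a plain java.util.Properties map (the Table object itself is elided; assumes the hive-serde jar is on the classpath):

import java.util.Properties;

import org.apache.hadoop.hive.serde.serdeConstants;

public class ThriftSerdePropsSketch {
  public static void main(String[] args) {
    Properties serdeProps = new Properties();
    // The same key/value pairs the test sets on the srcThrift table:
    serdeProps.setProperty(serdeConstants.SERIALIZATION_CLASS,
        "org.apache.hadoop.hive.serde2.thrift.test.Complex");
    serdeProps.setProperty(serdeConstants.SERIALIZATION_FORMAT,
        "org.apache.thrift.protocol.TBinaryProtocol");
    serdeProps.list(System.out);
  }
}
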
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Thu Nov  8 09:44:19 2012
@@ -48,7 +48,7 @@ import org.apache.hadoop.hive.ql.plan.Pl
 import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
 import org.apache.hadoop.hive.ql.plan.ScriptDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.TextInputFormat;
 
@@ -161,14 +161,14 @@ public class TestExecDriver extends Test
         column, "", false));
     ExprNodeDesc lhs = new ExprNodeGenericFuncDesc(
         TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
-        Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
+        serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
 
     ArrayList<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
     children2.add(new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
         .valueOf(100)));
     ExprNodeDesc rhs = new ExprNodeGenericFuncDesc(
         TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
-        Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
+        serdeConstants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
 
     ArrayList<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>();
     children3.add(lhs);

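Worth noting: the test looks up a function by the type name itself (serdeConstants.DOUBLE_TYPE_NAME, i.e. "double"), because Hive registers its cast functions under their target type names. A toy illustration of that registry convention (plain Java, not Hive's FunctionRegistry):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class CastRegistrySketch {
  // Cast functions keyed by target type name, mirroring the convention
  // that makes getFunctionInfo("double") resolve to the double cast.
  static final Map<String, Function<Object, Object>> REGISTRY = new HashMap<>();
  static {
    REGISTRY.put("double", v -> Double.valueOf(v.toString()));
    REGISTRY.put("int", v -> Integer.valueOf(v.toString()));
  }

  public static void main(String[] args) {
    System.out.println(REGISTRY.get("double").apply("100")); // 100.0
  }
}
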
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Thu Nov  8 09:44:19 2012
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -162,7 +162,7 @@ public class TestExpressionEvaluator ext
           false);
       ExprNodeDesc col11desc = getListIndexNode(col1desc, 1);
       ExprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
-          .getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc);
+          .getFuncExprNodeDesc(serdeConstants.DOUBLE_TYPE_NAME, col11desc);
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Thu Nov  8 09:44:19 2012
@@ -37,7 +37,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
@@ -380,12 +380,12 @@ public class TestRCFile extends TestCase
     Properties tbl = new Properties();
 
     // Set the configuration parameters
-    tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");
+    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
     tbl.setProperty("columns",
         "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
     tbl.setProperty("columns.types",
         "tinyint:smallint:int:bigint:double:string:int:string");
-    tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
+    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
     return tbl;
   }
 

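A note on the values above: "9" under serialization.format is an ASCII code, TAB, which the delimiter-based serdes in this test read as the field separator (my reading of the convention; treat it as an assumption). Rebuilding the same Properties outside the test, using the generated column-list constants in place of the raw "columns"/"columns.types" strings:

import java.util.Properties;

import org.apache.hadoop.hive.serde.serdeConstants;

public class RCFilePropsSketch {
  public static void main(String[] args) {
    Properties tbl = new Properties();
    // "9" = ASCII TAB, assumed here to act as the field separator.
    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
    tbl.setProperty(serdeConstants.LIST_COLUMNS,
        "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
        "tinyint:smallint:int:bigint:double:string:int:string");
    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
    tbl.list(System.out);
  }
}
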
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Thu Nov  8 09:44:19 2012
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
@@ -94,9 +94,9 @@ public class TestHive extends TestCase {
       Table tbl = new Table(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
       List<FieldSchema> fields = tbl.getCols();
 
-      fields.add(new FieldSchema("col1", Constants.INT_TYPE_NAME, "int -- first column"));
-      fields.add(new FieldSchema("col2", Constants.STRING_TYPE_NAME, "string -- second column"));
-      fields.add(new FieldSchema("col3", Constants.DOUBLE_TYPE_NAME, "double -- thrift column"));
+      fields.add(new FieldSchema("col1", serdeConstants.INT_TYPE_NAME, "int -- first column"));
+      fields.add(new FieldSchema("col2", serdeConstants.STRING_TYPE_NAME, "string -- second column"));
+      fields.add(new FieldSchema("col3", serdeConstants.DOUBLE_TYPE_NAME, "double -- thrift column"));
       tbl.setFields(fields);
 
       tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
@@ -117,7 +117,7 @@ public class TestHive extends TestCase {
       partCols
           .add(new FieldSchema(
           "ds",
-          Constants.STRING_TYPE_NAME,
+          serdeConstants.STRING_TYPE_NAME,
           "partition column, date but in string format as date type is not yet supported in QL"));
       tbl.setPartCols(partCols);
 
@@ -127,12 +127,12 @@ public class TestHive extends TestCase {
 
       // set output format parameters (these are not supported by QL but only
       // for demo purposes)
-      tbl.setSerdeParam(Constants.FIELD_DELIM, "1");
-      tbl.setSerdeParam(Constants.LINE_DELIM, "\n");
-      tbl.setSerdeParam(Constants.MAPKEY_DELIM, "3");
-      tbl.setSerdeParam(Constants.COLLECTION_DELIM, "2");
+      tbl.setSerdeParam(serdeConstants.FIELD_DELIM, "1");
+      tbl.setSerdeParam(serdeConstants.LINE_DELIM, "\n");
+      tbl.setSerdeParam(serdeConstants.MAPKEY_DELIM, "3");
+      tbl.setSerdeParam(serdeConstants.COLLECTION_DELIM, "2");
 
-      tbl.setSerdeParam(Constants.FIELD_DELIM, "1");
+      tbl.setSerdeParam(serdeConstants.FIELD_DELIM, "1");
       tbl.setSerializationLib(LazySimpleSerDe.class.getName());
 
       // create table
@@ -206,8 +206,8 @@ public class TestHive extends TestCase {
       tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
       tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
       tbl.setSerializationLib(ThriftDeserializer.class.getName());
-      tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
-      tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class
+      tbl.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName());
+      tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, TBinaryProtocol.class
           .getName());
       try {
         hm.createTable(tbl);
@@ -257,8 +257,8 @@ public class TestHive extends TestCase {
     tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
     tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
     tbl.setSerializationLib(ThriftDeserializer.class.getName());
-    tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
-    tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class
+    tbl.setSerdeParam(serdeConstants.SERIALIZATION_CLASS, Complex.class.getName());
+    tbl.setSerdeParam(serdeConstants.SERIALIZATION_FORMAT, TBinaryProtocol.class
         .getName());
     return tbl;
   }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java Thu Nov  8 09:44:19 2012
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.thrift.TException;
 
@@ -63,8 +63,8 @@ public class TestHiveMetaStoreChecker ex
     checker = new HiveMetaStoreChecker(hive);
 
     partCols = new ArrayList<FieldSchema>();
-    partCols.add(new FieldSchema(partDateName, Constants.STRING_TYPE_NAME, ""));
-    partCols.add(new FieldSchema(partCityName, Constants.STRING_TYPE_NAME, ""));
+    partCols.add(new FieldSchema(partDateName, serdeConstants.STRING_TYPE_NAME, ""));
+    partCols.add(new FieldSchema(partCityName, serdeConstants.STRING_TYPE_NAME, ""));
 
     parts = new ArrayList<Map<String, String>>();
     Map<String, String> part1 = new HashMap<String, String>();

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java Thu Nov  8 09:44:19 2012
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -54,10 +54,10 @@ public class GenericUDFEvaluateNPE exten
             + "needs only one argument.");
     }
 
-    if (!arguments[0].getTypeName().equals(Constants.STRING_TYPE_NAME)) {
+    if (!arguments[0].getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
       throw new UDFArgumentTypeException(0,
         "Argument 1 of function evaluate_npe must be \""
-        + Constants.STRING_TYPE_NAME + "but \""
+        + serdeConstants.STRING_TYPE_NAME + "but \""
         + arguments[0].getTypeName() + "\" was found.");
     }
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Thu Nov  8 09:44:19 2012
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
@@ -56,11 +56,11 @@ public class GenericUDFTestTranslate ext
     }
 
     for (int i = 0; i < 3; i++) {
-      if (arguments[i].getTypeName() != Constants.STRING_TYPE_NAME
-          && arguments[i].getTypeName() != Constants.VOID_TYPE_NAME) {
+      if (arguments[i].getTypeName() != serdeConstants.STRING_TYPE_NAME
+          && arguments[i].getTypeName() != serdeConstants.VOID_TYPE_NAME) {
         throw new UDFArgumentTypeException(i, "The " + getOrdinal(i + 1)
             + " argument of function TRANSLATE is expected to \""
-            + Constants.STRING_TYPE_NAME + "\", but \""
+            + serdeConstants.STRING_TYPE_NAME + "\", but \""
             + arguments[i].getTypeName() + "\" is found");
       }
     }

Modified: hive/trunk/ql/src/test/results/clientpositive/convert_enum_to_string.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/convert_enum_to_string.q.out?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/convert_enum_to_string.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/convert_enum_to_string.q.out Thu Nov  8 09:44:19 2012
@@ -32,13 +32,14 @@ my_binary	struct<hb:binary,offset:int,is
 my_string_string_map	map<string,string>	from deserializer
 my_string_enum_map	map<string,string>	from deserializer
 my_enum_string_map	map<string,string>	from deserializer
-my_enum_struct_map	map<string,struct<my_string:string,my_enum:string>>	from deserializer
+my_enum_struct_map	map<string,struct<my_string:string,my_enum:string,optionals:struct<>>>	from deserializer
 my_enum_stringlist_map	map<string,array<string>>	from deserializer
-my_enum_structlist_map	map<string,array<struct<my_string:string,my_enum:string>>>	from deserializer
+my_enum_structlist_map	map<string,array<struct<my_string:string,my_enum:string,optionals:struct<>>>>	from deserializer
 my_stringlist	array<string>	from deserializer
-my_structlist	array<struct<my_string:string,my_enum:string>>	from deserializer
+my_structlist	array<struct<my_string:string,my_enum:string,optionals:struct<>>>	from deserializer
 my_enumlist	array<string>	from deserializer
 my_stringset	struct<>	from deserializer
 my_enumset	struct<>	from deserializer
 my_structset	struct<>	from deserializer
+optionals	struct<>	from deserializer
 b	string	

Modified: hive/trunk/serde/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/serde/build.xml?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/serde/build.xml (original)
+++ hive/trunk/serde/build.xml Thu Nov  8 09:44:19 2012
@@ -66,6 +66,10 @@
     <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
       <arg line="--gen java:beans -o ${src.dir}/gen/thrift if/test/testthrift.thrift " />
     </exec>
+    <echo>Executing ${thrift.home}/bin/thrift to build megastruct.thrift classes... </echo>
+    <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
+      <arg line="--gen java:beans -o ${src.dir}/gen/thrift if/test/megastruct.thrift " />
+    </exec>
   </target>
 
   <target name="gen-testdata" depends="compile-test,test-jar">

Modified: hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp (original)
+++ hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp Thu Nov  8 09:44:19 2012
@@ -1,7 +1,8 @@
 /**
- * Autogenerated by Thrift Compiler (0.7.0)
+ * Autogenerated by Thrift Compiler (0.9.0)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
  */
 #include "serde_constants.h"
 

Modified: hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h (original)
+++ hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h Thu Nov  8 09:44:19 2012
@@ -1,7 +1,8 @@
 /**
- * Autogenerated by Thrift Compiler (0.7.0)
+ * Autogenerated by Thrift Compiler (0.9.0)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
  */
 #ifndef serde_CONSTANTS_H
 #define serde_CONSTANTS_H

Modified: hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.cpp
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.cpp?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.cpp (original)
+++ hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.cpp Thu Nov  8 09:44:19 2012
@@ -1,10 +1,13 @@
 /**
- * Autogenerated by Thrift Compiler (0.7.0)
+ * Autogenerated by Thrift Compiler (0.9.0)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
  */
 #include "serde_types.h"
 
+#include <algorithm>
+
 namespace Hive {
 
 } // namespace

Modified: hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.h
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.h?rev=1406984&r1=1406983&r2=1406984&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.h (original)
+++ hive/trunk/serde/src/gen/thrift/gen-cpp/serde_types.h Thu Nov  8 09:44:19 2012
@@ -1,15 +1,16 @@
 /**
- * Autogenerated by Thrift Compiler (0.7.0)
+ * Autogenerated by Thrift Compiler (0.9.0)
  *
  * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
  */
 #ifndef serde_TYPES_H
 #define serde_TYPES_H
 
-#include <Thrift.h>
-#include <TApplicationException.h>
-#include <protocol/TProtocol.h>
-#include <transport/TTransport.h>
+#include <thrift/Thrift.h>
+#include <thrift/TApplicationException.h>
+#include <thrift/protocol/TProtocol.h>
+#include <thrift/transport/TTransport.h>
 
 
 

Added: hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java?rev=1406984&view=auto
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java (added)
+++ hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java Thu Nov  8 09:44:19 2012
@@ -0,0 +1,125 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.serde;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class serdeConstants {
+
+  public static final String SERIALIZATION_LIB = "serialization.lib";
+
+  public static final String SERIALIZATION_CLASS = "serialization.class";
+
+  public static final String SERIALIZATION_FORMAT = "serialization.format";
+
+  public static final String SERIALIZATION_DDL = "serialization.ddl";
+
+  public static final String SERIALIZATION_NULL_FORMAT = "serialization.null.format";
+
+  public static final String SERIALIZATION_LAST_COLUMN_TAKES_REST = "serialization.last.column.takes.rest";
+
+  public static final String SERIALIZATION_SORT_ORDER = "serialization.sort.order";
+
+  public static final String SERIALIZATION_USE_JSON_OBJECTS = "serialization.use.json.object";
+
+  public static final String FIELD_DELIM = "field.delim";
+
+  public static final String COLLECTION_DELIM = "colelction.delim";
+
+  public static final String LINE_DELIM = "line.delim";
+
+  public static final String MAPKEY_DELIM = "mapkey.delim";
+
+  public static final String QUOTE_CHAR = "quote.delim";
+
+  public static final String ESCAPE_CHAR = "escape.delim";
+
+  public static final String VOID_TYPE_NAME = "void";
+
+  public static final String BOOLEAN_TYPE_NAME = "boolean";
+
+  public static final String TINYINT_TYPE_NAME = "tinyint";
+
+  public static final String SMALLINT_TYPE_NAME = "smallint";
+
+  public static final String INT_TYPE_NAME = "int";
+
+  public static final String BIGINT_TYPE_NAME = "bigint";
+
+  public static final String FLOAT_TYPE_NAME = "float";
+
+  public static final String DOUBLE_TYPE_NAME = "double";
+
+  public static final String STRING_TYPE_NAME = "string";
+
+  public static final String DATE_TYPE_NAME = "date";
+
+  public static final String DATETIME_TYPE_NAME = "datetime";
+
+  public static final String TIMESTAMP_TYPE_NAME = "timestamp";
+
+  public static final String BINARY_TYPE_NAME = "binary";
+
+  public static final String LIST_TYPE_NAME = "array";
+
+  public static final String MAP_TYPE_NAME = "map";
+
+  public static final String STRUCT_TYPE_NAME = "struct";
+
+  public static final String UNION_TYPE_NAME = "uniontype";
+
+  public static final String LIST_COLUMNS = "columns";
+
+  public static final String LIST_COLUMN_TYPES = "columns.types";
+
+  public static final Set<String> PrimitiveTypes = new HashSet<String>();
+  static {
+    PrimitiveTypes.add("void");
+    PrimitiveTypes.add("boolean");
+    PrimitiveTypes.add("tinyint");
+    PrimitiveTypes.add("smallint");
+    PrimitiveTypes.add("int");
+    PrimitiveTypes.add("bigint");
+    PrimitiveTypes.add("float");
+    PrimitiveTypes.add("double");
+    PrimitiveTypes.add("string");
+    PrimitiveTypes.add("date");
+    PrimitiveTypes.add("datetime");
+    PrimitiveTypes.add("timestamp");
+    PrimitiveTypes.add("binary");
+  }
+
+  public static final Set<String> CollectionTypes = new HashSet<String>();
+  static {
+    CollectionTypes.add("array");
+    CollectionTypes.add("map");
+  }
+
+}
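
Callers migrate by swapping the import, as every hunk above does; the constant values themselves are unchanged (including the long-standing "colelction.delim" spelling, which appears to be kept as-is for compatibility with existing metadata). A small usage sketch against the regenerated class, assuming the serde jar is on the classpath:

import org.apache.hadoop.hive.serde.serdeConstants;

public class SerdeConstantsUsage {
  public static void main(String[] args) {
    // Type-name constants are plain interned strings, exactly as before the rename.
    System.out.println(serdeConstants.STRING_TYPE_NAME);  // string
    System.out.println(serdeConstants.LIST_TYPE_NAME);    // array
    // The generated sets answer simple category questions.
    System.out.println(serdeConstants.PrimitiveTypes.contains("bigint"));   // true
    System.out.println(serdeConstants.CollectionTypes.contains("struct"));  // false
  }
}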