You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by th...@apache.org on 2013/09/17 22:02:23 UTC

svn commit: r1524198 [4/5] - in /hive/branches/branch-0.12: common/src/java/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hadoop/hive/common/type/ data/files/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hi...

Added: hive/branches/branch-0.12/ql/src/test/results/clientpositive/varchar_union1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientpositive/varchar_union1.q.out?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientpositive/varchar_union1.q.out (added)
+++ hive/branches/branch-0.12/ql/src/test/results/clientpositive/varchar_union1.q.out Tue Sep 17 20:02:21 2013
@@ -0,0 +1,157 @@
+PREHOOK: query: drop table varchar_union1_vc1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table varchar_union1_vc1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table varchar_union1_vc2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table varchar_union1_vc2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table varchar_union1_str
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table varchar_union1_str
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table  varchar_union1_vc1 (
+  c1 int,
+  c2 varchar(10)
+)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table  varchar_union1_vc1 (
+  c1 int,
+  c2 varchar(10)
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@varchar_union1_vc1
+PREHOOK: query: create table  varchar_union1_vc2 (
+  c1 int,
+  c2 varchar(20)
+)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table  varchar_union1_vc2 (
+  c1 int,
+  c2 varchar(20)
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@varchar_union1_vc2
+PREHOOK: query: create table  varchar_union1_str (
+  c1 int,
+  c2 string
+)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table  varchar_union1_str (
+  c1 int,
+  c2 string
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@varchar_union1_str
+PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc1
+PREHOOK: type: LOAD
+PREHOOK: Output: default@varchar_union1_vc1
+POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc1
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@varchar_union1_vc1
+PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc2
+PREHOOK: type: LOAD
+PREHOOK: Output: default@varchar_union1_vc2
+POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_vc2
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@varchar_union1_vc2
+PREHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_str
+PREHOOK: type: LOAD
+PREHOOK: Output: default@varchar_union1_str
+POSTHOOK: query: load data local inpath '../data/files/vc1.txt' into table varchar_union1_str
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@varchar_union1_str
+PREHOOK: query: -- union varchar with same length varchar
+select * from (
+  select * from varchar_union1_vc1
+  union all
+  select * from varchar_union1_vc1 limit 1
+) q1 sort by c1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_union1_vc1
+#### A masked pattern was here ####
+POSTHOOK: query: -- union varchar with same length varchar
+select * from (
+  select * from varchar_union1_vc1
+  union all
+  select * from varchar_union1_vc1 limit 1
+) q1 sort by c1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_union1_vc1
+#### A masked pattern was here ####
+1	abc
+1	abc
+2	abc 
+3	 abc
+PREHOOK: query: -- union varchar with different length varchar
+select * from (
+  select * from varchar_union1_vc1
+  union all
+  select * from varchar_union1_vc2 limit 1
+) q1 sort by c1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_union1_vc1
+PREHOOK: Input: default@varchar_union1_vc2
+#### A masked pattern was here ####
+POSTHOOK: query: -- union varchar with different length varchar
+select * from (
+  select * from varchar_union1_vc1
+  union all
+  select * from varchar_union1_vc2 limit 1
+) q1 sort by c1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_union1_vc1
+POSTHOOK: Input: default@varchar_union1_vc2
+#### A masked pattern was here ####
+1	abc
+1	abc
+2	abc 
+3	 abc
+PREHOOK: query: -- union varchar with string
+select * from (
+  select * from varchar_union1_vc1
+  union all
+  select * from varchar_union1_str limit 1
+) q1 sort by c1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_union1_str
+PREHOOK: Input: default@varchar_union1_vc1
+#### A masked pattern was here ####
+POSTHOOK: query: -- union varchar with string
+select * from (
+  select * from varchar_union1_vc1
+  union all
+  select * from varchar_union1_str limit 1
+) q1 sort by c1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_union1_str
+POSTHOOK: Input: default@varchar_union1_vc1
+#### A masked pattern was here ####
+1	abc
+1	abc
+2	abc 
+3	 abc
+PREHOOK: query: drop table varchar_union1_vc1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@varchar_union1_vc1
+PREHOOK: Output: default@varchar_union1_vc1
+POSTHOOK: query: drop table varchar_union1_vc1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@varchar_union1_vc1
+POSTHOOK: Output: default@varchar_union1_vc1
+PREHOOK: query: drop table varchar_union1_vc2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@varchar_union1_vc2
+PREHOOK: Output: default@varchar_union1_vc2
+POSTHOOK: query: drop table varchar_union1_vc2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@varchar_union1_vc2
+POSTHOOK: Output: default@varchar_union1_vc2
+PREHOOK: query: drop table varchar_union1_str
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@varchar_union1_str
+PREHOOK: Output: default@varchar_union1_str
+POSTHOOK: query: drop table varchar_union1_str
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@varchar_union1_str
+POSTHOOK: Output: default@varchar_union1_str

Modified: hive/branches/branch-0.12/serde/if/serde.thrift
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/if/serde.thrift?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/if/serde.thrift (original)
+++ hive/branches/branch-0.12/serde/if/serde.thrift Tue Sep 17 20:02:21 2013
@@ -50,6 +50,8 @@ const string BIGINT_TYPE_NAME    = "bigi
 const string FLOAT_TYPE_NAME     = "float";
 const string DOUBLE_TYPE_NAME    = "double";
 const string STRING_TYPE_NAME    = "string";
+const string CHAR_TYPE_NAME      = "char";
+const string VARCHAR_TYPE_NAME   = "varchar";
 const string DATE_TYPE_NAME      = "date";
 const string DATETIME_TYPE_NAME  = "datetime";
 const string TIMESTAMP_TYPE_NAME = "timestamp";
@@ -64,7 +66,7 @@ const string UNION_TYPE_NAME  = "unionty
 const string LIST_COLUMNS = "columns";
 const string LIST_COLUMN_TYPES = "columns.types";
 
-const set<string> PrimitiveTypes  = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME  DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME DECIMAL_TYPE_NAME BINARY_TYPE_NAME],
+const set<string> PrimitiveTypes  = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME  VARCHAR_TYPE_NAME CHAR_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME DECIMAL_TYPE_NAME BINARY_TYPE_NAME],
 const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],
 
 

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.cpp?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.cpp (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.cpp Tue Sep 17 20:02:21 2013
@@ -57,6 +57,10 @@ serdeConstants::serdeConstants() {
 
   STRING_TYPE_NAME = "string";
 
+  CHAR_TYPE_NAME = "char";
+
+  VARCHAR_TYPE_NAME = "varchar";
+
   DATE_TYPE_NAME = "date";
 
   DATETIME_TYPE_NAME = "datetime";
@@ -88,6 +92,8 @@ serdeConstants::serdeConstants() {
   PrimitiveTypes.insert("float");
   PrimitiveTypes.insert("double");
   PrimitiveTypes.insert("string");
+  PrimitiveTypes.insert("varchar");
+  PrimitiveTypes.insert("char");
   PrimitiveTypes.insert("date");
   PrimitiveTypes.insert("datetime");
   PrimitiveTypes.insert("timestamp");

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.h
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.h?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.h (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-cpp/serde_constants.h Tue Sep 17 20:02:21 2013
@@ -38,6 +38,8 @@ class serdeConstants {
   std::string FLOAT_TYPE_NAME;
   std::string DOUBLE_TYPE_NAME;
   std::string STRING_TYPE_NAME;
+  std::string CHAR_TYPE_NAME;
+  std::string VARCHAR_TYPE_NAME;
   std::string DATE_TYPE_NAME;
   std::string DATETIME_TYPE_NAME;
   std::string TIMESTAMP_TYPE_NAME;

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java Tue Sep 17 20:02:21 2013
@@ -79,6 +79,10 @@ public class serdeConstants {
 
   public static final String STRING_TYPE_NAME = "string";
 
+  public static final String CHAR_TYPE_NAME = "char";
+
+  public static final String VARCHAR_TYPE_NAME = "varchar";
+
   public static final String DATE_TYPE_NAME = "date";
 
   public static final String DATETIME_TYPE_NAME = "datetime";
@@ -112,6 +116,8 @@ public class serdeConstants {
     PrimitiveTypes.add("float");
     PrimitiveTypes.add("double");
     PrimitiveTypes.add("string");
+    PrimitiveTypes.add("varchar");
+    PrimitiveTypes.add("char");
     PrimitiveTypes.add("date");
     PrimitiveTypes.add("datetime");
     PrimitiveTypes.add("timestamp");

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java Tue Sep 17 20:02:21 2013
@@ -528,7 +528,7 @@ public class ThriftTestObj implements or
                 struct.field3 = new ArrayList<InnerStruct>(_list0.size);
                 for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                 {
-                  InnerStruct _elem2; // required
+                  InnerStruct _elem2; // optional
                   _elem2 = new InnerStruct();
                   _elem2.read(iprot);
                   struct.field3.add(_elem2);
@@ -636,7 +636,7 @@ public class ThriftTestObj implements or
           struct.field3 = new ArrayList<InnerStruct>(_list5.size);
           for (int _i6 = 0; _i6 < _list5.size; ++_i6)
           {
-            InnerStruct _elem7; // required
+            InnerStruct _elem7; // optional
             _elem7 = new InnerStruct();
             _elem7.read(iprot);
             struct.field3.add(_elem7);

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java Tue Sep 17 20:02:21 2013
@@ -836,7 +836,7 @@ public class Complex implements org.apac
                 struct.lint = new ArrayList<Integer>(_list0.size);
                 for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                 {
-                  int _elem2; // required
+                  int _elem2; // optional
                   _elem2 = iprot.readI32();
                   struct.lint.add(_elem2);
                 }
@@ -854,7 +854,7 @@ public class Complex implements org.apac
                 struct.lString = new ArrayList<String>(_list3.size);
                 for (int _i4 = 0; _i4 < _list3.size; ++_i4)
                 {
-                  String _elem5; // required
+                  String _elem5; // optional
                   _elem5 = iprot.readString();
                   struct.lString.add(_elem5);
                 }
@@ -872,7 +872,7 @@ public class Complex implements org.apac
                 struct.lintString = new ArrayList<IntString>(_list6.size);
                 for (int _i7 = 0; _i7 < _list6.size; ++_i7)
                 {
-                  IntString _elem8; // required
+                  IntString _elem8; // optional
                   _elem8 = new IntString();
                   _elem8.read(iprot);
                   struct.lintString.add(_elem8);
@@ -1074,7 +1074,7 @@ public class Complex implements org.apac
           struct.lint = new ArrayList<Integer>(_list21.size);
           for (int _i22 = 0; _i22 < _list21.size; ++_i22)
           {
-            int _elem23; // required
+            int _elem23; // optional
             _elem23 = iprot.readI32();
             struct.lint.add(_elem23);
           }
@@ -1087,7 +1087,7 @@ public class Complex implements org.apac
           struct.lString = new ArrayList<String>(_list24.size);
           for (int _i25 = 0; _i25 < _list24.size; ++_i25)
           {
-            String _elem26; // required
+            String _elem26; // optional
             _elem26 = iprot.readString();
             struct.lString.add(_elem26);
           }
@@ -1100,7 +1100,7 @@ public class Complex implements org.apac
           struct.lintString = new ArrayList<IntString>(_list27.size);
           for (int _i28 = 0; _i28 < _list27.size; ++_i28)
           {
-            IntString _elem29; // required
+            IntString _elem29; // optional
             _elem29 = new IntString();
             _elem29.read(iprot);
             struct.lintString.add(_elem29);

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java Tue Sep 17 20:02:21 2013
@@ -2280,7 +2280,7 @@ public class MegaStruct implements org.a
                     _val19 = new ArrayList<String>(_list20.size);
                     for (int _i21 = 0; _i21 < _list20.size; ++_i21)
                     {
-                      String _elem22; // required
+                      String _elem22; // optional
                       _elem22 = iprot.readString();
                       _val19.add(_elem22);
                     }
@@ -2310,7 +2310,7 @@ public class MegaStruct implements org.a
                     _val26 = new ArrayList<MiniStruct>(_list27.size);
                     for (int _i28 = 0; _i28 < _list27.size; ++_i28)
                     {
-                      MiniStruct _elem29; // required
+                      MiniStruct _elem29; // optional
                       _elem29 = new MiniStruct();
                       _elem29.read(iprot);
                       _val26.add(_elem29);
@@ -2333,7 +2333,7 @@ public class MegaStruct implements org.a
                 struct.my_stringlist = new ArrayList<String>(_list30.size);
                 for (int _i31 = 0; _i31 < _list30.size; ++_i31)
                 {
-                  String _elem32; // required
+                  String _elem32; // optional
                   _elem32 = iprot.readString();
                   struct.my_stringlist.add(_elem32);
                 }
@@ -2351,7 +2351,7 @@ public class MegaStruct implements org.a
                 struct.my_structlist = new ArrayList<MiniStruct>(_list33.size);
                 for (int _i34 = 0; _i34 < _list33.size; ++_i34)
                 {
-                  MiniStruct _elem35; // required
+                  MiniStruct _elem35; // optional
                   _elem35 = new MiniStruct();
                   _elem35.read(iprot);
                   struct.my_structlist.add(_elem35);
@@ -2370,7 +2370,7 @@ public class MegaStruct implements org.a
                 struct.my_enumlist = new ArrayList<MyEnum>(_list36.size);
                 for (int _i37 = 0; _i37 < _list36.size; ++_i37)
                 {
-                  MyEnum _elem38; // required
+                  MyEnum _elem38; // optional
                   _elem38 = MyEnum.findByValue(iprot.readI32());
                   struct.my_enumlist.add(_elem38);
                 }
@@ -2388,7 +2388,7 @@ public class MegaStruct implements org.a
                 struct.my_stringset = new HashSet<String>(2*_set39.size);
                 for (int _i40 = 0; _i40 < _set39.size; ++_i40)
                 {
-                  String _elem41; // required
+                  String _elem41; // optional
                   _elem41 = iprot.readString();
                   struct.my_stringset.add(_elem41);
                 }
@@ -2406,7 +2406,7 @@ public class MegaStruct implements org.a
                 struct.my_enumset = new HashSet<MyEnum>(2*_set42.size);
                 for (int _i43 = 0; _i43 < _set42.size; ++_i43)
                 {
-                  MyEnum _elem44; // required
+                  MyEnum _elem44; // optional
                   _elem44 = MyEnum.findByValue(iprot.readI32());
                   struct.my_enumset.add(_elem44);
                 }
@@ -2424,7 +2424,7 @@ public class MegaStruct implements org.a
                 struct.my_structset = new HashSet<MiniStruct>(2*_set45.size);
                 for (int _i46 = 0; _i46 < _set45.size; ++_i46)
                 {
-                  MiniStruct _elem47; // required
+                  MiniStruct _elem47; // optional
                   _elem47 = new MiniStruct();
                   _elem47.read(iprot);
                   struct.my_structset.add(_elem47);
@@ -3023,7 +3023,7 @@ public class MegaStruct implements org.a
               _val95 = new ArrayList<String>(_list96.size);
               for (int _i97 = 0; _i97 < _list96.size; ++_i97)
               {
-                String _elem98; // required
+                String _elem98; // optional
                 _elem98 = iprot.readString();
                 _val95.add(_elem98);
               }
@@ -3047,7 +3047,7 @@ public class MegaStruct implements org.a
               _val102 = new ArrayList<MiniStruct>(_list103.size);
               for (int _i104 = 0; _i104 < _list103.size; ++_i104)
               {
-                MiniStruct _elem105; // required
+                MiniStruct _elem105; // optional
                 _elem105 = new MiniStruct();
                 _elem105.read(iprot);
                 _val102.add(_elem105);
@@ -3064,7 +3064,7 @@ public class MegaStruct implements org.a
           struct.my_stringlist = new ArrayList<String>(_list106.size);
           for (int _i107 = 0; _i107 < _list106.size; ++_i107)
           {
-            String _elem108; // required
+            String _elem108; // optional
             _elem108 = iprot.readString();
             struct.my_stringlist.add(_elem108);
           }
@@ -3077,7 +3077,7 @@ public class MegaStruct implements org.a
           struct.my_structlist = new ArrayList<MiniStruct>(_list109.size);
           for (int _i110 = 0; _i110 < _list109.size; ++_i110)
           {
-            MiniStruct _elem111; // required
+            MiniStruct _elem111; // optional
             _elem111 = new MiniStruct();
             _elem111.read(iprot);
             struct.my_structlist.add(_elem111);
@@ -3091,7 +3091,7 @@ public class MegaStruct implements org.a
           struct.my_enumlist = new ArrayList<MyEnum>(_list112.size);
           for (int _i113 = 0; _i113 < _list112.size; ++_i113)
           {
-            MyEnum _elem114; // required
+            MyEnum _elem114; // optional
             _elem114 = MyEnum.findByValue(iprot.readI32());
             struct.my_enumlist.add(_elem114);
           }
@@ -3104,7 +3104,7 @@ public class MegaStruct implements org.a
           struct.my_stringset = new HashSet<String>(2*_set115.size);
           for (int _i116 = 0; _i116 < _set115.size; ++_i116)
           {
-            String _elem117; // required
+            String _elem117; // optional
             _elem117 = iprot.readString();
             struct.my_stringset.add(_elem117);
           }
@@ -3117,7 +3117,7 @@ public class MegaStruct implements org.a
           struct.my_enumset = new HashSet<MyEnum>(2*_set118.size);
           for (int _i119 = 0; _i119 < _set118.size; ++_i119)
           {
-            MyEnum _elem120; // required
+            MyEnum _elem120; // optional
             _elem120 = MyEnum.findByValue(iprot.readI32());
             struct.my_enumset.add(_elem120);
           }
@@ -3130,7 +3130,7 @@ public class MegaStruct implements org.a
           struct.my_structset = new HashSet<MiniStruct>(2*_set121.size);
           for (int _i122 = 0; _i122 < _set121.size; ++_i122)
           {
-            MiniStruct _elem123; // required
+            MiniStruct _elem123; // optional
             _elem123 = new MiniStruct();
             _elem123.read(iprot);
             struct.my_structset.add(_elem123);

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php Tue Sep 17 20:02:21 2013
@@ -62,6 +62,10 @@ $GLOBALS['serde_CONSTANTS']['DOUBLE_TYPE
 
 $GLOBALS['serde_CONSTANTS']['STRING_TYPE_NAME'] = "string";
 
+$GLOBALS['serde_CONSTANTS']['CHAR_TYPE_NAME'] = "char";
+
+$GLOBALS['serde_CONSTANTS']['VARCHAR_TYPE_NAME'] = "varchar";
+
 $GLOBALS['serde_CONSTANTS']['DATE_TYPE_NAME'] = "date";
 
 $GLOBALS['serde_CONSTANTS']['DATETIME_TYPE_NAME'] = "datetime";
@@ -94,6 +98,8 @@ $GLOBALS['serde_CONSTANTS']['PrimitiveTy
   "float" => true,
   "double" => true,
   "string" => true,
+  "varchar" => true,
+  "char" => true,
   "date" => true,
   "datetime" => true,
   "timestamp" => true,

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py Tue Sep 17 20:02:21 2013
@@ -32,6 +32,8 @@ BIGINT_TYPE_NAME = "bigint"
 FLOAT_TYPE_NAME = "float"
 DOUBLE_TYPE_NAME = "double"
 STRING_TYPE_NAME = "string"
+CHAR_TYPE_NAME = "char"
+VARCHAR_TYPE_NAME = "varchar"
 DATE_TYPE_NAME = "date"
 DATETIME_TYPE_NAME = "datetime"
 TIMESTAMP_TYPE_NAME = "timestamp"
@@ -53,6 +55,8 @@ PrimitiveTypes = set([
   "float",
   "double",
   "string",
+  "varchar",
+  "char",
   "date",
   "datetime",
   "timestamp",

Modified: hive/branches/branch-0.12/serde/src/gen/thrift/gen-rb/serde_constants.rb
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/gen/thrift/gen-rb/serde_constants.rb?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/gen/thrift/gen-rb/serde_constants.rb (original)
+++ hive/branches/branch-0.12/serde/src/gen/thrift/gen-rb/serde_constants.rb Tue Sep 17 20:02:21 2013
@@ -53,6 +53,10 @@ DOUBLE_TYPE_NAME = %q"double"
 
 STRING_TYPE_NAME = %q"string"
 
+CHAR_TYPE_NAME = %q"char"
+
+VARCHAR_TYPE_NAME = %q"varchar"
+
 DATE_TYPE_NAME = %q"date"
 
 DATETIME_TYPE_NAME = %q"datetime"
@@ -85,6 +89,8 @@ PrimitiveTypes = Set.new([
   %q"float",
   %q"double",
   %q"string",
+  %q"varchar",
+  %q"char",
   %q"date",
   %q"datetime",
   %q"timestamp",

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Tue Sep 17 20:02:21 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
@@ -283,6 +284,13 @@ public final class SerDeUtils {
           sb.append('"');
           break;
         }
+        case VARCHAR: {
+          sb.append('"');
+          sb.append(escapeString(((HiveVarcharObjectInspector) poi)
+              .getPrimitiveJavaObject(o).toString()));
+          sb.append('"');
+          break;
+        }
         case DATE: {
           sb.append('"');
           sb.append(((DateObjectInspector) poi)

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java Tue Sep 17 20:02:21 2013
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -91,8 +92,7 @@ class AvroObjectInspectorGenerator {
     switch(ti.getCategory()) {
       case PRIMITIVE:
         PrimitiveTypeInfo pti = (PrimitiveTypeInfo)ti;
-        result = PrimitiveObjectInspectorFactory
-                .getPrimitiveJavaObjectInspector(pti.getPrimitiveCategory());
+        result = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
         break;
       case STRUCT:
         StructTypeInfo sti = (StructTypeInfo)ti;

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Tue Sep 17 20:02:21 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.serde2.bi
 
 import java.io.IOException;
 import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -31,6 +33,7 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
@@ -39,6 +42,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -56,14 +60,18 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils.HiveVarcharSerDeHelper;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -276,48 +284,18 @@ public class BinarySortableSerDe extends
       }
       case STRING: {
         Text r = reuse == null ? new Text() : (Text) reuse;
-        // Get the actual length first
-        int start = buffer.tell();
-        int length = 0;
-        do {
-          byte b = buffer.read(invert);
-          if (b == 0) {
-            // end of string
-            break;
-          }
-          if (b == 1) {
-            // the last char is an escape char. read the actual char
-            buffer.read(invert);
-          }
-          length++;
-        } while (true);
+        return deserializeText(buffer, invert, r);
+      }
 
-        if (length == buffer.tell() - start) {
-          // No escaping happened, so we are already done.
-          r.set(buffer.getData(), start, length);
-        } else {
-          // Escaping happened, we need to copy byte-by-byte.
-          // 1. Set the length first.
-          r.set(buffer.getData(), start, length);
-          // 2. Reset the pointer.
-          buffer.seek(start);
-          // 3. Copy the data.
-          byte[] rdata = r.getBytes();
-          for (int i = 0; i < length; i++) {
-            byte b = buffer.read(invert);
-            if (b == 1) {
-              // The last char is an escape char, read the actual char.
-              // The serialization format escape \0 to \1, and \1 to \2,
-              // to make sure the string is null-terminated.
-              b = (byte) (buffer.read(invert) - 1);
-            }
-            rdata[i] = b;
-          }
-          // 4. Read the null terminator.
-          byte b = buffer.read(invert);
-          assert (b == 0);
-        }
-        return r;
+      case VARCHAR: {
+        HiveVarcharWritable r =
+            reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
+        // Use HiveVarchar's internal Text member to read the value.
+        deserializeText(buffer, invert, r.getTextValue());
+        // If we cache helper data for deserialization we could avoid having
+        // to call getVarcharMaxLength() on every deserialize call.
+        r.enforceMaxLength(getVarcharMaxLength(type));
+        return r;
       }
 
       case BINARY: {
@@ -552,6 +530,60 @@ public class BinarySortableSerDe extends
     return v;
   }
 
+  static int getVarcharMaxLength(TypeInfo type) {
+    VarcharTypeParams typeParams = (VarcharTypeParams) ((PrimitiveTypeInfo) type).getTypeParams();
+    if (typeParams != null) {
+      return typeParams.length;
+    }
+    return -1;
+  }
+
+  static Text deserializeText(InputByteBuffer buffer, boolean invert, Text r)
+      throws IOException {
+    // Get the actual length first
+    int start = buffer.tell();
+    int length = 0;
+    do {
+      byte b = buffer.read(invert);
+      if (b == 0) {
+        // end of string
+        break;
+      }
+      if (b == 1) {
+        // the last char is an escape char. read the actual char
+        buffer.read(invert);
+      }
+      length++;
+    } while (true);
+
+    if (length == buffer.tell() - start) {
+      // No escaping happened, so we are already done.
+      r.set(buffer.getData(), start, length);
+    } else {
+      // Escaping happened, we need to copy byte-by-byte.
+      // 1. Set the length first.
+      r.set(buffer.getData(), start, length);
+      // 2. Reset the pointer.
+      buffer.seek(start);
+      // 3. Copy the data.
+      byte[] rdata = r.getBytes();
+      for (int i = 0; i < length; i++) {
+        byte b = buffer.read(invert);
+        if (b == 1) {
+          // The last char is an escape char, read the actual char.
+          // The serialization format escape \0 to \1, and \1 to \2,
+          // to make sure the string is null-terminated.
+          b = (byte) (buffer.read(invert) - 1);
+        }
+        rdata[i] = b;
+      }
+      // 4. Read the null terminator.
+      byte b = buffer.read(invert);
+      assert (b == 0);
+    }
+    return r;
+  }
+
   BytesWritable serializeBytesWritable = new BytesWritable();
   OutputByteBuffer outputByteBuffer = new OutputByteBuffer();
 
@@ -572,7 +604,7 @@ public class BinarySortableSerDe extends
   }
 
   static void serialize(OutputByteBuffer buffer, Object o, ObjectInspector oi,
-      boolean invert) {
+      boolean invert) throws SerDeException {
     // Is this field a null?
     if (o == null) {
       buffer.write((byte) 0, invert);
@@ -668,6 +700,18 @@ public class BinarySortableSerDe extends
         return;
           }
 
+      case VARCHAR: {
+        HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector)poi;
+        HiveVarcharWritable hc = hcoi.getPrimitiveWritableObject(o);
+        try {
+          ByteBuffer bb = Text.encode(hc.getHiveVarchar().getValue());
+          serializeBytes(buffer, bb.array(), bb.limit(), invert);
+        } catch (CharacterCodingException err) {
+          throw new SerDeException(err);
+        }
+        return;
+      }
+
       case BINARY: {
         BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
         BytesWritable ba = baoi.getPrimitiveWritableObject(o);

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java Tue Sep 17 20:02:21 2013
@@ -17,7 +17,7 @@
  */
 
 /**
- * This file is back-ported from hadoop-0.19, to make sure hive can run 
+ * This file is back-ported from hadoop-0.19, to make sure hive can run
  * with hadoop-0.17.
  */
 package org.apache.hadoop.hive.serde2.io;

Added: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java (added)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java Tue Sep 17 20:02:21 2013
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.common.type.HiveBaseChar;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+
+public class HiveVarcharWritable implements WritableComparable<HiveVarcharWritable>{
+  protected Text value = new Text();
+  transient protected int characterLength = -1;
+
+  public HiveVarcharWritable() {
+  }
+
+  public HiveVarcharWritable(HiveVarchar hc) {
+    set(hc);
+  }
+
+  public HiveVarcharWritable(HiveVarcharWritable hcw) {
+    set(hcw);
+  }
+
+  public void set(HiveVarchar val) {
+    set(val.getValue());
+  }
+
+  public void set(String val) {
+    set(val, -1);  // copy entire string value
+  }
+
+  public void set(HiveVarcharWritable val) {
+    value.set(val.value);
+    characterLength = val.characterLength;
+  }
+
+  public void set(HiveVarcharWritable val, int maxLength) {
+    if (maxLength < 0 || (val.characterLength > 0 && val.characterLength <= maxLength)) {
+      set(val);
+    } else {
+      set(val.getHiveVarchar(), maxLength);
+    }
+  }
+
+  public void set(HiveVarchar val, int len) {
+    set(val.getValue(), len);
+  }
+
+  public void set(String val, int maxLength) {
+    value.set(HiveBaseChar.enforceMaxLength(val, maxLength));
+  }
+
+  public HiveVarchar getHiveVarchar() {
+    return new HiveVarchar(value.toString(), -1);
+  }
+
+  public int getCharacterLength() {
+    if (characterLength < 0) {
+      characterLength = getHiveVarchar().getCharacterLength();
+    }
+    return characterLength;
+  }
+
+  public void enforceMaxLength(int maxLength) {
+    // Might be possible to truncate the existing Text value, for now just do something simple.
+    set(getHiveVarchar(), maxLength);
+  }
+
+  public void readFields(DataInput in) throws IOException {
+    value.readFields(in);
+  }
+
+  public void write(DataOutput out) throws IOException {
+    value.write(out);
+  }
+
+  public int compareTo(HiveVarcharWritable rhs) {
+    return ShimLoader.getHadoopShims().compareText(value, rhs.value);
+  }
+
+  public boolean equals(Object obj) {
+    if (obj == null || !(obj instanceof HiveVarcharWritable)) {
+      return false;
+    }
+    return value.equals(((HiveVarcharWritable)obj).value);
+  }
+
+  @Override
+  public String toString() {
+    return value.toString();
+  }
+
+  public int hashCode() {
+    return value.hashCode();
+  }
+
+  /**
+   * Access to the internal Text member. Use with care.
+   * @return
+   */
+  public Text getTextValue() {
+    return value;
+  }
+}

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java Tue Sep 17 20:02:21 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
@@ -113,6 +114,8 @@ public final class LazyFactory {
       return new LazyDouble((LazyDoubleObjectInspector) oi);
     case STRING:
       return new LazyString((LazyStringObjectInspector) oi);
+    case VARCHAR:
+      return new LazyHiveVarchar((LazyHiveVarcharObjectInspector) oi);
     case DATE:
       return new LazyDate((LazyDateObjectInspector) oi);
     case TIMESTAMP:

Added: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java (added)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java Tue Sep 17 20:02:21 2013
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy;
+
+import java.nio.charset.CharacterCodingException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.io.Text;
+
+/**
+ * LazyObject for storing a value of HiveVarchar.
+ *
+ */
+public class LazyHiveVarchar extends
+    LazyPrimitive<LazyHiveVarcharObjectInspector, HiveVarcharWritable> {
+
+  private static final Log LOG = LogFactory.getLog(LazyHiveVarchar.class);
+
+  protected int maxLength = -1;
+
+  public LazyHiveVarchar(LazyHiveVarcharObjectInspector oi) {
+    super(oi);
+    VarcharTypeParams typeParams = (VarcharTypeParams)oi.getTypeParams();
+    if (typeParams == null) {
+      throw new RuntimeException("varchar type used without type params");
+    }
+    maxLength = typeParams.getLength();
+    data = new HiveVarcharWritable();
+  }
+
+  public LazyHiveVarchar(LazyHiveVarchar copy) {
+    super(copy);
+    this.maxLength = copy.maxLength;
+    data = new HiveVarcharWritable(copy.data);
+  }
+
+  public void setValue(LazyHiveVarchar copy) {
+    data.set(copy.data, maxLength);
+  }
+
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+    String byteData = null;
+    try {
+      byteData = Text.decode(bytes.getData(), start, length);
+      data.set(byteData, maxLength);
+      isNull = false;
+    } catch (CharacterCodingException e) {
+      isNull = true;
+      LOG.debug("Data not in the HiveVarchar data type range so converted to null.", e);
+    }
+  }
+
+}

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Tue Sep 17 20:02:21 2013
@@ -31,6 +31,7 @@ import org.apache.commons.codec.binary.B
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
@@ -39,6 +40,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
@@ -225,6 +227,12 @@ public final class LazyUtils {
       break;
     }
 
+    case VARCHAR: {
+      HiveVarcharWritable hc = ((HiveVarcharObjectInspector)oi).getPrimitiveWritableObject(o);
+      ByteBuffer b = Text.encode(hc.toString());
+      writeEscaped(out, b.array(), 0, b.limit(), escaped, escapeChar, needsEscape);
+      break;
+    }
     case BINARY: {
       BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
       byte[] toEncode = new byte[bw.getLength()];

Added: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java (added)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java Tue Sep 17 20:02:21 2013
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
+
+
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+
+public class LazyHiveVarcharObjectInspector
+    extends AbstractPrimitiveLazyObjectInspector<HiveVarcharWritable>
+    implements HiveVarcharObjectInspector {
+
+  public LazyHiveVarcharObjectInspector(PrimitiveTypeEntry typeEntry) {
+    super(typeEntry);
+    if (typeEntry.primitiveCategory != PrimitiveCategory.VARCHAR) {
+      throw new RuntimeException(
+          "TypeEntry of type varchar expected, got " + typeEntry.primitiveCategory);
+    }
+  }
+
+  @Override
+  public Object copyObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+
+    LazyHiveVarchar ret = new LazyHiveVarchar(this);
+    ret.setValue((LazyHiveVarchar) o);
+    return ret;
+  }
+
+  @Override
+  public HiveVarchar getPrimitiveJavaObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+
+    HiveVarchar ret = ((LazyHiveVarchar) o).getWritableObject().getHiveVarchar();
+    if (!ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
+        ret, (VarcharTypeParams) typeParams)) {
+      HiveVarchar newValue = new HiveVarchar(ret, ((VarcharTypeParams) typeParams).length);
+      return newValue;
+    }
+    return ret;
+  }
+
+  public String toString() {
+    return getTypeName();
+  }
+}

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java Tue Sep 17 20:02:21 2013
@@ -21,9 +21,11 @@ package org.apache.hadoop.hive.serde2.la
 import java.util.ArrayList;
 import java.util.HashMap;
 
+import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeSpec;
 
@@ -65,6 +67,8 @@ public final class LazyPrimitiveObjectIn
       new LazyBinaryObjectInspector();
   public static final LazyHiveDecimalObjectInspector LAZY_BIG_DECIMAL_OBJECT_INSPECTOR =
       new LazyHiveDecimalObjectInspector();
+  public static final LazyHiveVarcharObjectInspector LAZY_VARCHAR_OBJECT_INSPECTOR =
+      new LazyHiveVarcharObjectInspector(PrimitiveObjectInspectorUtils.varcharTypeEntry);
 
   static HashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector =
       new HashMap<ArrayList<Object>, LazyStringObjectInspector>();
@@ -96,8 +100,14 @@ public final class LazyPrimitiveObjectIn
     if (poi == null) {
       // Object inspector hasn't been cached for this type/params yet, create now
       switch (primitiveCategory) {
-        // Get type entry for parameterized type, and create new object inspector for type
-        // Currently no parameterized types
+        case VARCHAR:
+          PrimitiveTypeEntry typeEntry = PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
+              primitiveCategory,
+              typeParams);
+          poi = new LazyHiveVarcharObjectInspector(typeEntry);
+          poi.setTypeParams(typeParams);
+          cachedParameterizedLazyObjectInspectors.setObjectInspector(poi);
+          break;
 
         default:
           throw new RuntimeException(
@@ -126,6 +136,8 @@ public final class LazyPrimitiveObjectIn
       return LAZY_DOUBLE_OBJECT_INSPECTOR;
     case STRING:
       return getLazyStringObjectInspector(escaped, escapeChar);
+    case VARCHAR:
+      return LAZY_VARCHAR_OBJECT_INSPECTOR;
     case BINARY:
       return LAZY_BINARY_OBJECT_INSPECTOR;
     case VOID:
@@ -151,7 +163,10 @@ public final class LazyPrimitiveObjectIn
       return getLazyObjectInspector(primitiveCategory, escaped, escapeChar);
     } else {
       switch(primitiveCategory) {
-        // call getParameterizedObjectInspector(). But no parameterized types yet
+        case VARCHAR:
+          LazyHiveVarcharObjectInspector oi = (LazyHiveVarcharObjectInspector)
+            getParameterizedObjectInspector(typeSpec);
+          return oi;
 
         default:
           throw new RuntimeException("Type " + primitiveCategory + " does not take parameters");

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java Tue Sep 17 20:02:21 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector;
@@ -71,6 +72,8 @@ public final class LazyBinaryFactory {
       return new LazyBinaryDouble((WritableDoubleObjectInspector) oi);
     case STRING:
       return new LazyBinaryString((WritableStringObjectInspector) oi);
+    case VARCHAR:
+      return new LazyBinaryHiveVarchar((WritableHiveVarcharObjectInspector) oi);
     case VOID: // for NULL
       return new LazyBinaryVoid((WritableVoidObjectInspector) oi);
     case DATE:

Added: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveVarchar.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveVarchar.java?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveVarchar.java (added)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveVarchar.java Tue Sep 17 20:02:21 2013
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+import org.apache.hadoop.io.Text;
+
+public class LazyBinaryHiveVarchar extends
+    LazyBinaryPrimitive<WritableHiveVarcharObjectInspector, HiveVarcharWritable> {
+
+  protected int maxLength = -1;
+
+  LazyBinaryHiveVarchar(WritableHiveVarcharObjectInspector oi) {
+    super(oi);
+    // Check for params
+    VarcharTypeParams typeParams = (VarcharTypeParams)oi.getTypeParams();
+    if (typeParams == null) {
+      throw new RuntimeException("varchar type used without type params");
+    }
+    maxLength = typeParams.length;
+    data = new HiveVarcharWritable();
+  }
+
+  LazyBinaryHiveVarchar(LazyBinaryHiveVarchar copy) {
+    super(copy);
+    maxLength = copy.maxLength;
+    data = new HiveVarcharWritable(copy.data);
+  }
+
+  @Override
+  public void init(ByteArrayRef bytes, int start, int length) {
+    // re-use existing text member in varchar writable
+    Text textValue = data.getTextValue();
+    textValue.set(bytes.getData(), start, length);
+    data.enforceMaxLength(maxLength);
+  }
+
+}

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Tue Sep 17 20:02:21 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.serde2.lazybinary;
 
+import java.nio.ByteBuffer;
+import java.nio.charset.CharacterCodingException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -44,6 +46,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
@@ -231,7 +235,7 @@ public class LazyBinarySerDe extends Abs
    *          once already
    */
   private static boolean serializeStruct(Output byteStream, Object obj,
-      StructObjectInspector soi, boolean warnedOnceNullMapKey) {
+      StructObjectInspector soi, boolean warnedOnceNullMapKey) throws SerDeException {
     // do nothing for null struct
     if (null == obj) {
       return warnedOnceNullMapKey;
@@ -284,7 +288,8 @@ public class LazyBinarySerDe extends Abs
    *          once already
    */
   public static boolean serialize(Output byteStream, Object obj,
-      ObjectInspector objInspector, boolean skipLengthPrefix, boolean warnedOnceNullMapKey) {
+      ObjectInspector objInspector, boolean skipLengthPrefix, boolean warnedOnceNullMapKey)
+      throws SerDeException {
 
     // do nothing for null object
     if (null == obj) {
@@ -363,7 +368,24 @@ public class LazyBinarySerDe extends Abs
         byteStream.write(data, 0, length);
         return warnedOnceNullMapKey;
       }
-
+      case VARCHAR: {
+        HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector) poi;
+        String value =
+            hcoi.getPrimitiveWritableObject(obj).getHiveVarchar().getValue();
+        try {
+          ByteBuffer bb = Text.encode(value);
+          // The length prefix must be the UTF-8 byte count (bb.limit()), not the
+          // char count, or multi-byte values are mis-framed on deserialization.
+          int length = bb.limit();
+          if (!skipLengthPrefix) {
+            LazyBinaryUtils.writeVInt(byteStream, length);
+          }
+          byteStream.write(bb.array(), 0, length);
+        } catch (CharacterCodingException err) {
+          throw new SerDeException(err);
+        }
+        return warnedOnceNullMapKey;
+      }
       case BINARY: {
         BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
         BytesWritable bw = baoi.getPrimitiveWritableObject(obj);

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java Tue Sep 17 20:02:21 2013
@@ -196,6 +196,11 @@ public final class LazyBinaryUtils {
         recordInfo.elementSize = vInt.value;
         break;
 
+      case VARCHAR:
+        LazyBinaryUtils.readVInt(bytes, offset, vInt);
+        recordInfo.elementOffset = vInt.length;
+        recordInfo.elementSize = vInt.value;
+        break;
       case BINARY:
         // using vint instead of 4 bytes
         LazyBinaryUtils.readVInt(bytes, offset, vInt);

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java Tue Sep 17 20:02:21 2013
@@ -31,6 +31,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector;
@@ -101,6 +103,10 @@ public final class ObjectInspectorConver
         return new PrimitiveObjectInspectorConverter.StringConverter(
             inputOI);
       }
+    case VARCHAR:
+      return new PrimitiveObjectInspectorConverter.HiveVarcharConverter(
+          inputOI,
+          (SettableHiveVarcharObjectInspector) outputOI);
     case DATE:
       return new PrimitiveObjectInspectorConverter.DateConverter(
           inputOI,

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Tue Sep 17 20:02:21 2013
@@ -32,6 +32,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
@@ -43,6 +44,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -484,6 +486,8 @@ public final class ObjectInspectorUtils 
         }
         return r;
       }
+      case VARCHAR:
+        return ((HiveVarcharObjectInspector)poi).getPrimitiveWritableObject(o).hashCode();
       case BINARY:
         return ((BinaryObjectInspector) poi).getPrimitiveWritableObject(o).hashCode();
 
@@ -679,6 +683,11 @@ public final class ObjectInspectorUtils 
               .compareTo(s2));
         }
       }
+      case VARCHAR: {
+        HiveVarcharWritable t1 = ((HiveVarcharObjectInspector)poi1).getPrimitiveWritableObject(o1);
+        HiveVarcharWritable t2 = ((HiveVarcharObjectInspector)poi2).getPrimitiveWritableObject(o2);
+        return t1.compareTo(t2);
+      }
       case BINARY: {
         BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
         BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
@@ -948,7 +957,7 @@ public final class ObjectInspectorUtils 
       case PRIMITIVE:
         PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
         return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
-            poi.getPrimitiveCategory(), writableValue);
+            poi, writableValue);
       case LIST:
         ListObjectInspector loi = (ListObjectInspector) oi;
         return ObjectInspectorFactory.getStandardConstantListObjectInspector(

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java Tue Sep 17 20:02:21 2013
@@ -31,7 +31,7 @@ public interface PrimitiveObjectInspecto
    */
   public static enum PrimitiveCategory {
     VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING,
-    DATE, TIMESTAMP, BINARY, DECIMAL, UNKNOWN
+    DATE, TIMESTAMP, BINARY, DECIMAL, VARCHAR, UNKNOWN
   };
 
   /**

Added: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveVarcharObjectInspector.java?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveVarcharObjectInspector.java (added)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveVarcharObjectInspector.java Tue Sep 17 20:02:21 2013
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+public interface HiveVarcharObjectInspector extends PrimitiveObjectInspector {
+  HiveVarcharWritable getPrimitiveWritableObject(Object o);
+
+  HiveVarchar getPrimitiveJavaObject(Object o);
+}

Added: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java?rev=1524198&view=auto
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (added)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java Tue Sep 17 20:02:21 2013
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
+
+public class JavaHiveVarcharObjectInspector
+    extends AbstractPrimitiveJavaObjectInspector
+    implements SettableHiveVarcharObjectInspector {
+
+  public JavaHiveVarcharObjectInspector(PrimitiveTypeEntry typeEntry) {
+    super(typeEntry);
+    if (typeEntry.primitiveCategory != PrimitiveCategory.VARCHAR) {
+      throw new RuntimeException(
+          "TypeEntry of type varchar expected, got " + typeEntry.primitiveCategory);
+    }
+  }
+
+  public HiveVarchar getPrimitiveJavaObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+    HiveVarchar value = (HiveVarchar)o;
+    if (ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
+        value, (VarcharTypeParams) typeParams)) {
+      return value;
+    }
+    // value needs to be converted to match the type params (length, etc).
+    return getPrimitiveWithParams(value);
+  }
+
+  @Override
+  public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+    return getWritableWithParams((HiveVarchar)o);
+  }
+
+  private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
+    HiveVarchar hv = new HiveVarchar(val, getMaxLength());
+    return hv;
+  }
+
+  private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
+    HiveVarcharWritable newValue = new HiveVarcharWritable();
+    newValue.set(val, getMaxLength());
+    return newValue;
+  }
+
+  @Override
+  public Object set(Object o, HiveVarchar value) {
+    HiveVarchar setValue = (HiveVarchar)o;
+    if (ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
+        value, (VarcharTypeParams) typeParams)) {
+      setValue.setValue(value);
+    } else {
+      // Otherwise value may be too long, convert to appropriate value based on params
+      setValue.setValue(value, getMaxLength());
+    }
+
+    return setValue;
+  }
+
+  @Override
+  public Object set(Object o, String value) {
+    HiveVarchar convertedValue = (HiveVarchar)o;
+    convertedValue.setValue(value, getMaxLength());
+    return convertedValue;
+  }
+
+  @Override
+  public Object create(HiveVarchar value) {
+    HiveVarchar hc = new HiveVarchar(value, getMaxLength());
+    return hc;
+  }
+
+  public int getMaxLength() {
+    return typeParams != null ? ((VarcharTypeParams) typeParams).length : -1;
+  }
+}

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java Tue Sep 17 20:02:21 2013
@@ -22,11 +22,14 @@ import java.sql.Date;
 import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -394,6 +397,14 @@ public class PrimitiveObjectInspectorCon
           t.set(((StringObjectInspector) inputOI).getPrimitiveJavaObject(input));
         }
         return t;
+      case VARCHAR:
+        if (inputOI.preferWritable()) {
+          t.set(((HiveVarcharObjectInspector) inputOI).getPrimitiveWritableObject(input)
+              .toString());
+        } else {
+          t.set(((HiveVarcharObjectInspector) inputOI).getPrimitiveJavaObject(input).toString());
+        }
+        return t;
       case DATE:
         t.set(((DateObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
         return t;
@@ -430,4 +441,40 @@ public class PrimitiveObjectInspectorCon
     }
   }
 
+
+  public static class HiveVarcharConverter implements Converter {
+
+    PrimitiveObjectInspector inputOI;
+    SettableHiveVarcharObjectInspector outputOI;
+    HiveVarcharWritable hc;
+
+    public HiveVarcharConverter(PrimitiveObjectInspector inputOI,
+        SettableHiveVarcharObjectInspector outputOI) {
+      this.inputOI = inputOI;
+      this.outputOI = outputOI;
+      VarcharTypeParams typeParams = (VarcharTypeParams) outputOI.getTypeParams();
+
+      // unfortunately we seem to get instances of varchar object inspectors without params
+      // when an old-style UDF has an evaluate() method with varchar arguments.
+      // If we disallow varchar in old-style UDFs and only allow GenericUDFs to be defined
+      // with varchar arguments, then we might be able to enforce this properly.
+      //if (typeParams == null) {
+      //  throw new RuntimeException("varchar type used without type params");
+      //}
+      hc = new HiveVarcharWritable();
+    }
+
+    @Override
+    public Object convert(Object input) {
+      switch (inputOI.getPrimitiveCategory()) {
+        case BOOLEAN:
+          return outputOI.set(hc,
+              ((BooleanObjectInspector) inputOI).get(input) ?
+                  new HiveVarchar("TRUE", -1) : new HiveVarchar("FALSE", -1));
+        default:
+          return outputOI.set(hc, PrimitiveObjectInspectorUtils.getHiveVarchar(input, inputOI));
+      }
+    }
+
+  }
 }

Modified: hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java?rev=1524198&r1=1524197&r2=1524198&view=diff
==============================================================================
--- hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java (original)
+++ hive/branches/branch-0.12/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java Tue Sep 17 20:02:21 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -33,6 +34,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeSpec;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -68,6 +70,8 @@ public final class PrimitiveObjectInspec
       new JavaDoubleObjectInspector();
   public static final JavaStringObjectInspector javaStringObjectInspector =
       new JavaStringObjectInspector();
+  public static final JavaHiveVarcharObjectInspector javaHiveVarcharObjectInspector =
+      new JavaHiveVarcharObjectInspector(PrimitiveObjectInspectorUtils.varcharTypeEntry);
   public static final JavaVoidObjectInspector javaVoidObjectInspector =
       new JavaVoidObjectInspector();
   public static final JavaDateObjectInspector javaDateObjectInspector =
@@ -95,6 +99,8 @@ public final class PrimitiveObjectInspec
       new WritableDoubleObjectInspector();
   public static final WritableStringObjectInspector writableStringObjectInspector =
       new WritableStringObjectInspector();
+  public static final WritableHiveVarcharObjectInspector writableHiveVarcharObjectInspector =
+      new WritableHiveVarcharObjectInspector(PrimitiveObjectInspectorUtils.varcharTypeEntry);
   public static final WritableVoidObjectInspector writableVoidObjectInspector =
       new WritableVoidObjectInspector();
   public static final WritableDateObjectInspector writableDateObjectInspector =
@@ -125,6 +131,8 @@ public final class PrimitiveObjectInspec
         writableDoubleObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.STRING,
         writableStringObjectInspector);
+    cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.VARCHAR,
+        writableHiveVarcharObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.VOID,
         writableVoidObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(PrimitiveCategory.DATE,
@@ -156,6 +164,8 @@ public final class PrimitiveObjectInspec
         javaDoubleObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.STRING,
         javaStringObjectInspector);
+    cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.VARCHAR,
+        javaHiveVarcharObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.VOID,
         javaVoidObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(PrimitiveCategory.DATE,
@@ -229,7 +239,14 @@ public final class PrimitiveObjectInspec
       if (oi == null) {
         // Do a bit of validation - not all primitive types use parameters.
         switch (primitiveCategory) {
-          // Currently no parameterized types
+          case VARCHAR:
+            PrimitiveTypeEntry typeEntry = PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
+                primitiveCategory,
+                primitiveTypeParams);
+            oi = new WritableHiveVarcharObjectInspector(typeEntry);
+            oi.setTypeParams(primitiveTypeParams);
+            cachedParameterizedPrimitiveWritableObjectInspectorCache.setObjectInspector(oi);
+            break;
           default:
             throw new RuntimeException(
                 "Primitve type " + primitiveCategory + " should not take parameters");
@@ -248,6 +265,24 @@ public final class PrimitiveObjectInspec
    */
   public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspector(
       PrimitiveCategory primitiveCategory, Object value) {
+    return getPrimitiveWritableConstantObjectInspector(
+        PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(primitiveCategory, null),
+        value);
+  }
+
+  /**
+   * Returns a PrimitiveWritableObjectInspector which implements ConstantObjectInspector
+   * for the PrimitiveCategory.
+   *
+   * @param primitiveCategory
+   * @param typeParams  Type qualifiers for the type (if applicable)
+   * @param value
+   */
+  public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspector(
+      PrimitiveTypeSpec typeSpecs, Object value) {
+    PrimitiveCategory primitiveCategory = typeSpecs.getPrimitiveCategory();
+    BaseTypeParams typeParams = typeSpecs.getTypeParams();
+
     switch (primitiveCategory) {
     case BOOLEAN:
       return new WritableConstantBooleanObjectInspector((BooleanWritable)value);
@@ -265,6 +300,9 @@ public final class PrimitiveObjectInspec
       return new WritableConstantDoubleObjectInspector((DoubleWritable)value);
     case STRING:
       return new WritableConstantStringObjectInspector((Text)value);
+    case VARCHAR:
+      return new WritableConstantHiveVarcharObjectInspector((HiveVarcharWritable)value,
+          (VarcharTypeParams) typeParams);
     case DATE:
       return new WritableConstantDateObjectInspector((DateWritable)value);
     case TIMESTAMP:
@@ -328,8 +366,14 @@ public final class PrimitiveObjectInspec
       if (oi == null) {
         // Do a bit of validation - not all primitive types use parameters.
         switch (primitiveCategory) {
-          // Create type info and add to cache
-          // Currently no existing parameterized types
+          case VARCHAR:
+            PrimitiveTypeEntry typeEntry = PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
+                primitiveCategory,
+                primitiveTypeParams);
+            oi = new JavaHiveVarcharObjectInspector(typeEntry);
+            oi.setTypeParams(primitiveTypeParams);
+            cachedParameterizedPrimitiveJavaObjectInspectorCache.setObjectInspector(oi);
+            break;
           default:
             throw new RuntimeException(
                 "Primitve type " + primitiveCategory + " should not take parameters");