Posted to commits@hive.apache.org by zs...@apache.org on 2009/02/17 20:39:32 UTC

svn commit: r745212 [4/9] - in /hadoop/hive/trunk: ./ metastore/if/ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen-php/ metastore/src/gen-py/hive_metastore/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src...

Modified: hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftMetaStore.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftMetaStore.py?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftMetaStore.py (original)
+++ hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ThriftMetaStore.py Tue Feb 17 19:39:29 2009
@@ -80,12 +80,12 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields failed: unknown result");
 
   def get_tables(self, db_name, pattern):
@@ -113,12 +113,12 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables failed: unknown result");
 
   def get_schema(self, table_name):
@@ -145,12 +145,12 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema failed: unknown result");
 
   def alter_table(self, db_name, table_name, schema):
@@ -177,12 +177,12 @@
     result = alter_table_result()
     result.read(self._iprot)
     self._iprot.readMessageEnd()
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     return
 
   def create_table(self, db_name, table_name, schema):
@@ -209,10 +209,10 @@
     result = create_table_result()
     result.read(self._iprot)
     self._iprot.readMessageEnd()
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
     return
 
   def drop_table(self, db_name, table_name):
@@ -238,12 +238,12 @@
     result = drop_table_result()
     result.read(self._iprot)
     self._iprot.readMessageEnd()
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     return
 
   def truncate_table(self, db_name, table_name, partition):
@@ -270,12 +270,12 @@
     result = truncate_table_result()
     result.read(self._iprot)
     self._iprot.readMessageEnd()
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     return
 
   def table_exists(self, db_name, table_name):
@@ -303,10 +303,10 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
     raise TApplicationException(TApplicationException.MISSING_RESULT, "table_exists failed: unknown result");
 
   def get_partitions(self, db_name, table_name):
@@ -334,12 +334,12 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions failed: unknown result");
 
   def get_dbs(self, ):
@@ -365,8 +365,8 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch != None:
-      raise result.ouch
+    if result.o != None:
+      raise result.o
     raise TApplicationException(TApplicationException.MISSING_RESULT, "get_dbs failed: unknown result");
 
   def cat(self, db_name, table_name, partition, high):
@@ -396,12 +396,12 @@
     self._iprot.readMessageEnd()
     if result.success != None:
       return result.success
-    if result.ouch1 != None:
-      raise result.ouch1
-    if result.ouch2 != None:
-      raise result.ouch2
-    if result.ouch3 != None:
-      raise result.ouch3
+    if result.o1 != None:
+      raise result.o1
+    if result.o2 != None:
+      raise result.o2
+    if result.o3 != None:
+      raise result.o3
     raise TApplicationException(TApplicationException.MISSING_RESULT, "cat failed: unknown result");
 
 
@@ -442,12 +442,12 @@
     result = get_fields_result()
     try:
       result.success = self._handler.get_fields(args.db_name, args.table_name)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("get_fields", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -460,12 +460,12 @@
     result = get_tables_result()
     try:
       result.success = self._handler.get_tables(args.db_name, args.pattern)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("get_tables", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -478,12 +478,12 @@
     result = get_schema_result()
     try:
       result.success = self._handler.get_schema(args.table_name)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("get_schema", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -496,12 +496,12 @@
     result = alter_table_result()
     try:
       self._handler.alter_table(args.db_name, args.table_name, args.schema)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("alter_table", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -514,10 +514,10 @@
     result = create_table_result()
     try:
       self._handler.create_table(args.db_name, args.table_name, args.schema)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownDBException, ouch2:
-      result.ouch2 = ouch2
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownDBException, o2:
+      result.o2 = o2
     oprot.writeMessageBegin("create_table", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -530,12 +530,12 @@
     result = drop_table_result()
     try:
       self._handler.drop_table(args.db_name, args.table_name)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("drop_table", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -548,12 +548,12 @@
     result = truncate_table_result()
     try:
       self._handler.truncate_table(args.db_name, args.table_name, args.partition)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("truncate_table", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -566,10 +566,10 @@
     result = table_exists_result()
     try:
       result.success = self._handler.table_exists(args.db_name, args.table_name)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownDBException, ouch2:
-      result.ouch2 = ouch2
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownDBException, o2:
+      result.o2 = o2
     oprot.writeMessageBegin("table_exists", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -582,12 +582,12 @@
     result = get_partitions_result()
     try:
       result.success = self._handler.get_partitions(args.db_name, args.table_name)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownTableException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownDBException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownTableException, o2:
+      result.o2 = o2
+    except UnknownDBException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("get_partitions", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -600,8 +600,8 @@
     result = get_dbs_result()
     try:
       result.success = self._handler.get_dbs()
-    except MetaException, ouch:
-      result.ouch = ouch
+    except MetaException, o:
+      result.o = o
     oprot.writeMessageBegin("get_dbs", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -614,12 +614,12 @@
     result = cat_result()
     try:
       result.success = self._handler.cat(args.db_name, args.table_name, args.partition, args.high)
-    except MetaException, ouch1:
-      result.ouch1 = ouch1
-    except UnknownDBException, ouch2:
-      result.ouch2 = ouch2
-    except UnknownTableException, ouch3:
-      result.ouch3 = ouch3
+    except MetaException, o1:
+      result.o1 = o1
+    except UnknownDBException, o2:
+      result.o2 = o2
+    except UnknownTableException, o3:
+      result.o3 = o3
     oprot.writeMessageBegin("cat", TMessageType.REPLY, seqid)
     result.write(oprot)
     oprot.writeMessageEnd()
@@ -697,18 +697,18 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -732,20 +732,20 @@
           iprot.skip(ftype)
       elif fid == -3:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -765,17 +765,17 @@
         iter163.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -3)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -4)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -5)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -3)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -4)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -5)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -861,18 +861,18 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -895,20 +895,20 @@
           iprot.skip(ftype)
       elif fid == -3:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -928,17 +928,17 @@
         oprot.writeString(iter170)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -3)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -4)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -5)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -3)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -4)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -5)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1012,18 +1012,18 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1047,20 +1047,20 @@
           iprot.skip(ftype)
       elif fid == -2:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -3:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -1081,17 +1081,17 @@
         oprot.writeString(viter179)
       oprot.writeMapEnd()
       oprot.writeFieldEnd()
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -2)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -3)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -4)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -2)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -3)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -4)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1198,16 +1198,16 @@
 
   thrift_spec = None
   def __init__(self, d=None):
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
-    if isinstance(d, dict):
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
+    if isinstance(d, dict):
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1220,20 +1220,20 @@
         break
       if fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -6:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -1246,17 +1246,17 @@
       oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
       return
     oprot.writeStructBegin('alter_table_result')
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -4)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -5)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -6)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -4)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -5)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -6)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1363,13 +1363,13 @@
 
   thrift_spec = None
   def __init__(self, d=None):
-    self.ouch1 = None
-    self.ouch2 = None
+    self.o1 = None
+    self.o2 = None
     if isinstance(d, dict):
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1382,14 +1382,14 @@
         break
       if fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownDBException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownDBException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -1402,13 +1402,13 @@
       oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
       return
     oprot.writeStructBegin('create_table_result')
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -4)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -5)
-      self.ouch2.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -4)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -5)
+      self.o2.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1493,16 +1493,16 @@
 
   thrift_spec = None
   def __init__(self, d=None):
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
-    if isinstance(d, dict):
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
+    if isinstance(d, dict):
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1515,20 +1515,20 @@
         break
       if fid == -3:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -1541,17 +1541,17 @@
       oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
       return
     oprot.writeStructBegin('drop_table_result')
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -3)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -4)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -5)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -3)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -4)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -5)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1648,16 +1648,16 @@
 
   thrift_spec = None
   def __init__(self, d=None):
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
-    if isinstance(d, dict):
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
+    if isinstance(d, dict):
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1670,20 +1670,20 @@
         break
       if fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -6:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -1696,17 +1696,17 @@
       oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
       return
     oprot.writeStructBegin('truncate_table_result')
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -4)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -5)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -6)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -4)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -5)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -6)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1792,15 +1792,15 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch1 = None
-    self.ouch2 = None
+    self.o1 = None
+    self.o2 = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1818,14 +1818,14 @@
           iprot.skip(ftype)
       elif fid == -3:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownDBException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownDBException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -1842,13 +1842,13 @@
       oprot.writeFieldBegin('success', TType.BOOL, 0)
       oprot.writeBool(self.success)
       oprot.writeFieldEnd()
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -3)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -4)
-      self.ouch2.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -3)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -4)
+      self.o2.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -1934,18 +1934,18 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -1968,20 +1968,20 @@
           iprot.skip(ftype)
       elif fid == -3:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -4:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownTableException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownTableException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownDBException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownDBException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -2001,17 +2001,17 @@
         oprot.writeString(iter204)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -3)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -4)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -5)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -3)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -4)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -5)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -2075,12 +2075,12 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch = None
+    self.o = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch' in d:
-        self.ouch = d['ouch']
+      if 'o' in d:
+        self.o = d['o']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2103,8 +2103,8 @@
           iprot.skip(ftype)
       elif fid == -1:
         if ftype == TType.STRUCT:
-          self.ouch = MetaException()
-          self.ouch.read(iprot)
+          self.o = MetaException()
+          self.o.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -2124,9 +2124,9 @@
         oprot.writeString(iter211)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
-    if self.ouch != None:
-      oprot.writeFieldBegin('ouch', TType.STRUCT, -1)
-      self.ouch.write(oprot)
+    if self.o != None:
+      oprot.writeFieldBegin('o', TType.STRUCT, -1)
+      self.o.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
@@ -2236,18 +2236,18 @@
   thrift_spec = None
   def __init__(self, d=None):
     self.success = None
-    self.ouch1 = None
-    self.ouch2 = None
-    self.ouch3 = None
+    self.o1 = None
+    self.o2 = None
+    self.o3 = None
     if isinstance(d, dict):
       if 'success' in d:
         self.success = d['success']
-      if 'ouch1' in d:
-        self.ouch1 = d['ouch1']
-      if 'ouch2' in d:
-        self.ouch2 = d['ouch2']
-      if 'ouch3' in d:
-        self.ouch3 = d['ouch3']
+      if 'o1' in d:
+        self.o1 = d['o1']
+      if 'o2' in d:
+        self.o2 = d['o2']
+      if 'o3' in d:
+        self.o3 = d['o3']
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -2270,20 +2270,20 @@
           iprot.skip(ftype)
       elif fid == -5:
         if ftype == TType.STRUCT:
-          self.ouch1 = MetaException()
-          self.ouch1.read(iprot)
+          self.o1 = MetaException()
+          self.o1.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -6:
         if ftype == TType.STRUCT:
-          self.ouch2 = UnknownDBException()
-          self.ouch2.read(iprot)
+          self.o2 = UnknownDBException()
+          self.o2.read(iprot)
         else:
           iprot.skip(ftype)
       elif fid == -7:
         if ftype == TType.STRUCT:
-          self.ouch3 = UnknownTableException()
-          self.ouch3.read(iprot)
+          self.o3 = UnknownTableException()
+          self.o3.read(iprot)
         else:
           iprot.skip(ftype)
       else:
@@ -2303,17 +2303,17 @@
         oprot.writeString(iter218)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
-    if self.ouch1 != None:
-      oprot.writeFieldBegin('ouch1', TType.STRUCT, -5)
-      self.ouch1.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch2 != None:
-      oprot.writeFieldBegin('ouch2', TType.STRUCT, -6)
-      self.ouch2.write(oprot)
-      oprot.writeFieldEnd()
-    if self.ouch3 != None:
-      oprot.writeFieldBegin('ouch3', TType.STRUCT, -7)
-      self.ouch3.write(oprot)
+    if self.o1 != None:
+      oprot.writeFieldBegin('o1', TType.STRUCT, -5)
+      self.o1.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o2 != None:
+      oprot.writeFieldBegin('o2', TType.STRUCT, -6)
+      self.o2.write(oprot)
+      oprot.writeFieldEnd()
+    if self.o3 != None:
+      oprot.writeFieldBegin('o3', TType.STRUCT, -7)
+      self.o3.write(oprot)
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
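
For illustration, a minimal self-contained Python sketch of the client pattern the hunks above rename (the stand-in exception classes and the RuntimeError below replace the real Thrift types, so this runs without the generated module). The Thrift field ids (-3, -4, -5) are unchanged, so the ouch1/ouch2/ouch3 to o1/o2/o3 rename is wire-compatible; only Python code that touched result.ouch1 etc., or built results from dicts keyed 'ouch1', needs updating:

    # Stand-ins for the hive_metastore exception types used in the diff.
    class MetaException(Exception): pass
    class UnknownTableException(Exception): pass
    class UnknownDBException(Exception): pass

    class get_fields_result(object):
        def __init__(self):
            self.success = None
            self.o1 = None  # MetaException slot (Thrift field id -3)
            self.o2 = None  # UnknownTableException slot (field id -4)
            self.o3 = None  # UnknownDBException slot (field id -5)

    def recv_get_fields(result):
        # Mirrors the generated client: return on success, otherwise raise
        # whichever exception slot the server populated.
        if result.success is not None:
            return result.success
        for exc in (result.o1, result.o2, result.o3):
            if exc is not None:
                raise exc
        # The generated code raises TApplicationException here.
        raise RuntimeError("get_fields failed: unknown result")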

Modified: hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/constants.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/constants.py?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/constants.py (original)
+++ hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/constants.py Tue Feb 17 19:39:29 2009
@@ -9,6 +9,8 @@
 
 META_TABLE_COLUMNS = 'columns'
 
+META_TABLE_COLUMN_TYPES = 'columns.types'
+
 BUCKET_FIELD_NAME = 'bucket_field_name'
 
 BUCKET_COUNT = 'bucket_count'
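
The new META_TABLE_COLUMN_TYPES property ('columns.types') pairs with the existing META_TABLE_COLUMNS ('columns'): names are comma-separated, types are colon-separated, one type per name (the separators come from the MetaStoreUtils.java hunk below). A quick sketch with made-up column values:

    schema = {
        'columns':       'viewtime,userid,page_url',  # META_TABLE_COLUMNS
        'columns.types': 'int:bigint:string',         # META_TABLE_COLUMN_TYPES
    }
    names = schema['columns'].split(',')
    types = schema['columns.types'].split(':')
    assert len(names) == len(types)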

Modified: hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py (original)
+++ hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py Tue Feb 17 19:39:29 2009
@@ -523,7 +523,7 @@
     self.inputFormat = None
     self.outputFormat = None
     self.compressed = None
-    self.numBuckets = 32
+    self.numBuckets = None
     self.serdeInfo = None
     self.bucketCols = None
     self.sortCols = None
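
Reading of the ttypes.py hunk above (apparently the StorageDescriptor struct, judging by the surrounding fields): the generated default for numBuckets drops from 32 to None, so a table that was never bucketed no longer claims 32 buckets by default, and consumers should treat None as "not bucketed".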

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Tue Feb 17 19:39:29 2009
@@ -344,11 +344,13 @@
     if(org.apache.commons.lang.StringUtils.isNotBlank(schema.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_CLASS))) {
       setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_SERDE);
     }
-    // needed for MetadataTypedColumnSetSerDe
+    // needed for MetadataTypedColumnSetSerDe and LazySimpleSerDe
     setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS);
+    // needed for LazySimpleSerDe
+    setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMN_TYPES);
     // needed for DynamicSerDe
     setSerdeParam(t.getSd().getSerdeInfo(), schema, org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL);
-    
+      
     String colstr = schema.getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS);
     List<FieldSchema>  fields = new ArrayList<FieldSchema>();
     if(colstr != null) {
@@ -385,6 +387,7 @@
     schema.remove(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
     schema.remove(Constants.META_TABLE_SERDE);
     schema.remove(Constants.META_TABLE_COLUMNS);
+    schema.remove(Constants.META_TABLE_COLUMN_TYPES);
     
     // add the remaining unknown parameters to the table's parameters
     t.setParameters(new HashMap<String, String>());
@@ -485,17 +488,22 @@
     if(tbl.getSd().getSerdeInfo().getSerializationLib() != null) {
       schema.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB, tbl.getSd().getSerdeInfo().getSerializationLib());
     }
-    StringBuilder buf = new StringBuilder();
+    StringBuilder colNameBuf = new StringBuilder();
+    StringBuilder colTypeBuf = new StringBuilder();
     boolean first = true;
     for (FieldSchema col: tbl.getSd().getCols()) {
       if (!first) {
-        buf.append(",");
+        colNameBuf.append(",");
+        colTypeBuf.append(":");
       }
-      buf.append(col.getName());
+      colNameBuf.append(col.getName());
+      colTypeBuf.append(col.getType());
       first = false;
     }
-    String cols = buf.toString();
-    schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS, cols);
+    String colNames = colNameBuf.toString();
+    String colTypes = colTypeBuf.toString();
+    schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMNS, colNames);
+    schema.setProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_COLUMN_TYPES, colTypes);
     schema.setProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_DDL, 
         getDDLFromFieldSchema(tbl.getTableName(), tbl.getSd().getCols()));
     
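
A Python sketch, for brevity, of the join logic the MetaStoreUtils.java hunk adds: column names joined with "," into META_TABLE_COLUMNS, column types joined with ":" into the new META_TABLE_COLUMN_TYPES. FieldSchema and the column values here are stand-ins, not code from this commit:

    from collections import namedtuple

    FieldSchema = namedtuple('FieldSchema', ['name', 'type'])
    cols = [FieldSchema('viewtime', 'int'),
            FieldSchema('userid', 'bigint'),
            FieldSchema('page_url', 'string')]

    col_names = ','.join(c.name for c in cols)  # "viewtime,userid,page_url"
    col_types = ':'.join(c.type for c in cols)  # "int:bigint:string"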

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Feb 17 19:39:29 2009
@@ -660,14 +660,16 @@
     }
 
     /**
-     * If the user didn't specify a SerDe, and any of the columns are not of type String, 
+     * We use LazySimpleSerDe by default.
+     * 
+     * If the user didn't specify a SerDe, and any of the columns are not simple types, 
      * we will have to use DynamicSerDe instead.
      */
     if (crtTbl.getSerName() == null) {
       boolean useDynamicSerDe = false;
       if (crtTbl.getCols() != null) {
         for (FieldSchema field: crtTbl.getCols()) {
-          if (!Constants.STRING_TYPE_NAME.equalsIgnoreCase(field.getType())) {
+          if (field.getType().indexOf('<') >= 0 || field.getType().indexOf('>') >= 0) {
             useDynamicSerDe = true;
           }
         }
@@ -676,6 +678,9 @@
         LOG.info("Default to DynamicSerDe for table " + crtTbl.getTableName() );
         tbl.setSerializationLib(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName());
         tbl.setSerdeParam(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
+      } else {
+        LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName() );
+        tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
       }
     }
 
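
The DDLTask.java hunk changes the default-SerDe decision for CREATE TABLE without a SERDE clause: instead of falling back to DynamicSerDe whenever any column is non-string, it now does so only when a column type is parameterized (contains '<' or '>', e.g. array<int> or map<string,string>), and otherwise defaults to LazySimpleSerDe. A sketch of the predicate (the function name is illustrative):

    def default_serde(column_types):
        if any('<' in t or '>' in t for t in column_types):
            return 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe'
        return 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'

    assert default_serde(['int', 'string']).endswith('LazySimpleSerDe')
    assert default_serde(['map<string,string>']).endswith('DynamicSerDe')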

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/TypeInfoUtils.java Tue Feb 17 19:39:29 2009
@@ -147,24 +147,6 @@
     );
   }
 
-  /**
-   * The mapping from type name in DDL to the Java class. 
-   */
-  public static final Map<String, Class<?>> TypeNameToClass = new HashMap<String, Class<?>>();
-  static {
-    TypeNameToClass.put(Constants.BOOLEAN_TYPE_NAME, Boolean.class);
-    TypeNameToClass.put(Constants.TINYINT_TYPE_NAME, Byte.class);
-    TypeNameToClass.put(Constants.SMALLINT_TYPE_NAME, Short.class);
-    TypeNameToClass.put(Constants.INT_TYPE_NAME, Integer.class);
-    TypeNameToClass.put(Constants.BIGINT_TYPE_NAME, Long.class);
-    TypeNameToClass.put(Constants.FLOAT_TYPE_NAME, Float.class);
-    TypeNameToClass.put(Constants.DOUBLE_TYPE_NAME, Double.class);
-    TypeNameToClass.put(Constants.STRING_TYPE_NAME, String.class);
-    TypeNameToClass.put(Constants.DATE_TYPE_NAME, java.sql.Date.class);
-    // These types are not supported yet. 
-    // TypeNameToClass.put(Constants.DATETIME_TYPE_NAME);
-    // TypeNameToClass.put(Constants.TIMESTAMP_TYPE_NAME);
-  }
   
   /**
    * Return the primitive type corresponding to the field schema
@@ -174,7 +156,7 @@
   public static TypeInfo getPrimitiveTypeInfoFromFieldSchema(FieldSchema field) {
     String type = field.getType();
     
-    Class<?> c = TypeNameToClass.get(type);
+    Class<?> c = ObjectInspectorUtils.typeNameToClass.get(type);
     return c == null ? null : TypeInfoFactory.getPrimitiveTypeInfo(c);
   }
 }
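
The TypeInfoUtils.java hunk removes the class-local TypeNameToClass map and points getPrimitiveTypeInfoFromFieldSchema at ObjectInspectorUtils.typeNameToClass instead, so the DDL-type-name-to-Java-class mapping is maintained in one place (presumably the serde2 ObjectInspectorUtils, though the import is outside this diff).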

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out Tue Feb 17 19:39:29 2009
@@ -1,2 +1,2 @@
-Invalid table columns : Partition column name ds conflicts with table columns.
+Replace columns is not supported for this table. SerDe may be incompatible.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Tue Feb 17 19:39:29 2009
@@ -31,7 +31,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
   Stage: Stage-0
@@ -41,7 +41,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Tue Feb 17 19:39:29 2009
@@ -55,7 +55,7 @@
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest1
 
   Stage: Stage-0
@@ -65,7 +65,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out Tue Feb 17 19:39:29 2009
@@ -41,7 +41,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/67494501/106593589.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/1064081965/497188115.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -79,7 +79,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_g1
 
   Stage: Stage-0
@@ -89,7 +89,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest_g1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out Tue Feb 17 19:39:29 2009
@@ -42,7 +42,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-105/build/ql/tmp/228651165/7195775.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/341327619/216157041.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -80,7 +80,7 @@
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-105/build/ql/tmp/228651165/7195775.10002 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/341327619/216157041.10002 
           Reduce Output Operator
             sort order: 
             tag: -1
@@ -103,7 +103,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -113,7 +113,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out Tue Feb 17 19:39:29 2009
@@ -48,7 +48,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/632752964/335784834.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/623301799/541321831.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -86,7 +86,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -96,7 +96,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out Tue Feb 17 19:39:29 2009
@@ -43,7 +43,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/75837910/35360202.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/398752760/916418821.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -88,7 +88,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_g2
 
   Stage: Stage-0
@@ -98,7 +98,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest_g2
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out Tue Feb 17 19:39:29 2009
@@ -58,7 +58,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/athusoo/commits/hive_trunk_ws3/build/ql/tmp/435212851/489689211.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/73868446/32430537.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -103,7 +103,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -113,7 +113,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out Tue Feb 17 19:39:29 2009
@@ -43,7 +43,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/156149158/223155059.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/72566128/1350457683.10001 
           Reduce Output Operator
             sort order: 
             tag: -1
@@ -96,7 +96,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -106,7 +106,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out Tue Feb 17 19:39:29 2009
@@ -65,7 +65,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/1134254033/650040203.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/793761293/734762737.10001 
           Reduce Output Operator
             sort order: 
             tag: -1
@@ -118,7 +118,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -128,7 +128,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out Tue Feb 17 19:39:29 2009
@@ -40,7 +40,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/1491006708/287075280.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/50461607/40782581.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -65,7 +65,7 @@
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: dest1
 
   Stage: Stage-0
@@ -75,7 +75,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out Tue Feb 17 19:39:29 2009
@@ -42,7 +42,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -52,7 +52,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out Tue Feb 17 19:39:29 2009
@@ -41,7 +41,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/350108858/95672649.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/234258046/892792028.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -79,7 +79,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -89,7 +89,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out Tue Feb 17 19:39:29 2009
@@ -45,7 +45,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -55,7 +55,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out Tue Feb 17 19:39:29 2009
@@ -40,7 +40,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/454201677/52450507.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/594382705/10780360.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -65,7 +65,7 @@
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: dest1
 
   Stage: Stage-0
@@ -75,7 +75,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out Tue Feb 17 19:39:29 2009
@@ -31,7 +31,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
   Stage: Stage-0
@@ -41,7 +41,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out Tue Feb 17 19:39:29 2009
@@ -43,7 +43,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -53,7 +53,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out Tue Feb 17 19:39:29 2009
@@ -31,7 +31,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
             Filter Operator
               predicate:
@@ -54,7 +54,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest2
             Filter Operator
               predicate:
@@ -73,7 +73,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest3
 
   Stage: Stage-0
@@ -83,13 +83,13 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest2
             partition:
               ds 2008-04-08
@@ -98,7 +98,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest3
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out Tue Feb 17 19:39:29 2009
@@ -31,7 +31,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
             Filter Operator
               predicate:
@@ -54,7 +54,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest2
             Filter Operator
               predicate:
@@ -73,7 +73,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest3
             Filter Operator
               predicate:
@@ -99,13 +99,13 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest2
             partition:
               ds 2008-04-08
@@ -114,7 +114,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest3
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out Tue Feb 17 19:39:29 2009
@@ -58,7 +58,7 @@
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
 
   Stage: Stage-0
@@ -68,7 +68,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out Tue Feb 17 19:39:29 2009
@@ -49,7 +49,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-105/build/ql/tmp/360702650/134814302.10001 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/213302378/79959031.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -88,7 +88,7 @@
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
   Stage: Stage-0
@@ -98,7 +98,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out Tue Feb 17 19:39:29 2009
@@ -62,7 +62,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -72,7 +72,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out Tue Feb 17 19:39:29 2009
@@ -62,7 +62,7 @@
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
 
   Stage: Stage-0
@@ -72,7 +72,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out Tue Feb 17 19:39:29 2009
@@ -62,7 +62,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
   Stage: Stage-0
@@ -72,19 +72,19 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest2
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-105/build/ql/tmp/1425158066/374907939.10002 
+        /data/users/zshao/sync/apache-trunk-HIVE-270/build/ql/tmp/332566619/774394260.10002 
           Reduce Output Operator
             sort order: 
             tag: -1
@@ -107,7 +107,7 @@
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest2
 
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out?rev=745212&r1=745211&r2=745212&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out Tue Feb 17 19:39:29 2009
@@ -63,7 +63,7 @@
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
 
   Stage: Stage-0
@@ -73,7 +73,7 @@
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1