Posted to commits@hive.apache.org by xu...@apache.org on 2014/04/25 22:56:32 UTC

svn commit: r1590140 [1/2] - in /hive/trunk: contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/ contrib/src/test/org/apache/hadoop/hive/contrib/serde2/ hbase-handler/src/test/org/apache/hadoop/hive/hbase/ hcatalog/core/src/main/java/org/apache/...

Author: xuefu
Date: Fri Apr 25 20:56:31 2014
New Revision: 1590140

URL: http://svn.apache.org/r1590140
Log:
HIVE-6853: Reading of partitioned Avro data fails if partition schema does not match table schema (Anthony via Xuefu)
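
The mechanical pattern across this patch: direct calls to
Deserializer.initialize(conf, props) are replaced with
SerDeUtils.initializeSerDe(deserializer, conf, tableProps, partProps), so
that table-level and partition-level properties travel separately (null
partition properties where no partition context exists, the overlay of both
where there is one; cf. SerDeUtils.createOverlayedProperties in the
MapOperator and Utilities hunks below). A minimal sketch of the new call
pattern, assuming Hive jars of this era on the classpath; the column names
and types are made up for illustration:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.SerDeUtils;
    import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;

    public class InitializeSerDeSketch {
      public static void main(String[] args) throws SerDeException {
        Configuration conf = new Configuration();

        // Table-level schema (hypothetical columns for illustration).
        Properties tableProps = new Properties();
        tableProps.setProperty("columns", "id,name");
        tableProps.setProperty("columns.types", "int:string");

        // Partition-level overrides. Most call sites in this patch pass
        // null here because they initialize against the table alone.
        Properties partProps = new Properties();
        partProps.setProperty("columns.types", "bigint:string");

        Deserializer serde = new LazySimpleSerDe();
        // Before this patch:  serde.initialize(conf, tableProps);
        // After: table and partition properties are handed over separately,
        // so a SerDe (e.g. AvroSerDe) can reconcile a partition schema that
        // differs from the table schema instead of failing.
        SerDeUtils.initializeSerDe(serde, conf, tableProps, partProps);

        System.out.println(serde.getObjectInspector().getTypeName());
      }
    }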

Modified:
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java
    hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
    hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
    hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
    hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
    hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestMapJoinTableContainer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/Utilities.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
    hive/trunk/ql/src/test/queries/clientpositive/avro_partitioned.q
    hive/trunk/ql/src/test/results/clientpositive/avro_partitioned.q.out
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerde.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestLazyBinaryColumnarSerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java Fri Apr 25 20:56:31 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.serde2.AbstractDeserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ReflectionStructObjectInspector;
@@ -182,7 +183,7 @@ public class S3LogDeserializer extends A
       // Text("04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196 static.zemanta.com [09/Apr/2009:23:12:39 +0000] 65.94.12.181 65a011a29cdf8ec533ec3d1ccaae921c EEE6FFE9B9F9EA29 REST.HEAD.OBJECT readside/loader.js%22+defer%3D%22defer \"HEAD /readside/loader.js\"+defer=\"defer HTTP/1.0\" 403 AccessDenied 231 - 7 - \"-\" \"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)\"");
       Text sample = new Text(
           "04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196 img.zemanta.com [10/Apr/2009:05:34:01 +0000] 70.32.81.92 65a011a29cdf8ec533ec3d1ccaae921c F939A7D698D27C63 REST.GET.OBJECT reblog_b.png \"GET /reblog_b.png?x-id=79ca9376-6326-41b7-9257-eea43d112eb2 HTTP/1.0\" 200 - 1250 1250 160 159 \"-\" \"Firefox 0.8 (Linux)\" useragent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040614 Firefox/0.8\"");
-      serDe.initialize(conf, tbl);
+      SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
       Object row = serDe.deserialize(sample);
       System.err.println(serDe.getObjectInspector().getClass().toString());
       ReflectionStructObjectInspector oi = (ReflectionStructObjectInspector) serDe

Modified: hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java (original)
+++ hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java Fri Apr 25 20:56:31 2014
@@ -24,6 +24,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -44,7 +45,7 @@ public class TestRegexSerDe extends Test
     schema.setProperty("output.format.string", outputFormatString);
 
     RegexSerDe serde = new RegexSerDe();
-    serde.initialize(new Configuration(), schema);
+    SerDeUtils.initializeSerDe(serde, new Configuration(), schema, null);
     return serde;
   }
 

Modified: hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Fri Apr 25 20:56:31 2014
@@ -115,28 +115,28 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe serDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesI_I();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
 
     serDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesI_II();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
 
     serDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesI_III();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
 
     serDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesI_IV();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
   }
@@ -149,7 +149,7 @@ public class TestHBaseSerDe extends Test
     long putTimestamp = 1;
     tbl.setProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP,
             Long.toString(putTimestamp));
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
 
     byte [] cfa = "cola".getBytes();
@@ -356,21 +356,21 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe serDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesII_I();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
 
     serDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesII_II();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
 
     serDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesII_III();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerialize(serDe, r, p, expectedFieldsData);
   }
@@ -486,7 +486,7 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe hbaseSerDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesForHiveMapHBaseColumnFamily();
-    hbaseSerDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
 
     deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys,
         columnFamilies, columnQualifiersAndValues);
@@ -494,7 +494,7 @@ public class TestHBaseSerDe extends Test
     hbaseSerDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesForHiveMapHBaseColumnFamilyII();
-    hbaseSerDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
 
     deserializeAndSerializeHiveMapHBaseColumnFamily(hbaseSerDe, r, p, expectedData, rowKeys,
         columnFamilies, columnQualifiersAndValues);
@@ -615,7 +615,7 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe hbaseSerDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesForHiveMapHBaseColumnFamilyII_I();
-    hbaseSerDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
 
     deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData,
         columnFamilies, columnQualifiersAndValues);
@@ -623,7 +623,7 @@ public class TestHBaseSerDe extends Test
     hbaseSerDe = new HBaseSerDe();
     conf = new Configuration();
     tbl = createPropertiesForHiveMapHBaseColumnFamilyII_II();
-    hbaseSerDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(hbaseSerDe, conf, tbl, null);
 
     deserializeAndSerializeHiveMapHBaseColumnFamilyII(hbaseSerDe, r, p, expectedData,
         columnFamilies, columnQualifiersAndValues);
@@ -748,7 +748,7 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe serDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesForColumnPrefixes();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     Object notPresentKey = new Text("unwanted_col");
 
@@ -842,7 +842,7 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe serDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesForCompositeKeyWithSeparator();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerializeHBaseCompositeKey(serDe, r, p);
   }
@@ -892,7 +892,7 @@ public class TestHBaseSerDe extends Test
     HBaseSerDe serDe = new HBaseSerDe();
     Configuration conf = new Configuration();
     Properties tbl = createPropertiesForCompositeKeyWithoutSeparator();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     deserializeAndSerializeHBaseCompositeKey(serDe, r, p);
   }

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java Fri Apr 25 20:56:31 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -142,14 +143,17 @@ class InternalUtil {
   // if the default was decided by the serde
   static void initializeOutputSerDe(SerDe serDe, Configuration conf, OutputJobInfo jobInfo)
     throws SerDeException {
-    serDe.initialize(conf, getSerdeProperties(jobInfo.getTableInfo(), jobInfo.getOutputSchema()));
+    SerDeUtils.initializeSerDe(serDe, conf,
+                               getSerdeProperties(jobInfo.getTableInfo(),
+                                                  jobInfo.getOutputSchema()),
+                               null);
   }
 
   static void initializeDeserializer(Deserializer deserializer, Configuration conf,
                      HCatTableInfo info, HCatSchema schema) throws SerDeException {
     Properties props = getSerdeProperties(info, schema);
     LOG.info("Initializing " + deserializer.getClass().getName() + " with properties " + props);
-    deserializer.initialize(conf, props);
+    SerDeUtils.initializeSerDe(deserializer, conf, props, null);
   }
 
   private static Properties getSerdeProperties(HCatTableInfo info, HCatSchema s)

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestHCatRecordSerDe.java Fri Apr 25 20:56:31 2014
@@ -30,6 +30,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.io.Writable;
 import org.slf4j.Logger;
@@ -123,7 +124,7 @@ public class TestHCatRecordSerDe extends
       HCatRecord r = e.getValue();
 
       HCatRecordSerDe hrsd = new HCatRecordSerDe();
-      hrsd.initialize(conf, tblProps);
+      SerDeUtils.initializeSerDe(hrsd, conf, tblProps, null);
 
       LOG.info("ORIG: {}", r);
 
@@ -144,7 +145,7 @@ public class TestHCatRecordSerDe extends
 
       // serialize using another serde, and read out that object repr.
       LazySimpleSerDe testSD = new LazySimpleSerDe();
-      testSD.initialize(conf, tblProps);
+      SerDeUtils.initializeSerDe(testSD, conf, tblProps, null);
 
       Writable s3 = testSD.serialize(s, hrsd.getObjectInspector());
       LOG.info("THREE: {}", s3);

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java Fri Apr 25 20:56:31 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.slf4j.Logger;
@@ -143,10 +144,10 @@ public class TestJsonSerDe extends TestC
       HCatRecord r = e.second;
 
       HCatRecordSerDe hrsd = new HCatRecordSerDe();
-      hrsd.initialize(conf, tblProps);
+      SerDeUtils.initializeSerDe(hrsd, conf, tblProps, null);
 
       JsonSerDe jsde = new JsonSerDe();
-      jsde.initialize(conf, tblProps);
+      SerDeUtils.initializeSerDe(jsde, conf, tblProps, null);
 
       LOG.info("ORIG:{}", r);
 
@@ -195,10 +196,10 @@ public class TestJsonSerDe extends TestC
       LOG.info("modif tbl props:{}", internalTblProps);
 
       JsonSerDe wjsd = new JsonSerDe();
-      wjsd.initialize(conf, internalTblProps);
+      SerDeUtils.initializeSerDe(wjsd, conf, internalTblProps, null);
 
       JsonSerDe rjsd = new JsonSerDe();
-      rjsd.initialize(conf, tblProps);
+      SerDeUtils.initializeSerDe(rjsd, conf, tblProps, null);
 
       LOG.info("ORIG:{}", r);
 
@@ -266,7 +267,7 @@ public class TestJsonSerDe extends TestC
     props.put(serdeConstants.LIST_COLUMNS, "s,k");
     props.put(serdeConstants.LIST_COLUMN_TYPES, "struct<a:int,b:string>,int");
     JsonSerDe rjsd = new JsonSerDe();
-    rjsd.initialize(conf, props);
+    SerDeUtils.initializeSerDe(rjsd, conf, props, null);
 
     Text jsonText = new Text("{ \"x\" : \"abc\" , "
         + " \"t\" : { \"a\":\"1\", \"b\":\"2\", \"c\":[ { \"x\":2 , \"y\":3 } , { \"x\":3 , \"y\":2 }] } ,"

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java Fri Apr 25 20:56:31 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.RCFile;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -75,7 +76,7 @@ public class TestRCFileMapReduceInputFor
       serDe = new ColumnarSerDe();
       // Create the SerDe
       tbl = createProperties();
-      serDe.initialize(conf, tbl);
+      SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     } catch (Exception e) {
     }
   }

Modified: hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java (original)
+++ hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java Fri Apr 25 20:56:31 2014
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.io.BytesWritable;
 
@@ -248,7 +249,7 @@ public class DelimitedInputWriter extend
       Properties tableProps = MetaStoreUtils.getTableMetadata(tbl);
       tableProps.setProperty("field.delim", String.valueOf(serdeSeparator));
       LazySimpleSerDe serde = new LazySimpleSerDe();
-      serde.initialize(conf, tableProps);
+      SerDeUtils.initializeSerDe(serde, conf, tableProps, null);
       return serde;
     } catch (SerDeException e) {
       throw new SerializationError("Error initializing serde", e);

Modified: hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java (original)
+++ hive/trunk/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java Fri Apr 25 20:56:31 2014
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.hcatalog.data.JsonSerDe;
 
@@ -95,7 +96,7 @@ public class StrictJsonWriter extends Ab
     try {
       Properties tableProps = MetaStoreUtils.getTableMetadata(tbl);
       JsonSerDe serde = new JsonSerDe();
-      serde.initialize(conf, tableProps);
+      SerDeUtils.initializeSerDe(serde, conf, tableProps, null);
       return serde;
     } catch (SerDeException e) {
       throw new SerializationError("Error initializing serde " + JsonSerDe.class.getName(), e);

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java Fri Apr 25 20:56:31 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -108,7 +109,7 @@ public class HiveQueryResultSet extends 
         LOG.debug("Column types: " + types);
         props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types);
       }
-      serde.initialize(new Configuration(), props);
+      SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);
 
     } catch (Exception ex) {
       ex.printStackTrace();

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Fri Apr 25 20:56:31 2014
@@ -60,6 +60,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -336,7 +337,7 @@ public class MetaStoreUtils {
     try {
       Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(lib).
         asSubclass(Deserializer.class), conf);
-      deserializer.initialize(conf, MetaStoreUtils.getTableMetadata(table));
+      SerDeUtils.initializeSerDe(deserializer, conf, MetaStoreUtils.getTableMetadata(table), null);
       return deserializer;
     } catch (RuntimeException e) {
       throw e;
@@ -372,7 +373,8 @@ public class MetaStoreUtils {
     try {
       Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(lib).
         asSubclass(Deserializer.class), conf);
-      deserializer.initialize(conf, MetaStoreUtils.getPartitionMetadata(part, table));
+      SerDeUtils.initializeSerDe(deserializer, conf, MetaStoreUtils.getTableMetadata(table),
+                                 MetaStoreUtils.getPartitionMetadata(part, table));
       return deserializer;
     } catch (RuntimeException e) {
       throw e;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java Fri Apr 25 20:56:31 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -61,7 +62,7 @@ public class DefaultFetchFormatter<T> im
       serdeProps.put(SERIALIZATION_FORMAT, props.getProperty(SERIALIZATION_FORMAT));
       serdeProps.put(SERIALIZATION_NULL_FORMAT, props.getProperty(SERIALIZATION_NULL_FORMAT));
     }
-    serde.initialize(conf, serdeProps);
+    SerDeUtils.initializeSerDe(serde, conf, serdeProps, null);
     return serde;
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java Fri Apr 25 20:56:31 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.plan.Op
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -131,12 +132,13 @@ public class DemuxOperator extends Opera
         TableDesc keyTableDesc = conf.getKeysSerializeInfos().get(newTag);
         Deserializer inputKeyDeserializer = ReflectionUtils.newInstance(keyTableDesc
             .getDeserializerClass(), null);
-        inputKeyDeserializer.initialize(null, keyTableDesc.getProperties());
+        SerDeUtils.initializeSerDe(inputKeyDeserializer, null, keyTableDesc.getProperties(), null);
 
         TableDesc valueTableDesc = conf.getValuesSerializeInfos().get(newTag);
         Deserializer inputValueDeserializer = ReflectionUtils.newInstance(valueTableDesc
             .getDeserializerClass(), null);
-        inputValueDeserializer.initialize(null, valueTableDesc.getProperties());
+        SerDeUtils.initializeSerDe(inputValueDeserializer, null, valueTableDesc.getProperties(),
+                                   null);
 
         List<ObjectInspector> oi = new ArrayList<ObjectInspector>();
         oi.add(inputKeyDeserializer.getObjectInspector());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java Fri Apr 25 20:56:31 2014
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.DelegatedObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -240,7 +241,7 @@ public class FetchOperator implements Se
 
   private StructObjectInspector getRowInspectorFromTable(TableDesc table) throws Exception {
     Deserializer serde = table.getDeserializerClass().newInstance();
-    serde.initialize(job, table.getProperties());
+    SerDeUtils.initializeSerDe(serde, job, table.getProperties(), null);
     return createRowInspector(getStructOIFrom(serde.getObjectInspector()));
   }
 
@@ -261,7 +262,7 @@ public class FetchOperator implements Se
   private StructObjectInspector getRowInspectorFromPartitionedTable(TableDesc table)
       throws Exception {
     Deserializer serde = table.getDeserializerClass().newInstance();
-    serde.initialize(job, table.getProperties());
+    SerDeUtils.initializeSerDe(serde, job, table.getProperties(), null);
     String pcols = table.getProperties().getProperty(
         org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
     String[] partKeys = pcols.trim().split("/");
@@ -427,14 +428,15 @@ public class FetchOperator implements Se
 
       splitNum = 0;
       serde = partDesc.getDeserializer(job);
-      serde.initialize(job, partDesc.getOverlayedProperties());
+      SerDeUtils.initializeSerDe(serde, job, partDesc.getTableDesc().getProperties(),
+                                 partDesc.getProperties());
 
       if (currTbl != null) {
         tblSerde = serde;
       }
       else {
         tblSerde = currPart.getTableDesc().getDeserializerClass().newInstance();
-        tblSerde.initialize(job, currPart.getTableDesc().getProperties());
+        SerDeUtils.initializeSerDe(tblSerde, job, currPart.getTableDesc().getProperties(), null);
       }
 
       ObjectInspector outputOI = ObjectInspectorConverters.getConvertedOI(
@@ -448,7 +450,9 @@ public class FetchOperator implements Se
       if (LOG.isDebugEnabled()) {
         LOG.debug("Creating fetchTask with deserializer typeinfo: "
             + serde.getObjectInspector().getTypeName());
-        LOG.debug("deserializer properties: " + partDesc.getOverlayedProperties());
+        LOG.debug("deserializer properties:\ntable properties: " +
+            partDesc.getTableDesc().getProperties() + "\npartition properties: " +
+            partDesc.getProperties());
       }
 
       if (currPart != null) {
@@ -704,7 +708,7 @@ public class FetchOperator implements Se
       // Whenever a new partition is being read, a new converter is being created
       PartitionDesc partition = listParts.get(0);
       Deserializer tblSerde = partition.getTableDesc().getDeserializerClass().newInstance();
-      tblSerde.initialize(job, partition.getTableDesc().getProperties());
+      SerDeUtils.initializeSerDe(tblSerde, job, partition.getTableDesc().getProperties(), null);
 
       partitionedTableOI = null;
       ObjectInspector tableOI = tblSerde.getObjectInspector();
@@ -713,7 +717,8 @@ public class FetchOperator implements Se
       for (PartitionDesc listPart : listParts) {
         partition = listPart;
         Deserializer partSerde = listPart.getDeserializer(job);
-        partSerde.initialize(job, listPart.getOverlayedProperties());
+        SerDeUtils.initializeSerDe(partSerde, job, partition.getTableDesc().getProperties(),
+                                   listPart.getProperties());
 
         partitionedTableOI = ObjectInspectorConverters.getConvertedOI(
             partSerde.getObjectInspector(), tableOI, oiSettableProperties);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java Fri Apr 25 20:56:31 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.plan.Ha
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 
 public class HashTableDummyOperator extends Operator<HashTableDummyDesc> implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -34,7 +35,7 @@ public class HashTableDummyOperator exte
     TableDesc tbl = this.getConf().getTbl();
     try {
       Deserializer serde = tbl.getDeserializerClass().newInstance();
-      serde.initialize(hconf, tbl.getProperties());
+      SerDeUtils.initializeSerDe(serde, hconf, tbl.getProperties(), null);
       this.outputObjInspector = serde.getObjectInspector();
       initializeChildren(hconf);
     } catch (Exception e) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java Fri Apr 25 20:56:31 2014
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.ql.plan.ap
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -177,7 +178,7 @@ public class HashTableSinkOperator exten
       TableDesc keyTableDesc = conf.getKeyTblDesc();
       SerDe keySerde = (SerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
           null);
-      keySerde.initialize(null, keyTableDesc.getProperties());
+      SerDeUtils.initializeSerDe(keySerde, null, keyTableDesc.getProperties(), null);
       MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerde, false);
       for (Byte pos : order) {
         if (pos == posBigTableAlias) {
@@ -186,7 +187,7 @@ public class HashTableSinkOperator exten
         mapJoinTables[pos] = new HashMapWrapper(hashTableThreshold, hashTableLoadFactor);
         TableDesc valueTableDesc = conf.getValueTblFilteredDescs().get(pos);
         SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
-        valueSerDe.initialize(null, valueTableDesc.getProperties());
+        SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
         mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyContext, new MapJoinObjectSerDeContext(
             valueSerDe, hasFilter(pos)));
       }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java Fri Apr 25 20:56:31 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.plan.Ta
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -261,7 +262,7 @@ public class JoinUtil {
     SerDe sd = (SerDe) ReflectionUtils.newInstance(desc.getDeserializerClass(),
         null);
     try {
-      sd.initialize(null, desc.getProperties());
+      SerDeUtils.initializeSerDe(sd, null, desc.getProperties(), null);
     } catch (SerDeException e) {
       e.printStackTrace();
       return null;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java Fri Apr 25 20:56:31 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.plan.ap
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -117,7 +118,7 @@ public class MapJoinOperator extends Abs
     TableDesc keyTableDesc = conf.getKeyTblDesc();
     SerDe keySerializer = (SerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
         null);
-    keySerializer.initialize(null, keyTableDesc.getProperties());
+    SerDeUtils.initializeSerDe(keySerializer, null, keyTableDesc.getProperties(), null);
     MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerializer, false);
     for (int pos = 0; pos < order.length; pos++) {
       if (pos == posBigTable) {
@@ -131,7 +132,7 @@ public class MapJoinOperator extends Abs
       }
       SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(),
           null);
-      valueSerDe.initialize(null, valueTableDesc.getProperties());
+      SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
       MapJoinObjectSerDeContext valueContext = new MapJoinObjectSerDeContext(valueSerDe, hasFilter(pos));
       mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyContext, valueContext);
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Fri Apr 25 20:56:31 2014
@@ -184,18 +184,18 @@ public class MapOperator extends Operato
     MapOpCtx opCtx = new MapOpCtx();
     // Use table properties in case of unpartitioned tables,
     // and the union of table properties and partition properties, with partition
-    // taking precedence
-    Properties partProps = isPartitioned(pd) ?
-        pd.getOverlayedProperties() : pd.getTableDesc().getProperties();
+    // taking precedence, in the case of partitioned tables
+    Properties overlayedProps =
+        SerDeUtils.createOverlayedProperties(td.getProperties(), pd.getProperties());
 
     Map<String, String> partSpec = pd.getPartSpec();
 
-    opCtx.tableName = String.valueOf(partProps.getProperty("name"));
+    opCtx.tableName = String.valueOf(overlayedProps.getProperty("name"));
     opCtx.partName = String.valueOf(partSpec);
 
     Class serdeclass = hconf.getClassByName(pd.getSerdeClassName());
     opCtx.deserializer = (Deserializer) serdeclass.newInstance();
-    opCtx.deserializer.initialize(hconf, partProps);
+    SerDeUtils.initializeSerDe(opCtx.deserializer, hconf, td.getProperties(), pd.getProperties());
 
     StructObjectInspector partRawRowObjectInspector =
         (StructObjectInspector) opCtx.deserializer.getObjectInspector();
@@ -208,11 +208,12 @@ public class MapOperator extends Operato
     // Next check if this table has partitions and if so
     // get the list of partition names as well as allocate
     // the serdes for the partition columns
-    String pcols = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
+    String pcols = overlayedProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
     
     if (pcols != null && pcols.length() > 0) {
       String[] partKeys = pcols.trim().split("/");
-      String pcolTypes = partProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);      
+      String pcolTypes = overlayedProps
+          .getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
       String[] partKeyTypes = pcolTypes.trim().split(":");
       
       if (partKeys.length > partKeyTypes.length) {
@@ -300,11 +301,9 @@ public class MapOperator extends Operato
         PartitionDesc pd = conf.getPathToPartitionInfo().get(onefile);
         TableDesc tableDesc = pd.getTableDesc();
         Properties tblProps = tableDesc.getProperties();
-        // If the partition does not exist, use table properties
-        Properties partProps = isPartitioned(pd) ? pd.getOverlayedProperties() : tblProps;
         Class sdclass = hconf.getClassByName(pd.getSerdeClassName());
         Deserializer partDeserializer = (Deserializer) sdclass.newInstance();
-        partDeserializer.initialize(hconf, partProps);
+        SerDeUtils.initializeSerDe(partDeserializer, hconf, tblProps, pd.getProperties());
         StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer
             .getObjectInspector();
 
@@ -313,7 +312,7 @@ public class MapOperator extends Operato
             (identityConverterTableDesc.contains(tableDesc))) {
             sdclass = hconf.getClassByName(tableDesc.getSerdeClassName());
             Deserializer tblDeserializer = (Deserializer) sdclass.newInstance();
-          tblDeserializer.initialize(hconf, tblProps);
+            SerDeUtils.initializeSerDe(tblDeserializer, hconf, tblProps, null);
           tblRawRowObjectInspector =
               (StructObjectInspector) ObjectInspectorConverters.getConvertedOI(
                   partRawRowObjectInspector,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java Fri Apr 25 20:56:31 2014
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.plan.Sc
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
@@ -242,8 +243,8 @@ public class ScriptOperator extends Oper
 
       scriptOutputDeserializer = conf.getScriptOutputInfo()
           .getDeserializerClass().newInstance();
-      scriptOutputDeserializer.initialize(hconf, conf.getScriptOutputInfo()
-          .getProperties());
+      SerDeUtils.initializeSerDe(scriptOutputDeserializer, hconf,
+                                 conf.getScriptOutputInfo().getProperties(), null);
 
       scriptInputSerializer = (Serializer) conf.getScriptInputInfo()
           .getDeserializerClass().newInstance();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java Fri Apr 25 20:56:31 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.plan.Op
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -138,7 +139,7 @@ public class SkewJoinHandler {
       try {
         SerDe serializer = (SerDe) ReflectionUtils.newInstance(tblDesc.get(
             alias).getDeserializerClass(), null);
-        serializer.initialize(null, tblDesc.get(alias).getProperties());
+        SerDeUtils.initializeSerDe(serializer, null, tblDesc.get(alias).getProperties(), null);
         tblSerializers.put((byte) i, serializer);
       } catch (SerDeException e) {
         LOG.error("Skewjoin will be disabled due to " + e.getMessage(), e);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Fri Apr 25 20:56:31 2014
@@ -159,6 +159,7 @@ import org.apache.hadoop.hive.ql.stats.S
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -2230,8 +2231,10 @@ public final class Utilities {
                   return;
                 }
                 HiveStorageHandler handler = HiveUtils.getStorageHandler(myConf,
-                    partDesc.getOverlayedProperties().getProperty(
-                    hive_metastoreConstants.META_TABLE_STORAGE));
+                    SerDeUtils.createOverlayedProperties(
+                        partDesc.getTableDesc().getProperties(),
+                        partDesc.getProperties())
+                        .getProperty(hive_metastoreConstants.META_TABLE_STORAGE));
                 if (handler instanceof InputEstimator) {
                   long total = 0;
                   TableDesc tableDesc = partDesc.getTableDesc();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java Fri Apr 25 20:56:31 2014
@@ -134,7 +134,7 @@ public class ExecReducer extends MapRedu
       keyTableDesc = gWork.getKeyDesc();
       inputKeyDeserializer = (SerDe) ReflectionUtils.newInstance(keyTableDesc
           .getDeserializerClass(), null);
-      inputKeyDeserializer.initialize(null, keyTableDesc.getProperties());
+      SerDeUtils.initializeSerDe(inputKeyDeserializer, null, keyTableDesc.getProperties(), null);
       keyObjectInspector = inputKeyDeserializer.getObjectInspector();
       valueTableDesc = new TableDesc[gWork.getTagToValueDesc().size()];
       for (int tag = 0; tag < gWork.getTagToValueDesc().size(); tag++) {
@@ -142,8 +142,8 @@ public class ExecReducer extends MapRedu
         valueTableDesc[tag] = gWork.getTagToValueDesc().get(tag);
         inputValueDeserializer[tag] = (SerDe) ReflectionUtils.newInstance(
             valueTableDesc[tag].getDeserializerClass(), null);
-        inputValueDeserializer[tag].initialize(null, valueTableDesc[tag]
-            .getProperties());
+        SerDeUtils.initializeSerDe(inputValueDeserializer[tag], null,
+                                   valueTableDesc[tag].getProperties(), null);
         valueObjectInspector[tag] = inputValueDeserializer[tag]
             .getObjectInspector();
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java Fri Apr 25 20:56:31 2014
@@ -122,7 +122,7 @@ public class ReduceRecordProcessor  exte
       keyTableDesc = redWork.getKeyDesc();
       inputKeyDeserializer = (SerDe) ReflectionUtils.newInstance(keyTableDesc
           .getDeserializerClass(), null);
-      inputKeyDeserializer.initialize(null, keyTableDesc.getProperties());
+      SerDeUtils.initializeSerDe(inputKeyDeserializer, null, keyTableDesc.getProperties(), null);
       keyObjectInspector = inputKeyDeserializer.getObjectInspector();
       reducer.setGroupKeyObjectInspector(keyObjectInspector);
       valueTableDesc = new TableDesc[redWork.getTagToValueDesc().size()];
@@ -131,8 +131,8 @@ public class ReduceRecordProcessor  exte
         valueTableDesc[tag] = redWork.getTagToValueDesc().get(tag);
         inputValueDeserializer[tag] = (SerDe) ReflectionUtils.newInstance(
             valueTableDesc[tag].getDeserializerClass(), null);
-        inputValueDeserializer[tag].initialize(null, valueTableDesc[tag]
-            .getProperties());
+        SerDeUtils.initializeSerDe(inputValueDeserializer[tag], null,
+                                   valueTableDesc[tag].getProperties(), null);
         valueObjectInspector[tag] = inputValueDeserializer[tag]
             .getObjectInspector();
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java Fri Apr 25 20:56:31 2014
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.ql.plan.Pa
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -176,7 +177,8 @@ public class VectorizedRowBatchCtx {
 
     Class serdeclass = hiveConf.getClassByName(part.getSerdeClassName());
     Deserializer partDeserializer = (Deserializer) serdeclass.newInstance(); 
-    partDeserializer.initialize(hiveConf, partProps);
+    SerDeUtils.initializeSerDe(partDeserializer, hiveConf, part.getTableDesc().getProperties(),
+                               partProps);
     StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer
         .getObjectInspector();
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java Fri Apr 25 20:56:31 2014
@@ -85,6 +85,7 @@ import org.apache.hadoop.hive.ql.udf.ptf
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -814,7 +815,7 @@ public class PTFTranslator {
     p.setProperty(
         org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
         serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES));
-    serDe.initialize(cfg, p);
+    SerDeUtils.initializeSerDe(serDe, cfg, p, null);
     return serDe;
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Apr 25 20:56:31 2014
@@ -178,6 +178,7 @@ import org.apache.hadoop.hive.serde2.Des
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.NullStructSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -5950,7 +5951,7 @@ public class SemanticAnalyzer extends Ba
     try {
       Deserializer deserializer = table_desc.getDeserializerClass()
           .newInstance();
-      deserializer.initialize(conf, table_desc.getProperties());
+      SerDeUtils.initializeSerDe(deserializer, conf, table_desc.getProperties(), null);
       oi = (StructObjectInspector) deserializer.getObjectInspector();
     } catch (Exception e) {
       throw new SemanticException(e);
@@ -6229,7 +6230,7 @@ public class SemanticAnalyzer extends Ba
     try {
       Deserializer deserializer = table_desc.getDeserializerClass()
           .newInstance();
-      deserializer.initialize(conf, table_desc.getProperties());
+      SerDeUtils.initializeSerDe(deserializer, conf, table_desc.getProperties(), null);
       oi = (StructObjectInspector) deserializer.getObjectInspector();
     } catch (Exception e) {
       throw new SemanticException(e);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Fri Apr 25 20:56:31 2014
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.udf.ptf
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -260,7 +261,7 @@ public class PTFDeserializer {
     try {
       SerDe serDe =  ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName).
           asSubclass(SerDe.class), hConf);
-      serDe.initialize(hConf, serDeProps);
+      SerDeUtils.initializeSerDe(serDe, hConf, serDeProps, null);
       shp.setSerde(serDe);
       StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serDe, OI);
       shp.setOI(outOI);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Fri Apr 25 20:56:31 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -129,7 +130,7 @@ public class PartitionDesc implements Se
     }
     Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(clazzName)
         .asSubclass(Deserializer.class), conf);
-    deserializer.initialize(conf, schema);
+    SerDeUtils.initializeSerDe(deserializer, conf, getTableDesc().getProperties(), schema);
     return deserializer;
   }
 
@@ -168,16 +169,6 @@ public class PartitionDesc implements Se
     return properties;
   }
 
-  public Properties getOverlayedProperties(){
-    if (tableDesc != null) {
-      Properties overlayedProps = new Properties(tableDesc.getProperties());
-      overlayedProps.putAll(getProperties());
-      return overlayedProps;
-    } else {
-      return getProperties();
-    }
-  }
-
   public void setProperties(final Properties properties) {
     this.properties = properties;
     for (Enumeration<?> keys =  properties.propertyNames(); keys.hasMoreElements();) {

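For context: every call site in this patch now funnels through SerDeUtils.initializeSerDe(deserializer, conf, tblProps, partProps), which receives both the table-level and partition-level properties. A minimal sketch of the overlay behavior that helper is assumed to provide, mirroring the getOverlayedProperties() method removed above (illustrative only, not the committed implementation):

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;

// Hypothetical stand-in for the helper introduced by this patch; the real
// SerDeUtils.initializeSerDe may differ in detail.
final class InitializeSerDeSketch {
  static void initializeSerDe(Deserializer deserializer, Configuration conf,
      Properties tblProps, Properties partProps) throws SerDeException {
    // Table-level properties act as defaults; partition-level properties
    // override them, the same overlay the removed
    // PartitionDesc.getOverlayedProperties() used to build.
    Properties overlayed =
        (tblProps == null) ? new Properties() : new Properties(tblProps);
    if (partProps != null) {
      overlayed.putAll(partProps);
    }
    deserializer.initialize(conf, overlayed);
  }
}
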
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Fri Apr 25 20:56:31 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.io.Hive
 import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.mapred.InputFormat;
 
 /**
@@ -79,7 +80,7 @@ public class TableDesc implements Serial
    */
   public Deserializer getDeserializer() throws Exception {
     Deserializer de = getDeserializerClass().newInstance();
-    de.initialize(null, properties);
+    SerDeUtils.initializeSerDe(de, null, properties, null);
     return de;
   }
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestMapJoinTableContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestMapJoinTableContainer.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestMapJoinTableContainer.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestMapJoinTableContainer.java Fri Apr 25 20:56:31 2014
@@ -26,6 +26,7 @@ import java.util.Properties;
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.io.Text;
 import org.junit.Before;
@@ -54,12 +55,12 @@ public class TestMapJoinTableContainer {
     Properties keyProps = new Properties();
     keyProps.put(serdeConstants.LIST_COLUMNS, "v1");
     keyProps.put(serdeConstants.LIST_COLUMN_TYPES, "string");
-    keySerde.initialize(null, keyProps);
+    SerDeUtils.initializeSerDe(keySerde, null, keyProps, null);
     LazyBinarySerDe valueSerde = new LazyBinarySerDe();
     Properties valueProps = new Properties();
     valueProps.put(serdeConstants.LIST_COLUMNS, "v1");
     valueProps.put(serdeConstants.LIST_COLUMN_TYPES, "string");
-    valueSerde.initialize(null, keyProps);
+    SerDeUtils.initializeSerDe(valueSerde, null, keyProps, null);
     containerSerde = new MapJoinTableContainerSerDe(
         new MapJoinObjectSerDeContext(keySerde, false),
         new MapJoinObjectSerDeContext(valueSerde, false));

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java Fri Apr 25 20:56:31 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -55,7 +56,7 @@ public class TestPTFRowContainer {
     p.setProperty(
         org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
         COL_TYPES);
-    serDe.initialize(cfg, p);
+    SerDeUtils.initializeSerDe(serDe, cfg, p, null);
   }
 
   private PTFRowContainer<List<Object>> rowContainer(int blockSize)

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/Utilities.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/Utilities.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/Utilities.java Fri Apr 25 20:56:31 2014
@@ -28,6 +28,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.io.BytesWritable;
 
@@ -51,7 +52,7 @@ class Utilities {
     Properties props = new Properties();
     props.put(serdeConstants.LIST_COLUMNS, columns);
     props.put(serdeConstants.LIST_COLUMN_TYPES, types);
-    serde.initialize(null, props);
+    SerDeUtils.initializeSerDe(serde, null, props, null);
     MapJoinObjectSerDeContext context = new MapJoinObjectSerDeContext(serde, false);    
     key.write(context, out);
     out.close();
@@ -84,7 +85,7 @@ class Utilities {
     Properties props = new Properties();
     props.put(serdeConstants.LIST_COLUMNS, columns);
     props.put(serdeConstants.LIST_COLUMN_TYPES, types);
-    serde.initialize(null, props);
+    SerDeUtils.initializeSerDe(serde, null, props, null);
     MapJoinObjectSerDeContext context = new MapJoinObjectSerDeContext(serde, true);    
     container.write(context, out);
     out.close();

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java Fri Apr 25 20:56:31 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.io.RCFi
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -96,7 +97,7 @@ public class TestVectorizedRowBatchCtx {
 
     try {
       serDe = new ColumnarSerDe();
-      serDe.initialize(conf, tbl);
+      SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     } catch (SerDeException e) {
      throw new RuntimeException(e);
     }
@@ -334,7 +335,7 @@ public class TestVectorizedRowBatchCtx {
 
     // Test VectorizedColumnarSerDe
     VectorizedColumnarSerDe vcs = new VectorizedColumnarSerDe();
-    vcs.initialize(this.conf, tbl);
+    SerDeUtils.initializeSerDe(vcs, this.conf, tbl, null);
     Writable w = vcs.serializeVector(batch, (StructObjectInspector) serDe
         .getObjectInspector());
     BytesRefArrayWritable[] refArray = (BytesRefArrayWritable[]) ((ObjectWritable) w).get();

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Fri Apr 25 20:56:31 2014
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -113,7 +114,7 @@ public class TestRCFile {
     serDe = new ColumnarSerDe();
     // Create the SerDe
     tbl = createProperties();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     try {
       bytesArray = new byte[][] {"123".getBytes("UTF-8"),
           "456".getBytes("UTF-8"), "789".getBytes("UTF-8"),
@@ -427,7 +428,7 @@ public class TestRCFile {
     AbstractSerDe serDe = new ColumnarSerDe();
     // Create the SerDe
     Properties tbl = createProperties();
-    serDe.initialize(conf, tbl);
+    SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
     String usage = "Usage: RCFile " + "[-count N]" + " file";
     if (args.length == 0) {

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Fri Apr 25 20:56:31 2014
@@ -74,6 +74,7 @@ import org.apache.hadoop.hive.ql.plan.Pa
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -937,7 +938,7 @@ public class TestInputOutputFormat {
     serde = new OrcSerde();
     properties.setProperty("columns", "x,y");
     properties.setProperty("columns.types", "int:int");
-    serde.initialize(conf, properties);
+    SerDeUtils.initializeSerDe(serde, conf, properties, null);
     assertEquals(OrcSerde.OrcSerdeRow.class, serde.getSerializedClass());
     inspector = (StructObjectInspector) serde.getObjectInspector();
     assertEquals("struct<x:int,y:int>", inspector.getTypeName());
@@ -1054,7 +1055,7 @@ public class TestInputOutputFormat {
     serde = new OrcSerde();
     properties.setProperty("columns", "z,r");
     properties.setProperty("columns.types", "int:struct<x:int,y:int>");
-    serde.initialize(conf, properties);
+    SerDeUtils.initializeSerDe(serde, conf, properties, null);
     inspector = (StructObjectInspector) serde.getObjectInspector();
     InputFormat<?,?> in = new OrcInputFormat();
     FileInputFormat.setInputPaths(conf, testFilePath.toString());
@@ -1098,7 +1099,7 @@ public class TestInputOutputFormat {
     properties.setProperty("columns", "x,y");
     properties.setProperty("columns.types", "int:int");
     SerDe serde = new OrcSerde();
-    serde.initialize(conf, properties);
+    SerDeUtils.initializeSerDe(serde, conf, properties, null);
     InputFormat<?,?> in = new OrcInputFormat();
     FileInputFormat.setInputPaths(conf, testFilePath.toString());
     InputSplit[] splits = in.getSplits(conf, 1);
@@ -1148,7 +1149,7 @@ public class TestInputOutputFormat {
     writer.close(true);
     serde = new OrcSerde();
     properties.setProperty("columns", "str,str2");
-    serde.initialize(conf, properties);
+    SerDeUtils.initializeSerDe(serde, conf, properties, null);
     inspector = (StructObjectInspector) serde.getObjectInspector();
     assertEquals("struct<str:string,str2:string>", inspector.getTypeName());
     InputFormat<?,?> in = new OrcInputFormat();

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java Fri Apr 25 20:56:31 2014
@@ -21,6 +21,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
 import org.apache.hadoop.hive.ql.io.parquet.writable.BinaryWritable;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -42,7 +43,7 @@ public class TestParquetSerDe extends Te
       final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
       final Configuration conf = new Configuration();
       final Properties tbl = createProperties();
-      serDe.initialize(conf, tbl);
+      SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
 
       // Data
       final Writable[] arr = new Writable[8];

Modified: hive/trunk/ql/src/test/queries/clientpositive/avro_partitioned.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/avro_partitioned.q?rev=1590140&r1=1590139&r2=1590140&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/avro_partitioned.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/avro_partitioned.q Fri Apr 25 20:56:31 2014
@@ -1,4 +1,4 @@
--- verify that new joins bring in correct schemas (including evolved schemas)
+-- Verify that table scans work with partitioned Avro tables
 CREATE TABLE episodes
 ROW FORMAT
 SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
@@ -69,5 +69,77 @@ SELECT * FROM episodes_partitioned WHERE
 SELECT * FROM episodes_partitioned ORDER BY air_date LIMIT 5;
 -- Fetch w/filter to specific partition
 SELECT * FROM episodes_partitioned WHERE doctor_pt = 6;
--- Fetch w/non-existant partition
+-- Fetch w/non-existent partition
 SELECT * FROM episodes_partitioned WHERE doctor_pt = 7 LIMIT 5;
+
+
+-- Verify that reading from an Avro partition works
+-- even if it has an old schema relative to the current table-level schema
+
+-- Create table and store schema in SERDEPROPERTIES
+CREATE TABLE episodes_partitioned_serdeproperties
+PARTITIONED BY (doctor_pt INT)
+ROW FORMAT
+SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+WITH SERDEPROPERTIES ('avro.schema.literal'='{
+  "namespace": "testing.hive.avro.serde",
+  "name": "episodes",
+  "type": "record",
+  "fields": [
+    {
+      "name":"title",
+      "type":"string",
+      "doc":"episode title"
+    },
+    {
+      "name":"air_date",
+      "type":"string",
+      "doc":"initial date"
+    },
+    {
+      "name":"doctor",
+      "type":"int",
+      "doc":"main actor playing the Doctor in episode"
+    }
+  ]
+}')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat';
+
+-- Insert data into partitions via dynamic partitioning
+INSERT INTO TABLE episodes_partitioned_serdeproperties PARTITION (doctor_pt) SELECT title, air_date, doctor, doctor as doctor_pt FROM episodes;
+
+-- Evolve the table schema by adding new array field "cast_and_crew"
+ALTER TABLE episodes_partitioned_serdeproperties
+SET SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+WITH SERDEPROPERTIES ('avro.schema.literal'='{
+  "namespace": "testing.hive.avro.serde",
+  "name": "episodes",
+  "type": "record",
+  "fields": [
+    {
+      "name":"cast_and_crew",
+      "type":{"type":"array","items":"string"},
+      "default":[]
+    },
+    {
+      "name":"title",
+      "type":"string",
+      "doc":"episode title"
+    },
+    {
+      "name":"air_date",
+      "type":"string",
+      "doc":"initial date"
+    },
+    {
+      "name":"doctor",
+      "type":"int",
+      "doc":"main actor playing the Doctor in episode"
+    }
+  ]
+}');
+
+-- Try selecting from the evolved table
+SELECT * FROM episodes_partitioned_serdeproperties ORDER BY air_date;
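+
+-- The SELECT above is the regression check for HIVE-6853: assuming standard
+-- Avro schema resolution, a reader-side field missing from the writer schema
+-- resolves to its declared default, so rows written against the pre-ALTER
+-- schema should come back with cast_and_crew as the empty array instead of
+-- failing the scan. A hypothetical spot-check along the same lines (not part
+-- of the committed test):
+SELECT cast_and_crew, title
+FROM episodes_partitioned_serdeproperties
+WHERE doctor_pt = 6;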