Posted to commits@hive.apache.org by se...@apache.org on 2016/11/15 20:21:09 UTC

[37/50] [abbrv] hive git commit: HIVE-15167 : remove SerDe interface; undeprecate Deserializer and Serializer (Sergey Shelukhin, reviewed by Ashutosh Chauhan)

HIVE-15167 : remove SerDe interface; undeprecate Deserializer and Serializer (Sergey Shelukhin, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/652ed7a7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/652ed7a7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/652ed7a7

Branch: refs/heads/hive-14535
Commit: 652ed7a796c687bbb3aff0504a5f91ee685eaab9
Parents: 23bba11
Author: Sergey Shelukhin <se...@apache.org>
Authored: Mon Nov 14 17:21:45 2016 -0800
Committer: Sergey Shelukhin <se...@apache.org>
Committed: Mon Nov 14 17:21:45 2016 -0800

----------------------------------------------------------------------
 .../hive/accumulo/AccumuloStorageHandler.java   |  4 +--
 .../hive/accumulo/serde/AccumuloSerDe.java      |  4 +--
 .../hive/contrib/serde2/TestRegexSerDe.java     |  6 ++--
 .../hadoop/hive/druid/DruidStorageHandler.java  |  4 +--
 .../hive/druid/QTestDruidStorageHandler.java    |  4 +--
 .../apache/hadoop/hive/hbase/HBaseSerDe.java    |  6 ++--
 .../hadoop/hive/hbase/HBaseStorageHandler.java  |  4 +--
 .../hive/hcatalog/data/HCatRecordSerDe.java     |  4 +--
 .../apache/hive/hcatalog/data/JsonSerDe.java    |  4 +--
 .../mapreduce/DefaultRecordWriterContainer.java |  4 +--
 ...namicPartitionFileRecordWriterContainer.java |  8 ++---
 .../mapreduce/FileOutputFormatContainer.java    |  6 ++--
 .../mapreduce/FileRecordWriterContainer.java    | 12 +++----
 .../mapreduce/FosterStorageHandler.java         | 10 +++---
 .../hive/hcatalog/mapreduce/InternalUtil.java   |  4 +--
 .../streaming/AbstractRecordWriter.java         |  4 +--
 .../streaming/DelimitedInputWriter.java         |  4 +--
 .../hcatalog/streaming/StrictJsonWriter.java    |  4 +--
 .../benchmark/storage/ColumnarStorageBench.java |  6 ++--
 .../hadoop/hive/llap/LlapRowRecordReader.java   |  8 ++---
 .../hive/ql/exec/HashTableSinkOperator.java     |  6 ++--
 .../apache/hadoop/hive/ql/exec/JoinUtil.java    |  8 ++---
 .../hadoop/hive/ql/exec/MapJoinOperator.java    |  6 ++--
 .../apache/hadoop/hive/ql/exec/PTFOperator.java |  4 +--
 .../hadoop/hive/ql/exec/PTFPartition.java       | 16 ++++-----
 .../hive/ql/exec/PTFRollingPartition.java       |  4 +--
 .../hadoop/hive/ql/exec/SkewJoinHandler.java    |  8 ++---
 .../ql/exec/persistence/FlatRowContainer.java   |  6 ++--
 .../persistence/HybridHashTableContainer.java   |  4 +--
 .../persistence/MapJoinBytesTableContainer.java | 12 +++----
 .../persistence/MapJoinEagerRowContainer.java   |  6 ++--
 .../hive/ql/exec/persistence/MapJoinKey.java    |  4 +--
 .../ql/exec/persistence/MapJoinKeyObject.java   |  4 +--
 .../persistence/MapJoinObjectSerDeContext.java  |  8 ++---
 .../persistence/MapJoinTableContainerSerDe.java | 14 ++++----
 .../hive/ql/exec/persistence/RowContainer.java  |  6 ++--
 .../hive/ql/exec/tez/ReduceRecordSource.java    |  6 ++--
 .../hive/ql/exec/vector/VectorizedSerde.java    | 12 +++----
 .../apache/hadoop/hive/ql/io/orc/OrcSerde.java  |  4 +--
 .../hive/ql/metadata/DefaultStorageHandler.java |  4 +--
 .../hive/ql/metadata/HiveStorageHandler.java    |  6 ++--
 .../hadoop/hive/ql/parse/PTFTranslator.java     |  8 ++---
 .../hadoop/hive/ql/plan/PTFDeserializer.java    |  6 ++--
 .../hadoop/hive/ql/plan/ptf/ShapeDetails.java   |  8 ++---
 .../hive/ql/udf/ptf/WindowingTableFunction.java |  4 +--
 .../exec/persistence/TestPTFRowContainer.java   |  4 +--
 .../hive/ql/io/orc/TestInputOutputFormat.java   | 16 ++++-----
 .../hadoop/hive/serde2/AbstractSerDe.java       |  2 +-
 .../hive/serde2/DefaultFetchFormatter.java      | 10 +++---
 .../apache/hadoop/hive/serde2/Deserializer.java |  5 ++-
 .../org/apache/hadoop/hive/serde2/SerDe.java    | 35 --------------------
 .../apache/hadoop/hive/serde2/Serializer.java   |  5 ++-
 .../hive/serde2/columnar/ColumnarSerDe.java     |  4 +--
 .../hive/serde2/lazy/LazySimpleSerDe.java       |  8 ++---
 .../hadoop/hive/serde2/TestStatsSerde.java      |  2 +-
 .../binarysortable/TestBinarySortableFast.java  | 14 ++++----
 .../binarysortable/TestBinarySortableSerDe.java |  6 ++--
 .../serde2/lazybinary/TestLazyBinaryFast.java   | 10 +++---
 .../serde2/lazybinary/TestLazyBinarySerDe.java  | 24 +++++++-------
 .../service/cli/operation/SQLOperation.java     |  6 ++--
 60 files changed, 199 insertions(+), 236 deletions(-)
----------------------------------------------------------------------
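The change is mechanical across all of these files: classes that declared
"implements SerDe" now declare "extends AbstractSerDe", and every remaining
use of the removed SerDe type becomes AbstractSerDe. As a rough sketch of the
resulting shape for a downstream custom serde (the package, class name, and
method bodies below are illustrative placeholders, not part of this commit):

// Illustrative sketch only. After HIVE-15167 a custom serde extends the
// AbstractSerDe base class rather than implementing the removed SerDe
// interface; the set of methods it must provide is unchanged.
package com.example;

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class ExampleSerDe extends AbstractSerDe {  // was: implements SerDe

  private ObjectInspector inspector;

  @Override
  public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    // Parse table properties (column names/types, etc.) and build the inspector.
  }

  @Override
  public Class<? extends Writable> getSerializedClass() {
    return Text.class;
  }

  @Override
  public Writable serialize(Object obj, ObjectInspector oi) throws SerDeException {
    return new Text(String.valueOf(obj));  // placeholder serialization
  }

  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    return blob.toString();  // placeholder deserialization
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return inspector;
  }

  @Override
  public SerDeStats getSerDeStats() {
    return null;  // no statistics collected in this sketch
  }
}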


http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
index 41a65ce..cdbc7f2 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
@@ -53,7 +53,7 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -148,7 +148,7 @@ public class AccumuloStorageHandler extends DefaultStorageHandler implements Hiv
 
   @SuppressWarnings("deprecation")
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return AccumuloSerDe.class;
   }
 

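On the storage-handler side the only visible change is the return type of
getSerDeClass(), as in the Accumulo handler above. A hypothetical handler
built on DefaultStorageHandler (names again illustrative) would now read:

// Hypothetical handler, mirroring the AccumuloStorageHandler change above.
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.serde2.AbstractSerDe;

public class ExampleStorageHandler extends DefaultStorageHandler {
  @Override
  public Class<? extends AbstractSerDe> getSerDeClass() {  // was: Class<? extends SerDe>
    return ExampleSerDe.class;  // the sketch serde above
  }
}
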
http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
index 40c9553..fcd819b 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
 import org.apache.hadoop.hive.accumulo.LazyAccumuloRow;
 import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;
  * Deserialization from Accumulo to LazyAccumuloRow for Hive.
  *
  */
-public class AccumuloSerDe implements SerDe {
+public class AccumuloSerDe extends AbstractSerDe {
 
   private AccumuloSerDeParameters accumuloSerDeParameters;
   private LazyAccumuloRow cachedRow;

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
index 639fc3a..62e5c81 100644
--- a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
+++ b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
@@ -23,7 +23,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.Text;
  */
 public class TestRegexSerDe extends TestCase {
 
-  private SerDe createSerDe(String fieldNames, String fieldTypes,
+  private AbstractSerDe createSerDe(String fieldNames, String fieldTypes,
       String inputRegex, String outputFormatString) throws Throwable {
     Properties schema = new Properties();
     schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames);
@@ -55,7 +55,7 @@ public class TestRegexSerDe extends TestCase {
   public void testRegexSerDe() throws Throwable {
     try {
       // Create the SerDe
-      SerDe serDe = createSerDe(
+      AbstractSerDe serDe = createSerDe(
           "host,identity,user,time,request,status,size,referer,agent",
           "string,string,string,string,string,string,string,string,string",
           "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") " 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index ac03099..8242385 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.slf4j.Logger;
@@ -50,7 +50,7 @@ public class DruidStorageHandler extends DefaultStorageHandler implements HiveMe
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return DruidSerDe.class;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
index 0a44aaa..6db13c3 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hive.druid;
 
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 
 /**
  * Storage handler for Druid to be used in tests. It cannot connect to
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.serde2.SerDe;
 public class QTestDruidStorageHandler extends DruidStorageHandler {
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return QTestDruidSerDe.class;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
index 466aabe..c2e7808 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -117,7 +117,7 @@ public class HBaseSerDe extends AbstractSerDe {
 
   /**
    * Initialize the SerDe given parameters.
-   * @see SerDe#initialize(Configuration, Properties)
+   * @see AbstractSerDe#initialize(Configuration, Properties)
    */
   @Override
   public void initialize(Configuration conf, Properties tbl)
@@ -268,7 +268,7 @@ public class HBaseSerDe extends AbstractSerDe {
    * Deserialize a row from the HBase Result writable to a LazyObject
    * @param result the HBase Result Writable containing the row
    * @return the deserialized object
-   * @see SerDe#deserialize(Writable)
+   * @see AbstractSerDe#deserialize(Writable)
    */
   @Override
   public Object deserialize(Writable result) throws SerDeException {

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index 1a1f780..9cad97a 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -65,7 +65,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
@@ -315,7 +315,7 @@ public class HBaseStorageHandler extends DefaultStorageHandler
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return HBaseSerDe.class;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
index 81c7943..235d186 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
@@ -27,7 +27,7 @@ import java.util.HashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -56,7 +56,7 @@ import org.slf4j.LoggerFactory;
 @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS,
                           serdeConstants.LIST_COLUMN_TYPES})
 
-public class HCatRecordSerDe implements SerDe {
+public class HCatRecordSerDe extends AbstractSerDe {
 
   private static final Logger LOG = LoggerFactory.getLogger(HCatRecordSerDe.class);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
index 1b47b28..ef17079 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -89,7 +89,7 @@ import org.slf4j.LoggerFactory;
                           serdeConstants.LIST_COLUMN_TYPES,
                           serdeConstants.TIMESTAMP_FORMATS})
 
-public class JsonSerDe implements SerDe {
+public class JsonSerDe extends AbstractSerDe {
 
   private static final Logger LOG = LoggerFactory.getLogger(JsonSerDe.class);
   private List<String> columnNames;

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
index 209d7bc..13c4354 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
@@ -22,7 +22,7 @@ package org.apache.hive.hcatalog.mapreduce;
 import java.io.IOException;
 
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -39,7 +39,7 @@ import org.apache.hive.hcatalog.data.HCatRecord;
 class DefaultRecordWriterContainer extends RecordWriterContainer {
 
   private final HiveStorageHandler storageHandler;
-  private final SerDe serDe;
+  private final AbstractSerDe serDe;
   private final OutputJobInfo jobInfo;
   private final ObjectInspector hcatRecordOI;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
index a7c9f29..b53dcf1 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
@@ -27,7 +27,7 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -56,7 +56,7 @@ class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContaine
   private int maxDynamicPartitions;
 
   private final Map<String, RecordWriter<? super WritableComparable<?>, ? super Writable>> baseDynamicWriters;
-  private final Map<String, SerDe> baseDynamicSerDe;
+  private final Map<String, AbstractSerDe> baseDynamicSerDe;
   private final Map<String, org.apache.hadoop.mapred.OutputCommitter> baseDynamicCommitters;
   private final Map<String, org.apache.hadoop.mapred.TaskAttemptContext> dynamicContexts;
   private final Map<String, ObjectInspector> dynamicObjectInspectors;
@@ -81,7 +81,7 @@ class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContaine
           + "HCatOutputFormat. Please make sure that method is called.");
     }
 
-    this.baseDynamicSerDe = new HashMap<String, SerDe>();
+    this.baseDynamicSerDe = new HashMap<String, AbstractSerDe>();
     this.baseDynamicWriters =
         new HashMap<String, RecordWriter<? super WritableComparable<?>, ? super Writable>>();
     this.baseDynamicCommitters = new HashMap<String, org.apache.hadoop.mapred.OutputCommitter>();
@@ -159,7 +159,7 @@ class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContaine
       localJobInfo = HCatBaseOutputFormat.getJobInfo(currTaskContext.getConfiguration());
 
       // Setup serDe.
-      SerDe currSerDe =
+      AbstractSerDe currSerDe =
           ReflectionUtils.newInstance(storageHandler.getSerDeClass(), currTaskContext.getJobConf());
       try {
         InternalUtil.initializeOutputSerDe(currSerDe, currTaskContext.getConfiguration(),

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
index 95ee3b4..3ecb608 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -82,8 +82,8 @@ class FileOutputFormatContainer extends OutputFormatContainer {
     StorerInfo storeInfo = jobInfo.getTableInfo().getStorerInfo();
     HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
       context.getConfiguration(), storeInfo);
-    Class<? extends SerDe> serde = storageHandler.getSerDeClass();
-    SerDe sd = (SerDe) ReflectionUtils.newInstance(serde,
+    Class<? extends AbstractSerDe> serde = storageHandler.getSerDeClass();
+    AbstractSerDe sd = (AbstractSerDe) ReflectionUtils.newInstance(serde,
       context.getConfiguration());
     context.getConfiguration().set("mapred.output.value.class",
       sd.getSerializedClass().getName());

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
index 2a883d6..b2abc5f 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
@@ -28,7 +28,7 @@ import java.util.Map;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.NullWritable;
@@ -54,7 +54,7 @@ import org.apache.hive.hcatalog.data.HCatRecord;
 abstract class FileRecordWriterContainer extends RecordWriterContainer {
 
   protected final HiveStorageHandler storageHandler;
-  protected final SerDe serDe;
+  protected final AbstractSerDe serDe;
   protected final ObjectInspector objectInspector;
 
   private final List<Integer> partColsToDel;
@@ -110,7 +110,7 @@ abstract class FileRecordWriterContainer extends RecordWriterContainer {
     LocalFileWriter localFileWriter = getLocalFileWriter(value);
     RecordWriter localWriter = localFileWriter.getLocalWriter();
     ObjectInspector localObjectInspector = localFileWriter.getLocalObjectInspector();
-    SerDe localSerDe = localFileWriter.getLocalSerDe();
+    AbstractSerDe localSerDe = localFileWriter.getLocalSerDe();
     OutputJobInfo localJobInfo = localFileWriter.getLocalJobInfo();
 
     for (Integer colToDel : partColsToDel) {
@@ -129,11 +129,11 @@ abstract class FileRecordWriterContainer extends RecordWriterContainer {
   class LocalFileWriter {
     private RecordWriter localWriter;
     private ObjectInspector localObjectInspector;
-    private SerDe localSerDe;
+    private AbstractSerDe localSerDe;
     private OutputJobInfo localJobInfo;
 
     public LocalFileWriter(RecordWriter localWriter, ObjectInspector localObjectInspector,
-        SerDe localSerDe, OutputJobInfo localJobInfo) {
+        AbstractSerDe localSerDe, OutputJobInfo localJobInfo) {
       this.localWriter = localWriter;
       this.localObjectInspector = localObjectInspector;
       this.localSerDe = localSerDe;
@@ -148,7 +148,7 @@ abstract class FileRecordWriterContainer extends RecordWriterContainer {
       return localObjectInspector;
     }
 
-    public SerDe getLocalSerDe() {
+    public AbstractSerDe getLocalSerDe() {
       return localSerDe;
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
index b970153..040906f 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -62,17 +62,17 @@ public class FosterStorageHandler extends DefaultStorageHandler {
 
   private Class<? extends InputFormat> ifClass;
   private Class<? extends OutputFormat> ofClass;
-  private Class<? extends SerDe> serDeClass;
+  private Class<? extends AbstractSerDe> serDeClass;
 
   public FosterStorageHandler(String ifName, String ofName, String serdeName) throws ClassNotFoundException {
     this((Class<? extends InputFormat>) JavaUtils.loadClass(ifName),
       (Class<? extends OutputFormat>) JavaUtils.loadClass(ofName),
-      (Class<? extends SerDe>) JavaUtils.loadClass(serdeName));
+      (Class<? extends AbstractSerDe>) JavaUtils.loadClass(serdeName));
   }
 
   public FosterStorageHandler(Class<? extends InputFormat> ifClass,
                 Class<? extends OutputFormat> ofClass,
-                Class<? extends SerDe> serDeClass) {
+                Class<? extends AbstractSerDe> serDeClass) {
     this.ifClass = ifClass;
     this.ofClass = ofClass;
     this.serDeClass = serDeClass;
@@ -89,7 +89,7 @@ public class FosterStorageHandler extends DefaultStorageHandler {
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return serDeClass;  //To change body of implemented methods use File | Settings | File Templates.
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
index 3100181..1230795 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -141,7 +141,7 @@ class InternalUtil {
 
   //TODO this has to find a better home, it's also hardcoded as default in hive would be nice
   // if the default was decided by the serde
-  static void initializeOutputSerDe(SerDe serDe, Configuration conf, OutputJobInfo jobInfo)
+  static void initializeOutputSerDe(AbstractSerDe serDe, Configuration conf, OutputJobInfo jobInfo)
     throws SerDeException {
     SerDeUtils.initializeSerDe(serDe, conf,
                                getSerdeProperties(jobInfo.getTableInfo(),

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index 24b952e..e409e75 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
 import org.apache.hadoop.hive.ql.io.RecordUpdater;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -152,7 +152,7 @@ public abstract class AbstractRecordWriter implements RecordWriter {
    * @return serde
    * @throws SerializationError
    */
-  public abstract SerDe getSerde() throws SerializationError;
+  public abstract AbstractSerDe getSerde() throws SerializationError;
 
   /**
    * Encode a record as an Object that Hive can read with the ObjectInspector associated with the

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
index 87eb4c4..58fba4f 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
@@ -270,7 +270,7 @@ public class DelimitedInputWriter extends AbstractRecordWriter {
   }
 
   @Override
-  public SerDe getSerde() {
+  public AbstractSerDe getSerde() {
     return serde;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
index 31212ee..13756e2 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
@@ -21,7 +21,7 @@ package org.apache.hive.hcatalog.streaming;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -98,7 +98,7 @@ public class StrictJsonWriter extends AbstractRecordWriter {
   }
 
   @Override
-  public SerDe getSerde() {
+  public AbstractSerDe getSerde() {
     return serde;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
----------------------------------------------------------------------
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
index 4f6985c..3efe424 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
 import org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -248,12 +248,12 @@ public class ColumnarStorageBench {
    * methods.
    */
   private class StorageFormatTest {
-    private SerDe serDe;
+    private AbstractSerDe serDe;
     private JobConf jobConf;
     private HiveOutputFormat outputFormat;
     private InputFormat inputFormat;
 
-    public StorageFormatTest(SerDe serDeImpl, HiveOutputFormat outputFormatImpl, InputFormat inputFormatImpl) throws SerDeException {
+    public StorageFormatTest(AbstractSerDe serDeImpl, HiveOutputFormat outputFormatImpl, InputFormat inputFormatImpl) throws SerDeException {
       jobConf = new JobConf();
       serDe = serDeImpl;
       outputFormat = outputFormatImpl;

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
----------------------------------------------------------------------
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java b/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
index 10d7c94..ee92f3e 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.llap.FieldDesc;
 import org.apache.hadoop.hive.llap.Schema;
 import org.apache.hadoop.hive.llap.TypeDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -60,7 +60,7 @@ public class LlapRowRecordReader implements RecordReader<NullWritable, Row> {
   protected final Configuration conf;
   protected final RecordReader<NullWritable, Text> reader;
   protected final Schema schema;
-  protected final SerDe serde;
+  protected final AbstractSerDe serde;
   protected final Text textData = new Text();
 
   public LlapRowRecordReader(Configuration conf, Schema schema, RecordReader<NullWritable, Text> reader) throws IOException {
@@ -147,7 +147,7 @@ public class LlapRowRecordReader implements RecordReader<NullWritable, Row> {
     return schema;
   }
 
-  protected SerDe initSerDe(Configuration conf) throws SerDeException {
+  protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
     Properties props = new Properties();
     StringBuffer columnsBuffer = new StringBuffer();
     StringBuffer typesBuffer = new StringBuffer();
@@ -166,7 +166,7 @@ public class LlapRowRecordReader implements RecordReader<NullWritable, Row> {
     props.put(serdeConstants.LIST_COLUMNS, columns);
     props.put(serdeConstants.LIST_COLUMN_TYPES, types);
     props.put(serdeConstants.ESCAPE_CHAR, "\\");
-    SerDe serde = new LazySimpleSerDe();
+    AbstractSerDe serde = new LazySimpleSerDe();
     serde.initialize(conf, props);
 
     return serde;

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
index deb7c76..ac5331e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.hive.ql.plan.HashTableSinkDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -180,7 +180,7 @@ public class HashTableSinkOperator extends TerminalOperator<HashTableSinkDesc> i
     }
     try {
       TableDesc keyTableDesc = conf.getKeyTblDesc();
-      SerDe keySerde = (SerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
+      AbstractSerDe keySerde = (AbstractSerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
           null);
       SerDeUtils.initializeSerDe(keySerde, null, keyTableDesc.getProperties(), null);
       MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerde, false);
@@ -190,7 +190,7 @@ public class HashTableSinkOperator extends TerminalOperator<HashTableSinkDesc> i
         }
         mapJoinTables[pos] = new HashMapWrapper(hconf, -1);
         TableDesc valueTableDesc = conf.getValueTblFilteredDescs().get(pos);
-        SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
+        AbstractSerDe valueSerDe = (AbstractSerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
         SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
         mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyContext, new MapJoinObjectSerDeContext(
             valueSerDe, hasFilter(pos)));

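The operator-side updates follow the same pattern: reflective instantiation
and SerDeUtils initialization are unchanged, and only the cast target moves
from SerDe to AbstractSerDe. Roughly, as a fragment (assuming keyTableDesc is
in scope as in the hunk above):

// Fragment of the pattern used in HashTableSinkOperator/MapJoinOperator;
// only the cast type changed in this commit.
AbstractSerDe keySerde = (AbstractSerDe) ReflectionUtils.newInstance(
    keyTableDesc.getDeserializerClass(), null);
SerDeUtils.initializeSerDe(keySerde, null, keyTableDesc.getProperties(), null);
MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerde, false);

The cast relies on the configured serde extending AbstractSerDe, which the
built-in serdes do once this change is applied.
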
http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
index 0aaa51a..6cbcab6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -276,13 +276,13 @@ public class JoinUtil {
     return spillTableDesc[alias];
   }
 
-  public static SerDe getSpillSerDe(byte alias, TableDesc[] spillTableDesc,
+  public static AbstractSerDe getSpillSerDe(byte alias, TableDesc[] spillTableDesc,
       JoinDesc conf, boolean noFilter) {
     TableDesc desc = getSpillTableDesc(alias, spillTableDesc, conf, noFilter);
     if (desc == null) {
       return null;
     }
-    SerDe sd = (SerDe) ReflectionUtil.newInstance(desc.getDeserializerClass(),
+    AbstractSerDe sd = (AbstractSerDe) ReflectionUtil.newInstance(desc.getDeserializerClass(),
         null);
     try {
       SerDeUtils.initializeSerDe(sd, null, desc.getProperties(), null);
@@ -344,7 +344,7 @@ public class JoinUtil {
       JoinDesc conf,boolean noFilter, Reporter reporter) throws HiveException {
 
     TableDesc tblDesc = JoinUtil.getSpillTableDesc(alias,spillTableDesc,conf, noFilter);
-    SerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf, noFilter);
+    AbstractSerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf, noFilter);
 
     if (serde == null) {
       containerSize = -1;

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index 416606e..07aa2ea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -58,7 +58,7 @@ import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -275,7 +275,7 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implem
 
     try {
       TableDesc keyTableDesc = conf.getKeyTblDesc();
-      SerDe keySerializer = (SerDe) ReflectionUtil.newInstance(
+      AbstractSerDe keySerializer = (AbstractSerDe) ReflectionUtil.newInstance(
           keyTableDesc.getDeserializerClass(), null);
       SerDeUtils.initializeSerDe(keySerializer, null, keyTableDesc.getProperties(), null);
       MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerializer, false);
@@ -289,7 +289,7 @@ public class MapJoinOperator extends AbstractMapJoinOperator<MapJoinDesc> implem
         } else {
           valueTableDesc = conf.getValueFilteredTblDescs().get(pos);
         }
-        SerDe valueSerDe = (SerDe) ReflectionUtil.newInstance(
+        AbstractSerDe valueSerDe = (AbstractSerDe) ReflectionUtil.newInstance(
             valueTableDesc.getDeserializerClass(), null);
         SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
         MapJoinObjectSerDeContext valueContext =

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
index 8366ea7..f418a7f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.ql.plan.ptf.PartitionDef;
 import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -403,7 +403,7 @@ public class PTFOperator extends Operator<PTFDesc> implements Serializable {
       ObjectInspector inputOI = conf.getStartOfChain() == tabDef ?
           inputObjInspectors[0] : inputDef.getOutputShape().getOI();
 
-      SerDe serde = conf.isMapSide() ? tabDef.getInput().getOutputShape().getSerde() :
+      AbstractSerDe serde = conf.isMapSide() ? tabDef.getInput().getOutputShape().getSerde() :
         tabDef.getRawInputShape().getSerde();
       StructObjectInspector outputOI = conf.isMapSide() ? tabDef.getInput().getOutputShape().getOI() :
         tabDef.getRawInputShape().getOI();

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
index 0d0211f..edcb8f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.persistence.PTFRowContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -42,20 +42,20 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 public class PTFPartition {
   protected static Logger LOG = LoggerFactory.getLogger(PTFPartition.class);
 
-  SerDe serDe;
+  AbstractSerDe serDe;
   StructObjectInspector inputOI;
   StructObjectInspector outputOI;
   private final PTFRowContainer<List<Object>> elems;
 
   protected PTFPartition(Configuration cfg,
-      SerDe serDe, StructObjectInspector inputOI,
+      AbstractSerDe serDe, StructObjectInspector inputOI,
       StructObjectInspector outputOI)
       throws HiveException {
     this(cfg, serDe, inputOI, outputOI, true);
   }
   
   protected PTFPartition(Configuration cfg,
-      SerDe serDe, StructObjectInspector inputOI,
+      AbstractSerDe serDe, StructObjectInspector inputOI,
       StructObjectInspector outputOI,
       boolean createElemContainer)
       throws HiveException {
@@ -76,7 +76,7 @@ public class PTFPartition {
     elems.clearRows();
   }
 
-  public SerDe getSerDe() {
+  public AbstractSerDe getSerDe() {
     return serDe;
   }
 
@@ -239,7 +239,7 @@ public class PTFPartition {
   }
 
   public static PTFPartition create(Configuration cfg,
-      SerDe serDe,
+      AbstractSerDe serDe,
       StructObjectInspector inputOI,
       StructObjectInspector outputOI)
       throws HiveException {
@@ -247,7 +247,7 @@ public class PTFPartition {
   }
   
   public static PTFRollingPartition createRolling(Configuration cfg,
-      SerDe serDe,
+      AbstractSerDe serDe,
       StructObjectInspector inputOI,
       StructObjectInspector outputOI,
       int precedingSpan,
@@ -256,7 +256,7 @@ public class PTFPartition {
     return new PTFRollingPartition(cfg, serDe, inputOI, outputOI, precedingSpan, followingSpan);
   }
 
-  public static StructObjectInspector setupPartitionOutputOI(SerDe serDe,
+  public static StructObjectInspector setupPartitionOutputOI(AbstractSerDe serDe,
       StructObjectInspector tblFnOI) throws SerDeException {
     return (StructObjectInspector) ObjectInspectorUtils.getStandardObjectInspector(tblFnOI,
         ObjectInspectorCopyOption.WRITABLE);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
index ad1cf24..67b3255 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
@@ -24,7 +24,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -70,7 +70,7 @@ public class PTFRollingPartition extends PTFPartition {
    */
   List<Object> currWindow;
 
-  protected PTFRollingPartition(Configuration cfg, SerDe serDe,
+  protected PTFRollingPartition(Configuration cfg, AbstractSerDe serDe,
       StructObjectInspector inputOI, StructObjectInspector outputOI,
       int startPos, int endPos) throws HiveException {
     super(cfg, serDe, inputOI, outputOI, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
index 0ff6659..7fad34f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -83,7 +83,7 @@ public class SkewJoinHandler {
 
   private int skewKeyDefinition = -1;
   private Map<Byte, StructObjectInspector> skewKeysTableObjectInspector = null;
-  private Map<Byte, SerDe> tblSerializers = null;
+  private Map<Byte, AbstractSerDe> tblSerializers = null;
   private Map<Byte, TableDesc> tblDesc = null;
 
   private Map<Byte, Boolean> bigKeysExistingMap = null;
@@ -113,7 +113,7 @@ public class SkewJoinHandler {
     skewKeysTableObjectInspector = new HashMap<Byte, StructObjectInspector>(
         numAliases);
     tblDesc = desc.getSkewKeysValuesTables();
-    tblSerializers = new HashMap<Byte, SerDe>(numAliases);
+    tblSerializers = new HashMap<Byte, AbstractSerDe>(numAliases);
     bigKeysExistingMap = new HashMap<Byte, Boolean>(numAliases);
     taskId = Utilities.getTaskId(hconf);
 
@@ -137,7 +137,7 @@ public class SkewJoinHandler {
           .getStandardStructObjectInspector(keyColNames, skewTableKeyInspectors);
 
       try {
-        SerDe serializer = (SerDe) ReflectionUtils.newInstance(tblDesc.get(
+        AbstractSerDe serializer = (AbstractSerDe) ReflectionUtils.newInstance(tblDesc.get(
             alias).getDeserializerClass(), null);
         SerDeUtils.initializeSerDe(serializer, null, tblDesc.get(alias).getProperties(), null);
         tblSerializers.put((byte) i, serializer);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
index c491df3..9b1af1b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
@@ -31,7 +31,7 @@ import java.util.NoSuchElementException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -69,7 +69,7 @@ public class FlatRowContainer extends AbstractCollection<Object>
   /** Called when loading the hashtable. */
   public void add(MapJoinObjectSerDeContext context,
       BytesWritable value) throws HiveException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     isAliasFilterSet = !context.hasFilterTag(); // has tag => need to set later
     if (rowLength == UNKNOWN) {
       try {
@@ -197,7 +197,7 @@ public class FlatRowContainer extends AbstractCollection<Object>
     }
   }
 
-  private void read(SerDe serde, Writable writable, int rowOffset) throws HiveException {
+  private void read(AbstractSerDe serde, Writable writable, int rowOffset) throws HiveException {
     try {
       ObjectInspectorUtils.copyStructToArray(
           serde.deserialize(writable), serde.getObjectInspector(),

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
index 573dc08..04e89e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.WriteBuffers;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
@@ -1166,7 +1166,7 @@ public class HybridHashTableContainer
   @Override
   public void setSerde(MapJoinObjectSerDeContext keyCtx, MapJoinObjectSerDeContext valCtx)
       throws SerDeException {
-    SerDe keySerde = keyCtx.getSerDe(), valSerde = valCtx.getSerDe();
+    AbstractSerDe keySerde = keyCtx.getSerDe(), valSerde = valCtx.getSerDe();
 
     if (writeHelper == null) {
       LOG.info("Initializing container with " + keySerde.getClass().getName() + " and "

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
index a8aa71a..c86e5f5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.WriteBuffers;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
@@ -154,14 +154,14 @@ public class MapJoinBytesTableContainer
   }
 
   private static class KeyValueWriter implements KeyValueHelper {
-    private final SerDe keySerDe, valSerDe;
+    private final AbstractSerDe keySerDe, valSerDe;
     private final StructObjectInspector keySoi, valSoi;
     private final List<ObjectInspector> keyOis, valOis;
     private final Object[] keyObjs, valObjs;
     private final boolean hasFilterTag;
 
     public KeyValueWriter(
-        SerDe keySerDe, SerDe valSerDe, boolean hasFilterTag) throws SerDeException {
+        AbstractSerDe keySerDe, AbstractSerDe valSerDe, boolean hasFilterTag) throws SerDeException {
       this.keySerDe = keySerDe;
       this.valSerDe = valSerDe;
       keySoi = (StructObjectInspector)keySerDe.getObjectInspector();
@@ -221,10 +221,10 @@ public class MapJoinBytesTableContainer
   static class LazyBinaryKvWriter implements KeyValueHelper {
     private final LazyBinaryStruct.SingleFieldGetter filterGetter;
     private Writable key, value;
-    private final SerDe keySerDe;
+    private final AbstractSerDe keySerDe;
     private Boolean hasTag = null; // sanity check - we should not receive keys with tags
 
-    public LazyBinaryKvWriter(SerDe keySerDe, LazyBinaryStructObjectInspector valSoi,
+    public LazyBinaryKvWriter(AbstractSerDe keySerDe, LazyBinaryStructObjectInspector valSoi,
         boolean hasFilterTag) throws SerDeException {
       this.keySerDe = keySerDe;
       if (hasFilterTag) {
@@ -366,7 +366,7 @@ public class MapJoinBytesTableContainer
   @Override
   public void setSerde(MapJoinObjectSerDeContext keyContext, MapJoinObjectSerDeContext valueContext)
       throws SerDeException {
-    SerDe keySerde = keyContext.getSerDe(), valSerde = valueContext.getSerDe();
+    AbstractSerDe keySerde = keyContext.getSerDe(), valSerde = valueContext.getSerDe();
     if (writeHelper == null) {
       LOG.info("Initializing container with " + keySerde.getClass().getName() + " and "
           + valSerde.getClass().getName());
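
KeyValueWriter's constructor (middle hunk) pulls a StructObjectInspector off each
AbstractSerDe and collects per-field inspectors. That inspection step looks roughly
like the sketch below; helper and class names are illustrative:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    public class SerDeInspectionSketch {
      // An initialized serde exposes a struct inspector; each struct field
      // contributes one column-level ObjectInspector.
      static List<ObjectInspector> fieldInspectors(AbstractSerDe serde)
          throws SerDeException {
        StructObjectInspector soi =
            (StructObjectInspector) serde.getObjectInspector();
        List<ObjectInspector> ois = new ArrayList<ObjectInspector>();
        for (StructField f : soi.getAllStructFieldRefs()) {
          ois.add(f.getFieldObjectInspector());
        }
        return ois;
      }
    }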

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
index eaeae31..bb3c4be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
@@ -26,7 +26,7 @@ import java.util.ArrayList;
 import java.util.ConcurrentModificationException;
 import java.util.List;
 
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -134,7 +134,7 @@ public class MapJoinEagerRowContainer
 
   @SuppressWarnings("unchecked")
   public void read(MapJoinObjectSerDeContext context, Writable currentValue) throws SerDeException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     List<Object> value = (List<Object>)ObjectInspectorUtils.copyToStandardObject(serde.deserialize(currentValue),
         serde.getObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
     if(value == null) {
@@ -151,7 +151,7 @@ public class MapJoinEagerRowContainer
   @Override
   public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
   throws IOException, SerDeException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     ObjectInspector valueObjectInspector = context.getStandardOI();
     long numRows = rowCount();
     long numRowsWritten = 0L;
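
The read() hunk above shows the full deserialize-and-detach round trip under the new
type: deserialize the Writable, then copy to standard writable-backed objects so the
row no longer references the serde's internal buffers. As a standalone sketch
(helper names illustrative):

    import java.util.List;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
    import org.apache.hadoop.io.Writable;

    public class RowCopySketch {
      // Mirrors read() above: deserialize, then copy the result out of the
      // serde's representation into standard writable objects.
      @SuppressWarnings("unchecked")
      static List<Object> toStandardRow(AbstractSerDe serde, Writable value)
          throws SerDeException {
        return (List<Object>) ObjectInspectorUtils.copyToStandardObject(
            serde.deserialize(value), serde.getObjectInspector(),
            ObjectInspectorCopyOption.WRITABLE);
      }
    }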

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
index 9f27f56..1cd9021 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapperBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
@@ -56,7 +56,7 @@ public abstract class MapJoinKey {
   @SuppressWarnings("deprecation")
   public static MapJoinKey read(Output output, MapJoinObjectSerDeContext context,
       Writable writable) throws SerDeException, HiveException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     Object obj = serde.deserialize(writable);
     MapJoinKeyObject result = new MapJoinKeyObject();
     result.read(serde.getObjectInspector(), obj);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
index 7592f9e..ad7bd5d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapperBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -119,7 +119,7 @@ public class MapJoinKeyObject extends MapJoinKey {
   @Override
   public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
       throws IOException, SerDeException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     ObjectInspector objectInspector = context.getStandardOI();
     Writable container = serde.serialize(key, objectInspector);
     container.write(out);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
index f47d481..a112a68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.exec.persistence;
 
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -26,10 +26,10 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.Object
 @SuppressWarnings("deprecation")
 public class MapJoinObjectSerDeContext {
   private final ObjectInspector standardOI;
-  private final SerDe serde;
+  private final AbstractSerDe serde;
   private final boolean hasFilter;
 
-  public MapJoinObjectSerDeContext(SerDe serde, boolean hasFilter)
+  public MapJoinObjectSerDeContext(AbstractSerDe serde, boolean hasFilter)
       throws SerDeException {
     this.serde = serde;
     this.hasFilter = hasFilter;
@@ -47,7 +47,7 @@ public class MapJoinObjectSerDeContext {
   /**
    * @return the serde
    */
-  public SerDe getSerDe() {
+  public AbstractSerDe getSerDe() {
     return serde;
   }
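
With the constructor retyped, any concrete serde can be handed to the context
directly, since every Hive serde now extends AbstractSerDe. A hedged usage sketch,
assuming the serde has already been initialized before the context is built:

    import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;

    public class SerDeContextSketch {
      // hasFilter = false: values carry no short filter tag in this example.
      static MapJoinObjectSerDeContext valueContext(AbstractSerDe initializedSerde)
          throws SerDeException {
        return new MapJoinObjectSerDeContext(initializedSerde, false);
      }
    }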
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
index eb48dd7..83a4612 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastTableContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
@@ -66,8 +66,8 @@ public class MapJoinTableContainerSerDe {
    */
   public MapJoinPersistableTableContainer load(ObjectInputStream in)
       throws HiveException {
-    SerDe keySerDe = keyContext.getSerDe();
-    SerDe valueSerDe = valueContext.getSerDe();
+    AbstractSerDe keySerDe = keyContext.getSerDe();
+    AbstractSerDe valueSerDe = valueContext.getSerDe();
     MapJoinPersistableTableContainer tableContainer;
     try {
       String name = in.readUTF();
@@ -120,8 +120,8 @@ public class MapJoinTableContainerSerDe {
         return getDefaultEmptyContainer(keyContext, valueContext);
       }
 
-      SerDe keySerDe = keyContext.getSerDe();
-      SerDe valueSerDe = valueContext.getSerDe();
+      AbstractSerDe keySerDe = keyContext.getSerDe();
+      AbstractSerDe valueSerDe = valueContext.getSerDe();
       Writable keyContainer = keySerDe.getSerializedClass().newInstance();
       Writable valueContainer = valueSerDe.getSerializedClass().newInstance();
 
@@ -225,8 +225,8 @@ public class MapJoinTableContainerSerDe {
 
         FileStatus[] fileStatuses = fs.listStatus(folder);
         if (fileStatuses != null && fileStatuses.length > 0) {
-          SerDe keySerDe = keyContext.getSerDe();
-          SerDe valueSerDe = valueContext.getSerDe();
+          AbstractSerDe keySerDe = keyContext.getSerDe();
+          AbstractSerDe valueSerDe = valueContext.getSerDe();
           Writable key = keySerDe.getSerializedClass().newInstance();
           Writable value = valueSerDe.getSerializedClass().newInstance();
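
All three hunks in this file rely on the same idiom, visible in the context lines:
each AbstractSerDe reports the concrete Writable class it serializes to, and one
reusable instance of that class is allocated up front for the read loop. In
isolation (helper names illustrative):

    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.io.Writable;

    public class WritableContainerSketch {
      // One container per serde, reused across rows instead of allocating
      // a fresh Writable for every record read back from disk.
      static Writable newContainer(AbstractSerDe serde)
          throws InstantiationException, IllegalAccessException {
        return serde.getSerializedClass().newInstance();
      }
    }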
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
index e928719..c8a1a0d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -97,7 +97,7 @@ public class RowContainer<ROW extends List<Object>>
   private int itrCursor; // iterator cursor in the currBlock
   private int readBlockSize; // size of current read block
   private int addCursor; // append cursor in the lastBlock
-  private SerDe serde; // serialization/deserialization for the row
+  private AbstractSerDe serde; // serialization/deserialization for the row
   private ObjectInspector standardOI; // object inspector for the row
 
   private List<Object> keyObject;
@@ -160,7 +160,7 @@ public class RowContainer<ROW extends List<Object>>
   }
 
 
-  public void setSerDe(SerDe sd, ObjectInspector oi) {
+  public void setSerDe(AbstractSerDe sd, ObjectInspector oi) {
     this.serde = sd;
     this.standardOI = oi;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
index 7e41b7a..d7264c2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
@@ -79,7 +79,7 @@ public class ReduceRecordSource implements RecordSource {
 
   // Input value serde needs to be an array to support different SerDe
   // for different tags
-  private SerDe inputValueDeserializer;
+  private AbstractSerDe inputValueDeserializer;
 
   private TableDesc keyTableDesc;
   private TableDesc valueTableDesc;
@@ -151,7 +151,7 @@ public class ReduceRecordSource implements RecordSource {
 
       // We should initialize the SerDe with the TypeInfo when available.
       this.valueTableDesc = valueTableDesc;
-      inputValueDeserializer = (SerDe) ReflectionUtils.newInstance(
+      inputValueDeserializer = (AbstractSerDe) ReflectionUtils.newInstance(
           valueTableDesc.getDeserializerClass(), null);
       SerDeUtils.initializeSerDe(inputValueDeserializer, null,
           valueTableDesc.getProperties(), null);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java
index bff6200..9675cc8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -25,11 +26,10 @@ import org.apache.hadoop.io.Writable;
 /**
  * Serdes that support vectorized {@link VectorizedRowBatch} must implement this interface.
  */
-public interface VectorizedSerde {
+public abstract class VectorizedSerde extends AbstractSerDe {
+  public abstract Writable serializeVector(
+      VectorizedRowBatch vrg, ObjectInspector objInspector) throws SerDeException;
 
-  Writable serializeVector(VectorizedRowBatch vrg, ObjectInspector objInspector)
-      throws SerDeException;
-
-  void deserializeVector(Object rowBlob, int rowsInBlob, VectorizedRowBatch reuseBatch)
-      throws SerDeException;
+  public abstract void deserializeVector(
+      Object rowBlob, int rowsInBlob, VectorizedRowBatch reuseBatch) throws SerDeException;
 }
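
This is the one genuinely structural change in the patch: VectorizedSerde turns from
an interface into an abstract class extending AbstractSerDe, so a vectorized serde
declares a single supertype. A hypothetical no-op skeleton, assuming the usual set
of AbstractSerDe abstract methods at this version (all bodies are placeholders):

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.SerDeStats;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.io.Writable;

    public class NoopVectorizedSerde extends VectorizedSerde {
      // Row-level contract inherited from AbstractSerDe.
      @Override public void initialize(Configuration conf, Properties tbl) throws SerDeException { }
      @Override public Class<? extends Writable> getSerializedClass() { return null; }
      @Override public Writable serialize(Object obj, ObjectInspector oi) throws SerDeException { return null; }
      @Override public SerDeStats getSerDeStats() { return null; }
      @Override public Object deserialize(Writable blob) throws SerDeException { return null; }
      @Override public ObjectInspector getObjectInspector() throws SerDeException { return null; }

      // Batch-level contract declared by VectorizedSerde above.
      @Override public Writable serializeVector(VectorizedRowBatch vrg, ObjectInspector objInspector)
          throws SerDeException { return null; }
      @Override public void deserializeVector(Object rowBlob, int rowsInBlob, VectorizedRowBatch reuseBatch)
          throws SerDeException { }
    }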

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
index 59876e2..3ec9105 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -45,7 +45,7 @@ import org.apache.hadoop.io.Writable;
  * It transparently passes the object to/from the ORC file reader/writer.
  */
 @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES, OrcSerde.COMPRESSION})
-public class OrcSerde implements SerDe, VectorizedSerde {
+public class OrcSerde extends VectorizedSerde {
 
   private static final Logger LOG = LoggerFactory.getLogger(OrcSerde.class);
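
Worth noting: OrcSerde previously declared both supertypes ("implements SerDe,
VectorizedSerde"). With the SerDe interface removed and VectorizedSerde now an
abstract class, only the single "extends VectorizedSerde" form is possible, and
OrcSerde picks up the AbstractSerDe contract transitively through it.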
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
index e183bf3..82b78b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
@@ -23,7 +23,7 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
@@ -54,7 +54,7 @@ public class DefaultStorageHandler implements HiveStorageHandler {
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return LazySimpleSerDe.class;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
index 1eec32c..5975d0c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java
@@ -23,7 +23,7 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -59,9 +59,9 @@ public interface HiveStorageHandler extends Configurable {
   public Class<? extends OutputFormat> getOutputFormatClass();
 
   /**
-   * @return Class providing an implementation of {@link SerDe}
+   * @return Class providing an implementation of {@link AbstractSerDe}
    */
-  public Class<? extends SerDe> getSerDeClass();
+  public Class<? extends AbstractSerDe> getSerDeClass();
 
   /**
    * @return metadata hook implementation, or null if this
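
The interface now pins storage handlers to Class<? extends AbstractSerDe>. Any
handler built on DefaultStorageHandler keeps compiling as long as it returns a
concrete serde class; a hypothetical override for illustration:

    import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

    // Hypothetical handler; any class extending AbstractSerDe satisfies the
    // narrowed return type.
    public class ExampleStorageHandler extends DefaultStorageHandler {
      @Override
      public Class<? extends AbstractSerDe> getSerDeClass() {
        return LazyBinarySerDe.class;
      }
    }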

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
index f32d02b..519f10d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
@@ -81,7 +81,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
@@ -643,7 +643,7 @@ public class PTFTranslator {
       List<String> columnNames,
       RowResolver rr) throws SemanticException {
     Map<String, String> serdePropsMap = new LinkedHashMap<String, String>();
-    SerDe serde = null;
+    AbstractSerDe serde = null;
     ShapeDetails shp = new ShapeDetails();
 
     try {
@@ -806,13 +806,13 @@ public class PTFTranslator {
    * OI & Serde helper methods
    */
 
-  protected static SerDe createLazyBinarySerDe(Configuration cfg,
+  protected static AbstractSerDe createLazyBinarySerDe(Configuration cfg,
       StructObjectInspector oi, Map<String, String> serdePropsMap) throws SerDeException {
     serdePropsMap = serdePropsMap == null ? new LinkedHashMap<String, String>() : serdePropsMap;
 
     PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
 
-    SerDe serDe = new LazyBinarySerDe();
+    AbstractSerDe serDe = new LazyBinarySerDe();
     Properties p = new Properties();
     p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
         serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS));
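
createLazyBinarySerDe (second hunk) builds the serde by hand: describe the row shape
through the standard column-list properties, then initialize. Condensed into a
standalone sketch with example column strings; the real code derives them from the
ObjectInspector:

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.SerDeUtils;
    import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

    public class LazyBinarySerDeSketch {
      static AbstractSerDe create(Configuration cfg) throws SerDeException {
        AbstractSerDe serDe = new LazyBinarySerDe();
        Properties p = new Properties();
        p.setProperty(serdeConstants.LIST_COLUMNS, "x,y");             // example
        p.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string"); // example
        SerDeUtils.initializeSerDe(serDe, cfg, p, null);
        return serDe;
      }
    }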

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
index cfddb22..a793fea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
@@ -49,7 +49,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -265,8 +265,8 @@ public class PTFDeserializer {
       serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
     }
     try {
-      SerDe serDe =  ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName).
-          asSubclass(SerDe.class), hConf);
+      AbstractSerDe serDe =  ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName).
+          asSubclass(AbstractSerDe.class), hConf);
       SerDeUtils.initializeSerDe(serDe, hConf, serDeProps, null);
       shp.setSerde(serDe);
       StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serDe, OI);

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
index bc2ee83..7e3cebd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
@@ -24,7 +24,7 @@ import java.util.Map;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
 public class ShapeDetails {
@@ -32,7 +32,7 @@ public class ShapeDetails {
   Map<String, String> serdeProps;
   List<String> columnNames;
   transient StructObjectInspector OI;
-  transient SerDe serde;
+  transient AbstractSerDe serde;
   transient RowResolver rr;
   transient TypeCheckCtx typeCheckCtx;
 
@@ -68,11 +68,11 @@ public class ShapeDetails {
     OI = oI;
   }
 
-  public SerDe getSerde() {
+  public AbstractSerDe getSerde() {
     return serde;
   }
 
-  public void setSerde(SerDe serde) {
+  public void setSerde(AbstractSerDe serde) {
     this.serde = serde;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
index e9f8ff9..5cc84a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStreamingEvaluator.SumAvgEnhancer;
 import org.apache.hadoop.hive.ql.udf.generic.ISupportStreamingModeForWindowing;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -1618,7 +1618,7 @@ public class WindowingTableFunction extends TableFunctionEvaluator {
     StreamingState(Configuration cfg, StructObjectInspector inputOI,
         boolean isMapSide, WindowTableFunctionDef tabDef, int precedingSpan,
         int followingSpan) throws HiveException {
-      SerDe serde = isMapSide ? tabDef.getInput().getOutputShape().getSerde()
+      AbstractSerDe serde = isMapSide ? tabDef.getInput().getOutputShape().getSerde()
           : tabDef.getRawInputShape().getSerde();
       StructObjectInspector outputOI = isMapSide ? tabDef.getInput()
           .getOutputShape().getOI() : tabDef.getRawInputShape().getOI();

http://git-wip-us.apache.org/repos/asf/hive/blob/652ed7a7/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
index 0611072..e5a5bff 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java
@@ -27,7 +27,7 @@ import java.util.Random;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -47,7 +47,7 @@ public class TestPTFRowContainer {
   private static final String COL_NAMES = "x,y,z,a,b,v";
   private static final String COL_TYPES = "int,string,double,int,string,string";
 
-  static SerDe serDe;
+  static AbstractSerDe serDe;
   static Configuration cfg;
 
   @BeforeClass