Posted to commits@hive.apache.org by br...@apache.org on 2013/09/27 19:41:55 UTC

svn commit: r1526996 [10/29] - in /hive/branches/maven: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ bin/ bin/ext/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/test/results/clientposi...

Modified: hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (original)
+++ hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Fri Sep 27 17:41:42 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.metastore
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -50,6 +51,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.model.MRoleMap;
 import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege;
 import org.apache.hadoop.hive.metastore.model.MTablePrivilege;
+import org.apache.thrift.TException;
 
 /**
  * A wrapper around {@link org.apache.hadoop.hive.metastore.ObjectStore}
@@ -303,6 +305,13 @@ public class DummyRawStoreControlledComm
   }
 
   @Override
+  public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr,
+      String defaultPartitionName, short maxParts, Set<Partition> result) throws TException {
+    return objectStore.getPartitionsByExpr(
+        dbName, tblName, expr, defaultPartitionName, maxParts, result);
+  }
+
+  @Override
   public Table markPartitionForEvent(String dbName, String tblName,
       Map<String, String> partVals, PartitionEventType evtType)
       throws MetaException, UnknownTableException, InvalidPartitionException,

Modified: hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/branches/maven/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Fri Sep 27 17:41:42 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.metastore
 
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import junit.framework.Assert;
 
@@ -51,6 +52,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.model.MRoleMap;
 import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege;
 import org.apache.hadoop.hive.metastore.model.MTablePrivilege;
+import org.apache.thrift.TException;
 
 /**
  *
@@ -322,6 +324,12 @@ public class DummyRawStoreForJdoConnecti
   }
 
   @Override
+  public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr,
+      String defaultPartitionName, short maxParts, Set<Partition> result) throws TException {
+    return false;
+  }
+
+  @Override
   public Table markPartitionForEvent(String dbName, String tblName, Map<String, String> partVals,
       PartitionEventType evtType) throws MetaException, UnknownTableException,
       InvalidPartitionException, UnknownPartitionException {

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Sep 27 17:41:42 2013
@@ -158,14 +158,13 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.stringtemplate.v4.ST;
 
@@ -3423,11 +3422,13 @@ public class DDLTask extends Task<DDLWor
    */
   private void validateSerDe(String serdeName) throws HiveException {
     try {
-      Deserializer d = SerDeUtils.lookupDeserializer(serdeName);
+
+      Deserializer d = ReflectionUtils.newInstance(conf.getClassByName(serdeName).
+        asSubclass(Deserializer.class), conf);
       if (d != null) {
         LOG.debug("Found class for " + serdeName);
       }
-    } catch (SerDeException e) {
+    } catch (Exception e) {
       throw new HiveException("Cannot validate serde: " + serdeName, e);
     }
   }

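A minimal standalone sketch of the reflection-based SerDe lookup that validateSerDe() switches to above, assuming only the Hadoop/Hive classes the patch already imports; the wrapper class and method names here are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.util.ReflectionUtils;

    public class SerDeValidationSketch {
      // Resolves serdeName via the Configuration's classloader, checks that it
      // really is a Deserializer, and instantiates it. Any failure (class not
      // found, wrong type, no usable constructor) surfaces as an exception,
      // which the patch wraps in a HiveException.
      static Deserializer instantiate(Configuration conf, String serdeName) throws Exception {
        Class<? extends Deserializer> clazz =
            conf.getClassByName(serdeName).asSubclass(Deserializer.class);
        // ReflectionUtils.newInstance also passes conf to Configurable classes.
        return ReflectionUtils.newInstance(clazz, conf);
      }
    }

Catching plain Exception instead of SerDeException matches the patch: reflection can fail in more ways than the old SerDeUtils.lookupDeserializer() call could.
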
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java Fri Sep 27 17:41:42 2013
@@ -394,7 +394,7 @@ public class FetchOperator implements Se
       this.inputSplits = inputSplits;
 
       splitNum = 0;
-      serde = partDesc.getDeserializerClass().newInstance();
+      serde = partDesc.getDeserializer();
       serde.initialize(job, partDesc.getOverlayedProperties());
 
       if (currTbl != null) {
@@ -630,7 +630,7 @@ public class FetchOperator implements Se
       // Get the OI corresponding to all the partitions
       for (PartitionDesc listPart : listParts) {
         partition = listPart;
-        Deserializer partSerde = listPart.getDeserializerClass().newInstance();
+        Deserializer partSerde = listPart.getDeserializer();
         partSerde.initialize(job, listPart.getOverlayedProperties());
 
         partitionedTableOI = ObjectInspectorConverters.getConvertedOI(

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Fri Sep 27 17:41:42 2013
@@ -176,6 +176,7 @@ public final class FunctionRegistry {
    */
   static Map<String, FunctionInfo> mFunctions = Collections.synchronizedMap(new LinkedHashMap<String, FunctionInfo>());
 
+  static Set<Class<?>> nativeUdfs = Collections.synchronizedSet(new HashSet<Class<?>>());
   /*
    * PTF variables
    * */
@@ -486,6 +487,7 @@ public final class FunctionRegistry {
       FunctionInfo fI = new FunctionInfo(isNative, displayName,
           new GenericUDFBridge(displayName, isOperator, UDFClass.getName()));
       mFunctions.put(functionName.toLowerCase(), fI);
+      registerNativeStatus(fI);
     } else {
       throw new RuntimeException("Registering UDF Class " + UDFClass
           + " which does not extend " + UDF.class);
@@ -508,6 +510,7 @@ public final class FunctionRegistry {
       FunctionInfo fI = new FunctionInfo(isNative, functionName,
           (GenericUDF) ReflectionUtils.newInstance(genericUDFClass, null));
       mFunctions.put(functionName.toLowerCase(), fI);
+      registerNativeStatus(fI);
     } else {
       throw new RuntimeException("Registering GenericUDF Class "
           + genericUDFClass + " which does not extend " + GenericUDF.class);
@@ -530,6 +533,7 @@ public final class FunctionRegistry {
       FunctionInfo fI = new FunctionInfo(isNative, functionName,
           (GenericUDTF) ReflectionUtils.newInstance(genericUDTFClass, null));
       mFunctions.put(functionName.toLowerCase(), fI);
+      registerNativeStatus(fI);
     } else {
       throw new RuntimeException("Registering GenericUDTF Class "
           + genericUDTFClass + " which does not extend " + GenericUDTF.class);
@@ -955,8 +959,9 @@ public final class FunctionRegistry {
 
   public static void registerGenericUDAF(boolean isNative, String functionName,
       GenericUDAFResolver genericUDAFResolver) {
-    mFunctions.put(functionName.toLowerCase(), new FunctionInfo(isNative,
-        functionName.toLowerCase(), genericUDAFResolver));
+    FunctionInfo fi = new FunctionInfo(isNative, functionName.toLowerCase(), genericUDAFResolver);
+    mFunctions.put(functionName.toLowerCase(), fi);
+    registerNativeStatus(fi);
   }
 
   public static void registerTemporaryUDAF(String functionName,
@@ -970,9 +975,11 @@ public final class FunctionRegistry {
 
   public static void registerUDAF(boolean isNative, String functionName,
       Class<? extends UDAF> udafClass) {
-    mFunctions.put(functionName.toLowerCase(), new FunctionInfo(isNative,
+    FunctionInfo fi = new FunctionInfo(isNative,
         functionName.toLowerCase(), new GenericUDAFBridge(
-        (UDAF) ReflectionUtils.newInstance(udafClass, null))));
+        (UDAF) ReflectionUtils.newInstance(udafClass, null)));
+    mFunctions.put(functionName.toLowerCase(), fi);
+    registerNativeStatus(fi);
   }
 
   public static void unregisterTemporaryUDF(String functionName) throws HiveException {
@@ -1560,6 +1567,7 @@ public final class FunctionRegistry {
     FunctionInfo fI = new FunctionInfo(false, macroName,
         new GenericUDFMacro(macroName, body, colNames, colTypes));
     mFunctions.put(macroName.toLowerCase(), fI);
+    registerNativeStatus(fI);
   }
 
   /**
@@ -1717,6 +1725,7 @@ public final class FunctionRegistry {
   {
     FunctionInfo tInfo = new FunctionInfo(name, tFnCls);
     mFunctions.put(name.toLowerCase(), tInfo);
+    registerNativeStatus(tInfo);
   }
 
   /**
@@ -1738,4 +1747,21 @@ public final class FunctionRegistry {
     }
     return false;
   }
+
+  /**
+   * @param fnExpr Function expression.
+   * @return True iff the fnExpr represents a Hive built-in function.
+   */
+  public static boolean isNativeFuncExpr(ExprNodeGenericFuncDesc fnExpr) {
+    Class<?> udfClass = getUDFClassFromExprDesc(fnExpr);
+    if (udfClass == null) {
+      udfClass = getGenericUDFClassFromExprDesc(fnExpr);
+    }
+    return nativeUdfs.contains(udfClass);
+  }
+
+  private static void registerNativeStatus(FunctionInfo fi) {
+    if (!fi.isNative()) return;
+    nativeUdfs.add(fi.getFunctionClass());
+  }
 }

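The registry changes above thread registerNativeStatus() through every registration path so that a synchronized set records the classes of built-in (native) functions, which isNativeFuncExpr() then consults. A self-contained sketch of that bookkeeping using only the JDK; the mini-registry and its names are hypothetical:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public class MiniFunctionRegistry {
      // Mirrors FunctionRegistry.nativeUdfs: a synchronized set of UDF classes.
      private static final Set<Class<?>> nativeUdfs =
          Collections.synchronizedSet(new HashSet<Class<?>>());

      // Called from every register*() path; only native functions are recorded.
      static void register(Class<?> udfClass, boolean isNative) {
        if (isNative) {
          nativeUdfs.add(udfClass);
        }
      }

      // Mirrors isNativeFuncExpr(): a plain membership test on the UDF class.
      static boolean isNative(Class<?> udfClass) {
        return nativeUdfs.contains(udfClass);
      }
    }

PartitionPruner, later in this commit, uses exactly this test to decide whether a filter can be evaluated in the metastore or must fall back to the client.
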
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java Fri Sep 27 17:41:42 2013
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -316,7 +317,7 @@ public class JoinUtil {
       // remove the last ','
       colNames.setLength(colNames.length() - 1);
       colTypes.setLength(colTypes.length() - 1);
-      TableDesc tblDesc = new TableDesc(LazyBinarySerDe.class,
+      TableDesc tblDesc = new TableDesc(
           SequenceFileInputFormat.class, HiveSequenceFileOutputFormat.class,
           Utilities.makeProperties(
           org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, ""
@@ -324,7 +325,8 @@ public class JoinUtil {
           org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, colNames
           .toString(),
           org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
-          colTypes.toString()));
+          colTypes.toString(),
+          serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
       spillTableDesc[tag] = tblDesc;
     }
     return spillTableDesc;

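TableDesc no longer takes a SerDe class as its first constructor argument; the SerDe is instead named in the table properties under serdeConstants.SERIALIZATION_LIB. A hedged sketch of building such a property set with plain java.util.Properties (Hive itself goes through Utilities.makeProperties; the helper below is hypothetical):

    import java.util.Properties;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

    public class SpillTablePropsSketch {
      static Properties makeSpillTableProperties(String colNames, String colTypes) {
        Properties props = new Properties();
        props.setProperty(serdeConstants.SERIALIZATION_FORMAT, "" + (char) 1); // ctrl-A
        props.setProperty(serdeConstants.LIST_COLUMNS, colNames);
        props.setProperty(serdeConstants.LIST_COLUMN_TYPES, colTypes);
        // The SerDe now travels as a property instead of a constructor argument.
        props.setProperty(serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName());
        return props;
      }
    }
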
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Fri Sep 27 17:41:42 2013
@@ -186,12 +186,7 @@ public class MapOperator extends Operato
     opCtx.tableName = String.valueOf(partProps.getProperty("name"));
     opCtx.partName = String.valueOf(partSpec);
 
-    Class serdeclass = pd.getDeserializerClass();
-    if (serdeclass == null) {
-      String className = checkSerdeClassName(pd.getSerdeClassName(), opCtx.tableName);
-      serdeclass = hconf.getClassByName(className);
-    }
-
+    Class serdeclass = hconf.getClassByName(pd.getSerdeClassName());
     opCtx.deserializer = (Deserializer) serdeclass.newInstance();
     opCtx.deserializer.initialize(hconf, partProps);
 
@@ -285,14 +280,7 @@ public class MapOperator extends Operato
         Properties tblProps = tableDesc.getProperties();
         // If the partition does not exist, use table properties
         Properties partProps = isPartitioned(pd) ? pd.getOverlayedProperties() : tblProps;
-
-        Class sdclass = pd.getDeserializerClass();
-        if (sdclass == null) {
-          String className = checkSerdeClassName(pd.getSerdeClassName(),
-              pd.getProperties().getProperty("name"));
-          sdclass = hconf.getClassByName(className);
-        }
-
+        Class sdclass = hconf.getClassByName(pd.getSerdeClassName());
         Deserializer partDeserializer = (Deserializer) sdclass.newInstance();
         partDeserializer.initialize(hconf, partProps);
         StructObjectInspector partRawRowObjectInspector = (StructObjectInspector) partDeserializer
@@ -301,13 +289,8 @@ public class MapOperator extends Operato
         StructObjectInspector tblRawRowObjectInspector = tableDescOI.get(tableDesc);
         if ((tblRawRowObjectInspector == null) ||
             (identityConverterTableDesc.contains(tableDesc))) {
-          sdclass = tableDesc.getDeserializerClass();
-          if (sdclass == null) {
-            String className = checkSerdeClassName(tableDesc.getSerdeClassName(),
-                tableDesc.getProperties().getProperty("name"));
-            sdclass = hconf.getClassByName(className);
-          }
-          Deserializer tblDeserializer = (Deserializer) sdclass.newInstance();
+          sdclass = hconf.getClassByName(tableDesc.getSerdeClassName());
+          Deserializer tblDeserializer = (Deserializer) sdclass.newInstance();
           tblDeserializer.initialize(hconf, tblProps);
           tblRawRowObjectInspector =
               (StructObjectInspector) ObjectInspectorConverters.getConvertedOI(
@@ -336,14 +319,6 @@ public class MapOperator extends Operato
     return pd.getPartSpec() != null && !pd.getPartSpec().isEmpty();
   }
 
-  private String checkSerdeClassName(String className, String tableName) throws HiveException {
-    if (className == null || className.isEmpty()) {
-      throw new HiveException(
-          "SerDe class or the SerDe class name is not set for table: " + tableName);
-    }
-    return className;
-  }
-
   public void setChildren(Configuration hconf) throws HiveException {
 
     Path fpath = new Path(HiveConf.getVar(hconf,

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Fri Sep 27 17:41:42 2013
@@ -613,6 +613,8 @@ public abstract class Operator<T extends
         op.close(abort);
       }
 
+      out = null;
+
       LOG.info(id + " Close done");
     } catch (HiveException e) {
       e.printStackTrace();

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/UDAF.java Fri Sep 27 17:41:42 2013
@@ -49,6 +49,7 @@ package org.apache.hadoop.hive.ql.exec;
  * partial);
  * 
  */
+@Deprecated
 public class UDAF {
 
   /**

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Fri Sep 27 17:41:42 2013
@@ -129,10 +129,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.BaseWork;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -148,16 +145,10 @@ import org.apache.hadoop.hive.ql.plan.ap
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.stats.StatsFactory;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
@@ -586,6 +577,31 @@ public final class Utilities {
     return null;
   }
 
+  /**
+   * Serializes expression via Kryo.
+   * @param expr Expression.
+   * @return Bytes.
+   */
+  public static byte[] serializeExpressionToKryo(ExprNodeDesc expr) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    Output output = new Output(baos);
+    runtimeSerializationKryo.get().writeClassAndObject(output, expr);
+    output.close();
+    return baos.toByteArray();
+  }
+
+  /**
+   * Deserializes expression from Kryo.
+   * @param bytes Bytes containing the expression.
+   * @return The deserialized expression, or null if deserialization succeeded but the result is not an ExprNodeDesc.
+   */
+  public static ExprNodeDesc deserializeExpressionFromKryo(byte[] bytes) {
+    Input inp = new Input(new ByteArrayInputStream(bytes));
+    Object o = runtimeSerializationKryo.get().readClassAndObject(inp);
+    inp.close();
+    return (o instanceof ExprNodeDesc) ? (ExprNodeDesc)o : null;
+  }
+
   public static String serializeExpression(ExprNodeDesc expr) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     XMLEncoder encoder = new XMLEncoder(baos);
@@ -652,7 +668,6 @@ public final class Utilities {
     public void write(Kryo kryo, Output output, java.sql.Date sqlDate) {
       output.writeLong(sqlDate.getTime());
     }
-
   }
 
   private static class CommonTokenSerializer extends com.esotericsoftware.kryo.Serializer<CommonToken> {
@@ -940,17 +955,20 @@ public final class Utilities {
   }
 
   public static TableDesc getTableDesc(Table tbl) {
-    return (new TableDesc(tbl.getDeserializer().getClass(), tbl.getInputFormatClass(), tbl
-        .getOutputFormatClass(), tbl.getMetadata()));
+    Properties props = tbl.getMetadata();
+    props.put(serdeConstants.SERIALIZATION_LIB, tbl.getDeserializer().getClass().getName());
+    return (new TableDesc(tbl.getInputFormatClass(), tbl
+        .getOutputFormatClass(), props));
   }
 
   // column names and column types are all delimited by comma
   public static TableDesc getTableDesc(String cols, String colTypes) {
-    return (new TableDesc(LazySimpleSerDe.class, SequenceFileInputFormat.class,
+    return (new TableDesc(SequenceFileInputFormat.class,
         HiveSequenceFileOutputFormat.class, Utilities.makeProperties(
-        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
-        org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, cols,
-        org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES, colTypes)));
+        serdeConstants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
+        serdeConstants.LIST_COLUMNS, cols,
+        serdeConstants.LIST_COLUMN_TYPES, colTypes,
+        serdeConstants.SERIALIZATION_LIB, LazySimpleSerDe.class.getName())));
   }
 
   public static PartitionDesc getPartitionDesc(Partition part) throws HiveException {
@@ -2367,126 +2385,6 @@ public final class Utilities {
   }
 
   /**
-   * Check if a function can be pushed down to JDO.
-   * Now only {compares, AND, OR} are supported.
-   * @param func a generic function.
-   * @return true if this function can be pushed down to JDO filter.
-   */
-  private static boolean supportedJDOFuncs(GenericUDF func) {
-    // TODO: we might also want to add "not" and "between" here in future.
-    // TODO: change to GenericUDFBaseCompare once DN is upgraded
-    //       (see HIVE-2609 - in DN 2.0, substrings do not work in MySQL).
-    return func instanceof GenericUDFOPEqual ||
-           func instanceof GenericUDFOPNotEqual ||
-           func instanceof GenericUDFOPAnd ||
-           func instanceof GenericUDFOPOr;
-  }
-
-  /**
-   * Check if a function can be pushed down to JDO for integral types.
-   * Only {=, !=} are supported. lt/gt/etc. to be dealt with in HIVE-4888.
-   * @param func a generic function.
-   * @return true iff this function can be pushed down to JDO filter for integral types.
-   */
-  private static boolean doesJDOFuncSupportIntegral(GenericUDF func) {
-    // AND, OR etc. don't need to be specified here.
-    return func instanceof GenericUDFOPEqual ||
-           func instanceof GenericUDFOPNotEqual;
-  }
-
-  /**
-   * @param type type
-   * @param constant The constant, if any.
-   * @return true iff type is an integral type.
-   */
-  private static boolean isIntegralType(String type) {
-    return type.equals(serdeConstants.TINYINT_TYPE_NAME) ||
-           type.equals(serdeConstants.SMALLINT_TYPE_NAME) ||
-           type.equals(serdeConstants.INT_TYPE_NAME) ||
-           type.equals(serdeConstants.BIGINT_TYPE_NAME);
-  }
-
-  /**
-   * Check if the partition pruning expression can be pushed down to JDO filtering.
-   * The partition expression contains only partition columns.
-   * The criteria that an expression can be pushed down are that:
-   *  1) the expression only contains function specified in supportedJDOFuncs().
-   *     Now only {=, AND, OR} can be pushed down.
-   *  2) the partition column type and the constant type have to be String. This is
-   *     restriction by the current JDO filtering implementation.
-   * @param tab The table that contains the partition columns.
-   * @param expr the partition pruning expression
-   * @param parent parent UDF of expr if parent exists and contains a UDF; otherwise null.
-   * @return null if the partition pruning expression can be pushed down to JDO filtering.
-   */
-  public static String checkJDOPushDown(
-      Table tab, ExprNodeDesc expr, GenericUDF parent) {
-    boolean isConst = expr instanceof ExprNodeConstantDesc;
-    boolean isCol = !isConst && (expr instanceof ExprNodeColumnDesc);
-    boolean isIntegralSupported = (parent != null) && (isConst || isCol)
-        && doesJDOFuncSupportIntegral(parent);
-
-    // JDO filter now only support String typed literals, as well as integers
-    // for some operators; see Filter.g and ExpressionTree.java.
-    if (isConst) {
-      Object value = ((ExprNodeConstantDesc)expr).getValue();
-      if (value instanceof String) {
-        return null;
-      }
-      if (isIntegralSupported && isIntegralType(expr.getTypeInfo().getTypeName())) {
-        return null;
-      }
-      return "Constant " + value + " is not string "
-        + (isIntegralSupported ? "or integral ": "") + "type: " + expr.getTypeInfo().getTypeName();
-    } else if (isCol) {
-      TypeInfo type = expr.getTypeInfo();
-      if (type.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)
-          || (isIntegralSupported && isIntegralType(type.getTypeName()))) {
-        String colName = ((ExprNodeColumnDesc)expr).getColumn();
-        for (FieldSchema fs: tab.getPartCols()) {
-          if (fs.getName().equals(colName)) {
-            if (fs.getType().equals(serdeConstants.STRING_TYPE_NAME)
-                || (isIntegralSupported && isIntegralType(fs.getType()))) {
-              return null;
-            }
-            return "Partition column " + fs.getName() + " is not string "
-              + (isIntegralSupported ? "or integral ": "") + "type: " + fs.getType();
-          }
-        }
-        assert(false); // cannot find the partition column!
-     } else {
-        return "Column " + expr.getExprString() + " is not string "
-          + (isIntegralSupported ? "or integral ": "") + "type: " + type.getTypeName();
-     }
-    } else if (expr instanceof ExprNodeGenericFuncDesc) {
-      ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr;
-      GenericUDF func = funcDesc.getGenericUDF();
-      if (!supportedJDOFuncs(func)) {
-        return "Expression " + expr.getExprString() + " cannot be evaluated";
-      }
-      boolean allChildrenConstant = true;
-      List<ExprNodeDesc> children = funcDesc.getChildExprs();
-      for (ExprNodeDesc child: children) {
-        if (!(child instanceof ExprNodeConstantDesc)) {
-          allChildrenConstant = false;
-        }
-        String message = checkJDOPushDown(tab, child, func);
-        if (message != null) {
-          return message;
-        }
-      }
-
-      // If all the children of the expression are constants then JDO cannot parse the expression
-      // see Filter.g
-      if (allChildrenConstant) {
-        return "Expression " + expr.getExprString() + " has only constants as children.";
-      }
-      return null;
-    }
-    return "Expression " + expr.getExprString() + " cannot be evaluated";
-  }
-
-  /**
    * The check here is kind of not clean. It first uses a for loop to go through
    * all input formats, and chooses the ones that extend ReworkMapredInputFormat
    * to a set. It finally goes through the ReworkMapredInputFormat set, and calls

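serializeExpressionToKryo() and deserializeExpressionFromKryo() above are a plain writeClassAndObject/readClassAndObject round trip. A minimal sketch of the same round trip against the Kryo 2.x API directly, with a hypothetical String payload standing in for the ExprNodeDesc (Hive routes the real calls through its thread-local runtimeSerializationKryo):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;

    public class KryoRoundTripSketch {
      public static void main(String[] args) {
        Kryo kryo = new Kryo();

        // Serialize: class name plus object graph into a byte array.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Output output = new Output(baos);
        kryo.writeClassAndObject(output, "hypothetical payload");
        output.close();
        byte[] bytes = baos.toByteArray();

        // Deserialize, then type-check the result exactly as the patch does.
        Input input = new Input(new ByteArrayInputStream(bytes));
        Object o = kryo.readClassAndObject(input);
        input.close();
        String payload = (o instanceof String) ? (String) o : null;
        System.out.println(payload);
      }
    }
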
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/PTFRowContainer.java Fri Sep 27 17:41:42 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.io.Hive
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.PTFDeserializer;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
@@ -226,17 +227,15 @@ public class PTFRowContainer<Row extends
   public static TableDesc createTableDesc(StructObjectInspector oI) {
     Map<String,String> props = new HashMap<String,String>();
     PTFDeserializer.addOIPropertiestoSerDePropsMap(oI, props);
-    String colNames = props.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS);
-    String colTypes = props.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES);
-    TableDesc tblDesc = new TableDesc(LazyBinarySerDe.class,
+    String colNames = props.get(serdeConstants.LIST_COLUMNS);
+    String colTypes = props.get(serdeConstants.LIST_COLUMN_TYPES);
+    TableDesc tblDesc = new TableDesc(
         PTFSequenceFileInputFormat.class, PTFHiveSequenceFileOutputFormat.class,
         Utilities.makeProperties(
-        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, ""
-        + Utilities.ctrlaCode,
-        org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, colNames
-        .toString(),
-        org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
-        colTypes.toString()));
+        serdeConstants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
+        serdeConstants.LIST_COLUMNS, colNames.toString(),
+        serdeConstants.LIST_COLUMN_TYPES, colTypes.toString(),
+        serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
     return tblDesc;
   }
 

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java Fri Sep 27 17:41:42 2013
@@ -307,8 +307,8 @@ public class CombineHiveInputFormat<K ex
       Class inputFormatClass = part.getInputFileFormatClass();
       String inputFormatClassName = inputFormatClass.getName();
       InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
-      String deserializerClassName = part.getDeserializerClass() == null ? null
-          : part.getDeserializerClass().getName();
+      String deserializerClassName = part.getDeserializer() == null ? null
+          : part.getDeserializer().getClass().getName();
 
       // Since there is no easy way of knowing whether MAPREDUCE-1597 is present in the tree or not,
       // we use a configuration variable for the same

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Fri Sep 27 17:41:42 2013
@@ -49,6 +49,7 @@ import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
@@ -80,6 +81,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.session.CreateTableAutomaticGrant;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -1899,13 +1901,43 @@ private void constructOneLBLocationMap(F
 
     List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().listPartitionsByFilter(
         tbl.getDbName(), tbl.getTableName(), filter, (short)-1);
-    List<Partition> results = new ArrayList<Partition>(tParts.size());
+    return convertFromMetastore(tbl, tParts, null);
+  }
 
-    for (org.apache.hadoop.hive.metastore.api.Partition tPart: tParts) {
-      Partition part = new Partition(tbl, tPart);
-      results.add(part);
+  private static List<Partition> convertFromMetastore(Table tbl,
+      List<org.apache.hadoop.hive.metastore.api.Partition> src,
+      List<Partition> dest) throws HiveException {
+    if (src == null) {
+      return dest;
+    }
+    if (dest == null) {
+      dest = new ArrayList<Partition>(src.size());
     }
-    return results;
+    for (org.apache.hadoop.hive.metastore.api.Partition tPart : src) {
+      dest.add(new Partition(tbl, tPart));
+    }
+    return dest;
+  }
+
+  /**
+   * Get a list of Partitions by expr.
+   * @param tbl The table containing the partitions.
+   * @param expr A serialized expression for partition predicates.
+   * @param conf Hive config.
+   * @param result the resulting list of partitions
+   * @return whether the resulting list contains partitions which may or may not match the expr
+   */
+  public boolean getPartitionsByExpr(Table tbl, ExprNodeDesc expr, HiveConf conf,
+      List<Partition> result) throws HiveException, TException {
+    assert result != null;
+    byte[] exprBytes = Utilities.serializeExpressionToKryo(expr);
+    String defaultPartitionName = HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME);
+    List<org.apache.hadoop.hive.metastore.api.Partition> msParts =
+        new ArrayList<org.apache.hadoop.hive.metastore.api.Partition>();
+    boolean hasUnknownParts = getMSC().listPartitionsByExpr(tbl.getDbName(),
+        tbl.getTableName(), exprBytes, defaultPartitionName, (short)-1, msParts);
+    convertFromMetastore(tbl, msParts, result);
+    return hasUnknownParts;
   }
 
   public void validatePartitionNameCharacters(List<String> partVals) throws HiveException {

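A hedged sketch of how a caller might drive the new Hive.getPartitionsByExpr() API, assuming a Table and a predicate ExprNodeDesc are already in hand; the wrapper class and variable names are hypothetical, and the signature is taken from the patch above:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

    public class PruneByExprSketch {
      static List<Partition> prune(Table tbl, ExprNodeDesc predicate, HiveConf conf)
          throws Exception {
        List<Partition> parts = new ArrayList<Partition>();
        // A true return means some listed partitions may not actually match the
        // predicate (e.g. the metastore could only apply part of the expression),
        // so callers must treat the result as containing "unknown" partitions.
        boolean hasUnknownParts = Hive.get().getPartitionsByExpr(tbl, predicate, conf, parts);
        System.out.println("unknown partitions possible: " + hasUnknownParts);
        return parts;
      }
    }
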
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java Fri Sep 27 17:41:42 2013
@@ -54,6 +54,7 @@ import org.apache.hadoop.hive.ql.plan.Ma
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.NullStructSerDe;
 
 /**
@@ -210,8 +211,8 @@ public class MetadataOnlyOptimizer imple
       if (desc != null) {
         desc.setInputFileFormatClass(OneNullRowInputFormat.class);
         desc.setOutputFileFormatClass(HiveIgnoreKeyTextOutputFormat.class);
-        desc.setDeserializerClass(NullStructSerDe.class);
-        desc.setSerdeClassName(NullStructSerDe.class.getName());
+        desc.getProperties().setProperty(serdeConstants.SERIALIZATION_LIB,
+          NullStructSerDe.class.getName());
       }
       return desc;
     }

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Fri Sep 27 17:41:42 2013
@@ -32,6 +32,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -241,6 +242,19 @@ public class PartitionPruner implements 
     return expr;
   }
 
+  /**
+   * @param expr Expression.
+   * @return True iff expr contains any non-native user-defined functions.
+   */
+  static private boolean hasUserFunctions(ExprNodeDesc expr) {
+    if (!(expr instanceof ExprNodeGenericFuncDesc)) return false;
+    if (!FunctionRegistry.isNativeFuncExpr((ExprNodeGenericFuncDesc)expr)) return true;
+    for (ExprNodeDesc child : expr.getChildren()) {
+      if (hasUserFunctions(child)) return true;
+    }
+    return false;
+  }
+
   private static PrunedPartitionList getPartitionsFromServer(Table tab,
       ExprNodeDesc prunerExpr, HiveConf conf, String alias) throws HiveException {
     try {
@@ -258,36 +272,55 @@ public class PartitionPruner implements 
       }
 
       if (prunerExpr == null) {
-        // This can happen when hive.mapred.mode=nonstrict and there is no predicates at all.
+        // Non-strict mode, and there are no predicates at all - get everything.
         return new PrunedPartitionList(tab, Hive.get().getAllPartitionsForPruner(tab), false);
       }
 
-      // Remove virtual columns. See javadoc for details.
+      // Replace virtual columns with nulls. See javadoc for details.
       prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab));
-      // Remove all unknown parts e.g. non-partition columns. See javadoc for details.
+      // Remove all parts that are not partition columns. See javadoc for details.
       ExprNodeDesc compactExpr = compactExpr(prunerExpr.clone());
       String oldFilter = prunerExpr.getExprString();
       if (compactExpr == null) {
-        // This could happen when hive.mapred.mode=nonstrict and all the predicates
-        // are on non-partition columns.
+        // Non-strict mode, and all the predicates are on non-partition columns - get everything.
         LOG.debug("Filter " + oldFilter + " was null after compacting");
         return new PrunedPartitionList(tab, Hive.get().getAllPartitionsForPruner(tab), true);
       }
 
-      Set<Partition> partitions = new LinkedHashSet<Partition>();
+      LOG.debug("Filter w/ compacting: " + compactExpr.getExprString()
+        + "; filter w/o compacting: " + oldFilter);
+
+      // Finally, check the filter for non-built-in UDFs. If these are present, we cannot
+      // do filtering on the server, and have to fall back to client path.
+      boolean doEvalClientSide = hasUserFunctions(compactExpr);
+
+      // Now filter.
+      List<Partition> partitions = new ArrayList<Partition>();
       boolean hasUnknownPartitions = false;
-      String message = Utilities.checkJDOPushDown(tab, compactExpr, null);
-      if (message != null) {
-        LOG.info(ErrorMsg.INVALID_JDO_FILTER_EXPRESSION.getMsg("by condition '"
-            + message + "'"));
-        hasUnknownPartitions = pruneBySequentialScan(tab, partitions, prunerExpr, conf);
-      } else {
-        String filter = compactExpr.getExprString();
-        LOG.debug("Filter w/ compacting: " + filter +"; filter w/o compacting: " + oldFilter);
-        hasUnknownPartitions = !filter.equals(oldFilter);
-        partitions.addAll(Hive.get().getPartitionsByFilter(tab, filter));
+      PerfLogger perfLogger = PerfLogger.getPerfLogger();
+      if (!doEvalClientSide) {
+        perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
+        try {
+          hasUnknownPartitions = Hive.get().getPartitionsByExpr(
+              tab, compactExpr, conf, partitions);
+        } catch (IMetaStoreClient.IncompatibleMetastoreException ime) {
+          // TODO: backward compat for Hive <= 0.12. Can be removed later.
+          LOG.warn("Metastore doesn't support getPartitionsByExpr", ime);
+          doEvalClientSide = true;
+        } finally {
+          perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
+        }
       }
-      return new PrunedPartitionList(tab, partitions, hasUnknownPartitions);
+      if (doEvalClientSide) {
+        // Either we have user functions, or metastore is old version - filter names locally.
+        hasUnknownPartitions = pruneBySequentialScan(tab, partitions, compactExpr, conf);
+      }
+      // The partitions are "unknown" if the call says so due to the expression
+      // evaluator returning null for a partition, or if we sent a partial expression to
+      // metastore and so some partitions may have no data based on other filters.
+      boolean isPruningByExactFilter = oldFilter.equals(compactExpr.getExprString());
+      return new PrunedPartitionList(tab, new LinkedHashSet<Partition>(partitions),
+          hasUnknownPartitions || !isPruningByExactFilter);
     } catch (HiveException e) {
       throw e;
     } catch (Exception e) {
@@ -297,15 +330,15 @@ public class PartitionPruner implements 
 
   /**
    * Pruning partition by getting the partition names first and pruning using Hive expression
-   * evaluator.
+   * evaluator on client.
    * @param tab the table containing the partitions.
    * @param partitions the resulting partitions.
    * @param prunerExpr the SQL predicate that involves partition columns.
    * @param conf Hive Configuration object, can not be NULL.
    * @return true iff the partition pruning expression contains non-partition columns.
    */
-  static private boolean pruneBySequentialScan(Table tab, Set<Partition> partitions,
-      ExprNodeDesc prunerExpr, HiveConf conf) throws Exception {
+  static private boolean pruneBySequentialScan(Table tab, List<Partition> partitions,
+      ExprNodeDesc prunerExpr, HiveConf conf) throws HiveException, MetaException {
     PerfLogger perfLogger = PerfLogger.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PRUNE_LISTING);
 

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Sep 27 17:41:42 2013
@@ -1915,10 +1915,9 @@ public class DDLSemanticAnalyzer extends
     String[] colTypes = schema.split("#");
     prop.setProperty("columns", colTypes[0]);
     prop.setProperty("columns.types", colTypes[1]);
-
+    prop.setProperty(serdeConstants.SERIALIZATION_LIB, LazySimpleSerDe.class.getName());
     FetchWork fetch = new FetchWork(ctx.getResFile().toString(), new TableDesc(
-        LazySimpleSerDe.class, TextInputFormat.class,
-        IgnoreKeyTextOutputFormat.class, prop), -1);
+        TextInputFormat.class, IgnoreKeyTextOutputFormat.class, prop), -1);
     fetch.setSerializationNullFormat(" ");
     return (FetchTask) TaskFactory.get(fetch, conf);
   }

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java Fri Sep 27 17:41:42 2013
@@ -1097,49 +1097,6 @@ public class PTFTranslator {
     return componentInvocations;
   }
 
-
-  /*
-   * Lead Lag functionality
-   */
-  public static class LeadLagInfo {
-    /*
-     * list of LL invocations in a Query.
-     */
-    List<ExprNodeGenericFuncDesc> leadLagExprs;
-    /*
-     * map from the Select Expr Node to the LL Function invocations in it.
-     */
-    Map<ExprNodeDesc, List<ExprNodeGenericFuncDesc>> mapTopExprToLLFunExprs;
-
-    private void addLeadLagExpr(ExprNodeGenericFuncDesc llFunc) {
-      leadLagExprs = leadLagExprs == null ? new ArrayList<ExprNodeGenericFuncDesc>() : leadLagExprs;
-      leadLagExprs.add(llFunc);
-    }
-
-    public List<ExprNodeGenericFuncDesc> getLeadLagExprs() {
-      return leadLagExprs;
-    }
-
-    public void addLLFuncExprForTopExpr(ExprNodeDesc topExpr, ExprNodeGenericFuncDesc llFuncExpr) {
-      addLeadLagExpr(llFuncExpr);
-      mapTopExprToLLFunExprs = mapTopExprToLLFunExprs == null ?
-          new HashMap<ExprNodeDesc, List<ExprNodeGenericFuncDesc>>() : mapTopExprToLLFunExprs;
-      List<ExprNodeGenericFuncDesc> funcList = mapTopExprToLLFunExprs.get(topExpr);
-      if (funcList == null) {
-        funcList = new ArrayList<ExprNodeGenericFuncDesc>();
-        mapTopExprToLLFunExprs.put(topExpr, funcList);
-      }
-      funcList.add(llFuncExpr);
-    }
-
-    public List<ExprNodeGenericFuncDesc> getLLFuncExprsInTopExpr(ExprNodeDesc topExpr) {
-      if (mapTopExprToLLFunExprs == null) {
-        return null;
-      }
-      return mapTopExprToLLFunExprs.get(topExpr);
-    }
-  }
-
   public static void validateNoLeadLagInValueBoundarySpec(ASTNode node)
       throws SemanticException {
     String errMsg = "Lead/Lag not allowed in ValueBoundary Spec";

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java Fri Sep 27 17:41:42 2013
@@ -23,7 +23,6 @@ import java.util.List;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.PTFTranslator.LeadLagInfo;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java Fri Sep 27 17:41:42 2013
@@ -18,28 +18,38 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
+import java.io.Externalizable;
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * AggregationDesc.
  *
  */
 public class AggregationDesc implements java.io.Serializable {
+
+  static {
+    PTFUtils.makeTransient(AggregationDesc.class, "genericUDAFEvaluator");
+  }
+
   private static final long serialVersionUID = 1L;
   private String genericUDAFName;
 
-  /**
-   * In case genericUDAFEvaluator is Serializable, we will serialize the object.
-   * 
-   * In case genericUDAFEvaluator does not implement Serializable, Java will
-   * remember the class of genericUDAFEvaluator and creates a new instance when
-   * deserialized. This is exactly what we want.
-   */
-  private GenericUDAFEvaluator genericUDAFEvaluator;
   private java.util.ArrayList<ExprNodeDesc> parameters;
   private boolean distinct;
   private GenericUDAFEvaluator.Mode mode;
 
+  // used when the evaluator is not Serializable: only its class name is kept
+  private String genericUDAFEvaluatorClassName;
+  // used when the evaluator is Serializable or Externalizable: kept directly
+  private GenericUDAFEvaluator genericUDAFWritableEvaluator;
+
+  private transient GenericUDAFEvaluator genericUDAFEvaluator;
+
   public AggregationDesc() {
   }
 
@@ -48,10 +58,10 @@ public class AggregationDesc implements 
       final java.util.ArrayList<ExprNodeDesc> parameters,
       final boolean distinct, final GenericUDAFEvaluator.Mode mode) {
     this.genericUDAFName = genericUDAFName;
-    this.genericUDAFEvaluator = genericUDAFEvaluator;
     this.parameters = parameters;
     this.distinct = distinct;
     this.mode = mode;
+    setGenericUDAFEvaluator(genericUDAFEvaluator);
   }
 
   public void setGenericUDAFName(final String genericUDAFName) {
@@ -65,10 +75,44 @@ public class AggregationDesc implements 
   public void setGenericUDAFEvaluator(
       final GenericUDAFEvaluator genericUDAFEvaluator) {
     this.genericUDAFEvaluator = genericUDAFEvaluator;
+    if (genericUDAFEvaluator instanceof Serializable ||
+        genericUDAFEvaluator instanceof Externalizable) {
+      this.genericUDAFWritableEvaluator = genericUDAFEvaluator;
+    } else {
+      this.genericUDAFEvaluatorClassName = genericUDAFEvaluator.getClass().getName();
+    }
   }
 
   public GenericUDAFEvaluator getGenericUDAFEvaluator() {
-    return genericUDAFEvaluator;
+    if (genericUDAFEvaluator != null) {
+      return genericUDAFEvaluator;
+    }
+    if (genericUDAFWritableEvaluator != null) {
+      return genericUDAFEvaluator = genericUDAFWritableEvaluator;
+    }
+    try {
+      return genericUDAFEvaluator =
+          ReflectionUtils.newInstance(Class.forName(genericUDAFEvaluatorClassName, true,
+          JavaUtils.getClassLoader()).asSubclass(GenericUDAFEvaluator.class), null);
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public String getGenericUDAFEvaluatorClassName() {
+    return genericUDAFEvaluatorClassName;
+  }
+
+  public void setGenericUDAFEvaluatorClassName(String genericUDAFEvaluatorClassName) {
+    this.genericUDAFEvaluatorClassName = genericUDAFEvaluatorClassName;
+  }
+
+  public GenericUDAFEvaluator getGenericUDAFWritableEvaluator() {
+    return genericUDAFWritableEvaluator;
+  }
+
+  public void setGenericUDAFWritableEvaluator(GenericUDAFEvaluator genericUDAFWritableEvaluator) {
+    this.genericUDAFWritableEvaluator = genericUDAFWritableEvaluator;
   }
 
   public java.util.ArrayList<ExprNodeDesc> getParameters() {

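AggregationDesc now persists its evaluator in one of two ways: a Serializable or Externalizable evaluator instance is stored directly, while any other evaluator is stored only by class name and rebuilt reflectively on first access. A stripped-down sketch of that lazy-rebuild pattern with hypothetical types:

    import java.io.Serializable;

    public class LazyEvaluatorHolder {
      // Hypothetical stand-in for GenericUDAFEvaluator.
      public interface Evaluator {}

      private Evaluator writableEvaluator;   // kept when the instance is Serializable
      private String evaluatorClassName;     // kept otherwise
      private transient Evaluator evaluator; // cached; lost across serialization

      public void setEvaluator(Evaluator e) {
        this.evaluator = e;
        if (e instanceof Serializable) {
          this.writableEvaluator = e;
        } else {
          this.evaluatorClassName = e.getClass().getName();
        }
      }

      public Evaluator getEvaluator() {
        if (evaluator != null) {
          return evaluator;
        }
        if (writableEvaluator != null) {
          return evaluator = writableEvaluator;
        }
        try { // rebuild from the class name, as the patch does via ReflectionUtils
          return evaluator = (Evaluator) Class.forName(evaluatorClassName).newInstance();
        } catch (Exception ex) {
          throw new RuntimeException(ex);
        }
      }
    }
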
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Fri Sep 27 17:41:42 2013
@@ -31,7 +31,6 @@ import org.apache.hadoop.hive.common.Jav
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -53,10 +52,10 @@ public class CreateTableDesc extends DDL
   String databaseName;
   String tableName;
   boolean isExternal;
-  ArrayList<FieldSchema> cols;
-  ArrayList<FieldSchema> partCols;
-  ArrayList<String> bucketCols;
-  ArrayList<Order> sortCols;
+  List<FieldSchema> cols;
+  List<FieldSchema> partCols;
+  List<String> bucketCols;
+  List<Order> sortCols;
   int numBuckets;
   String fieldDelim;
   String fieldEscape;
@@ -130,8 +129,12 @@ public class CreateTableDesc extends DDL
     this.serdeProps = serdeProps;
     this.tblProps = tblProps;
     this.ifNotExists = ifNotExists;
-    this.skewedColNames = new ArrayList<String>(skewedColNames);
-    this.skewedColValues = new ArrayList<List<String>>(skewedColValues);
+    this.skewedColNames = copyList(skewedColNames);
+    this.skewedColValues = copyList(skewedColValues);
+  }
+
+  private static <T> List<T> copyList(List<T> copy) {
+    return copy == null ? null : new ArrayList<T>(copy);
   }
 
   @Explain(displayName = "columns")
@@ -166,7 +169,7 @@ public class CreateTableDesc extends DDL
     this.tableName = tableName;
   }
 
-  public ArrayList<FieldSchema> getCols() {
+  public List<FieldSchema> getCols() {
     return cols;
   }
 
@@ -174,7 +177,7 @@ public class CreateTableDesc extends DDL
     this.cols = cols;
   }
 
-  public ArrayList<FieldSchema> getPartCols() {
+  public List<FieldSchema> getPartCols() {
     return partCols;
   }
 
@@ -183,7 +186,7 @@ public class CreateTableDesc extends DDL
   }
 
   @Explain(displayName = "bucket columns")
-  public ArrayList<String> getBucketCols() {
+  public List<String> getBucketCols() {
     return bucketCols;
   }
 
@@ -303,7 +306,7 @@ public class CreateTableDesc extends DDL
    * @return the sortCols
    */
   @Explain(displayName = "sort columns")
-  public ArrayList<Order> getSortCols() {
+  public List<Order> getSortCols() {
     return sortCols;
   }
 

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java Fri Sep 27 17:41:42 2013
@@ -26,9 +26,9 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputType;
-import org.apache.hadoop.hive.ql.parse.PTFTranslator.LeadLagInfo;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Fri Sep 27 17:41:42 2013
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.PTFTranslator.LeadLagInfo;
+import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
 import org.apache.hadoop.hive.ql.parse.WindowingExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.BoundaryDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.PTFExpressionDef;
@@ -47,8 +47,6 @@ import org.apache.hadoop.hive.ql.udf.ptf
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver;
 import org.apache.hadoop.hive.serde2.SerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -259,13 +257,13 @@ public class PTFDeserializer {
       serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
     }
     try {
-      SerDe serDe = (SerDe) SerDeUtils.lookupDeserializer(serdeClassName);
+      SerDe serDe = ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName).
+          asSubclass(SerDe.class), hConf);
       serDe.initialize(hConf, serDeProps);
       shp.setSerde(serDe);
       StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serDe, OI);
       shp.setOI(outOI);
-    } catch (SerDeException se)
-    {
+    } catch (Exception se) {
       throw new HiveException(se);
     }
   }

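The replacement above instantiates the SerDe through Hadoop's reflection
utilities instead of SerDeUtils: Configuration.getClassByName resolves the
name with the configuration's class loader (so serdes added at runtime are
visible), asSubclass gives a checked cast, and ReflectionUtils.newInstance
injects the configuration into Configurable implementations. A minimal sketch
of the pattern (the method name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.SerDe;
    import org.apache.hadoop.util.ReflectionUtils;

    public class SerDeLoader {
      static SerDe loadSerDe(Configuration conf, String serdeClassName)
          throws ClassNotFoundException {
        // getClassByName uses the Configuration's class loader, and
        // asSubclass fails fast if the named class is not a SerDe.
        Class<? extends SerDe> clazz =
            conf.getClassByName(serdeClassName).asSubclass(SerDe.class);
        // newInstance also calls setConf() on Configurable implementations.
        return ReflectionUtils.newInstance(clazz, conf);
      }
    }
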
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Fri Sep 27 17:41:42 2013
@@ -20,16 +20,20 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.Enumeration;
+import java.util.LinkedHashMap;
 import java.util.Properties;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.mapred.InputFormat;
 
 /**
@@ -40,12 +44,10 @@ import org.apache.hadoop.mapred.InputFor
 public class PartitionDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 2L;
   private TableDesc tableDesc;
-  private java.util.LinkedHashMap<String, String> partSpec;
-  private java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> deserializerClass;
+  private LinkedHashMap<String, String> partSpec;
   private Class<? extends InputFormat> inputFileFormatClass;
   private Class<? extends HiveOutputFormat> outputFileFormatClass;
-  private java.util.Properties properties;
-  private String serdeClassName;
+  private Properties properties;
 
   private String baseFileName;
 
@@ -56,65 +58,25 @@ public class PartitionDesc implements Se
   public PartitionDesc() {
   }
 
-  public PartitionDesc(final TableDesc table,
-      final java.util.LinkedHashMap<String, String> partSpec) {
-    this(table, partSpec, null, null, null, null, null);
-  }
-
-  public PartitionDesc(final TableDesc table,
-      final java.util.LinkedHashMap<String, String> partSpec,
-      final Class<? extends Deserializer> serdeClass,
-      final Class<? extends InputFormat> inputFileFormatClass,
-      final Class<?> outputFormat, final java.util.Properties properties,
-      final String serdeClassName) {
+  public PartitionDesc(final TableDesc table, final LinkedHashMap<String, String> partSpec) {
     this.tableDesc = table;
-    this.properties = properties;
     this.partSpec = partSpec;
-    deserializerClass = serdeClass;
-    this.inputFileFormatClass = inputFileFormatClass;
-    if (outputFormat != null) {
-      outputFileFormatClass = HiveFileFormatUtils
-          .getOutputFormatSubstitute(outputFormat,false);
-    }
-    if (serdeClassName != null) {
-      this.serdeClassName = serdeClassName;
-    } else if (properties != null) {
-      this.serdeClassName = properties
-          .getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
-    }
   }
 
-  public PartitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part)
-      throws HiveException {
+  public PartitionDesc(final Partition part) throws HiveException {
     tableDesc = Utilities.getTableDesc(part.getTable());
     properties = part.getMetadataFromPartitionSchema();
     partSpec = part.getSpec();
-    deserializerClass = part.getDeserializer(properties).getClass();
     inputFileFormatClass = part.getInputFormatClass();
     outputFileFormatClass = part.getOutputFormatClass();
-    serdeClassName = properties
-        .getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
-    ;
   }
 
-  public PartitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part,
-      final TableDesc tblDesc) throws HiveException {
+  public PartitionDesc(final Partition part, final TableDesc tblDesc) throws HiveException {
     tableDesc = tblDesc;
+    properties = part.getSchemaFromTableSchema(tblDesc.getProperties()); // each partition maintains its own large Properties object
     partSpec = part.getSpec();
-    // deserializerClass = part.getDeserializer(properties).getClass();
-    Deserializer deserializer;
-    try {
-      deserializer = SerDeUtils.lookupDeserializer(
-          properties.getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB));
-    } catch (SerDeException e) {
-      throw new HiveException(e);
-    }
-    deserializerClass = deserializer.getClass();
     inputFileFormatClass = part.getInputFormatClass();
     outputFileFormatClass = part.getOutputFormatClass();
-    serdeClassName = properties.getProperty(
-        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
   }
 
   @Explain(displayName = "")
@@ -127,27 +89,15 @@ public class PartitionDesc implements Se
   }
 
   @Explain(displayName = "partition values")
-  public java.util.LinkedHashMap<String, String> getPartSpec() {
+  public LinkedHashMap<String, String> getPartSpec() {
     return partSpec;
   }
 
-  public void setPartSpec(final java.util.LinkedHashMap<String, String> partSpec) {
+  public void setPartSpec(final LinkedHashMap<String, String> partSpec) {
     this.partSpec = partSpec;
   }
 
-  public java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> getDeserializerClass() {
-    if (deserializerClass == null && tableDesc != null) {
-      setDeserializerClass(tableDesc.getDeserializerClass());
-    }
-    return deserializerClass;
-  }
-
-  public void setDeserializerClass(
-      final java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> serdeClass) {
-    deserializerClass = serdeClass;
-  }
-
   public Class<? extends InputFormat> getInputFileFormatClass() {
     if (inputFileFormatClass == null && tableDesc != null) {
       setInputFileFormatClass(tableDesc.getInputFileFormatClass());
     }
@@ -157,10 +107,12 @@ public class PartitionDesc implements Se
   /**
    * Return a deserializer object corresponding to the tableDesc.
    */
-  public Deserializer getDeserializer() throws Exception {
-    Deserializer de = deserializerClass.newInstance();
-    de.initialize(null, properties);
-    return de;
+  public Deserializer getDeserializer() {
+    try {
+      return MetaStoreUtils.getDeserializer(Hive.get().getConf(), getProperties());
+    } catch (Exception e) {
+      return null;
+    }
   }
 
   public void setInputFileFormatClass(
@@ -181,14 +133,14 @@ public class PartitionDesc implements Se
   }
 
   @Explain(displayName = "properties", normalExplain = false)
-  public java.util.Properties getProperties() {
+  public Properties getProperties() {
     if (properties == null && tableDesc != null) {
       return tableDesc.getProperties();
     }
     return properties;
   }
 
-  public java.util.Properties getOverlayedProperties(){
+  public Properties getOverlayedProperties() {
     if (tableDesc != null) {
       Properties overlayedProps = new Properties(tableDesc.getProperties());
       overlayedProps.putAll(getProperties());
@@ -198,7 +150,7 @@ public class PartitionDesc implements Se
     }
   }
 
-  public void setProperties(final java.util.Properties properties) {
+  public void setProperties(final Properties properties) {
     this.properties = properties;
   }
 
@@ -207,24 +159,12 @@ public class PartitionDesc implements Se
    */
   @Explain(displayName = "serde")
   public String getSerdeClassName() {
-    if (serdeClassName == null && tableDesc != null) {
-      setSerdeClassName(tableDesc.getSerdeClassName());
-    }
-    return serdeClassName;
-  }
-
-  /**
-   * @param serdeClassName
-   *          the serde Class Name to set
-   */
-  public void setSerdeClassName(String serdeClassName) {
-    this.serdeClassName = serdeClassName;
+    return getProperties().getProperty(serdeConstants.SERIALIZATION_LIB);
   }
 
   @Explain(displayName = "name")
   public String getTableName() {
-    return getProperties().getProperty(
-        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME);
+    return getProperties().getProperty(hive_metastoreConstants.META_TABLE_NAME);
   }
 
   @Explain(displayName = "input format")
@@ -250,8 +190,6 @@ public class PartitionDesc implements Se
   public PartitionDesc clone() {
     PartitionDesc ret = new PartitionDesc();
 
-    ret.setSerdeClassName(serdeClassName);
-    ret.setDeserializerClass(deserializerClass);
     ret.inputFileFormatClass = inputFileFormatClass;
     ret.outputFileFormatClass = outputFileFormatClass;
     if (properties != null) {

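With the cached serdeClassName and deserializerClass fields removed,
PartitionDesc derives the serde class lazily from the SERIALIZATION_LIB
property, falling back to the table-level properties when the partition
carries none of its own. A sketch of that lookup under the same convention
(helper and parameter names are illustrative):

    import java.util.Properties;
    import org.apache.hadoop.hive.serde.serdeConstants;

    public class SerdeNameLookup {
      // Resolve the serde class name from partition properties, falling
      // back to table properties, instead of caching it in a field.
      static String serdeClassName(Properties partProps, Properties tableProps) {
        Properties effective = (partProps != null) ? partProps : tableProps;
        return effective.getProperty(serdeConstants.SERIALIZATION_LIB);
      }
    }
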
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Fri Sep 27 17:41:42 2013
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -132,12 +131,9 @@ public final class PlanUtils {
             serdeConstants.ESCAPE_CHAR, localDirectoryDesc.getFieldEscape());
       }
       if (localDirectoryDesc.getSerName() != null) {
-        tableDesc.setSerdeClassName(localDirectoryDesc.getSerName());
         tableDesc.getProperties().setProperty(
             serdeConstants.SERIALIZATION_LIB, localDirectoryDesc.getSerName());
-        tableDesc.setDeserializerClass(
-            (Class<? extends Deserializer>) Class.forName(localDirectoryDesc.getSerName()));
       }
       if (localDirectoryDesc.getOutputFormat() != null){
           tableDesc.setOutputFileFormatClass(Class.forName(localDirectoryDesc.getOutputFormat()));
       }
@@ -263,7 +259,8 @@ public final class PlanUtils {
       inputFormat = TextInputFormat.class;
       outputFormat = IgnoreKeyTextOutputFormat.class;
     }
-    return new TableDesc(serdeClass, inputFormat, outputFormat, properties);
+    properties.setProperty(serdeConstants.SERIALIZATION_LIB, serdeClass.getName());
+    return new TableDesc(inputFormat, outputFormat, properties);
   }
 
   public static TableDesc getDefaultQueryOutputTableDesc(String cols, String colTypes,
@@ -274,7 +271,7 @@ public final class PlanUtils {
     tblDesc.getProperties().setProperty(serdeConstants.ESCAPE_CHAR, "\\");
     //enable extended nesting levels
     tblDesc.getProperties().setProperty(
-        LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");    
+        LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
     return tblDesc;
   }
 
@@ -354,11 +351,11 @@ public final class PlanUtils {
    * "array<string>".
    */
   public static TableDesc getDefaultTableDesc(String separatorCode) {
-    return new TableDesc(MetadataTypedColumnsetSerDe.class,
+    return new TableDesc(
         TextInputFormat.class, IgnoreKeyTextOutputFormat.class, Utilities
         .makeProperties(
-            org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT,
-            separatorCode));
+            serdeConstants.SERIALIZATION_FORMAT, separatorCode,
+            serdeConstants.SERIALIZATION_LIB, MetadataTypedColumnsetSerDe.class.getName()));
   }
 
   /**
@@ -366,25 +363,27 @@ public final class PlanUtils {
    */
   public static TableDesc getReduceKeyTableDesc(List<FieldSchema> fieldSchemas,
       String order) {
-    return new TableDesc(BinarySortableSerDe.class,
+    return new TableDesc(
         SequenceFileInputFormat.class, SequenceFileOutputFormat.class,
         Utilities.makeProperties(serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
         serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        serdeConstants.SERIALIZATION_SORT_ORDER, order));
+        serdeConstants.SERIALIZATION_SORT_ORDER, order,
+        serdeConstants.SERIALIZATION_LIB, BinarySortableSerDe.class.getName()));
   }
 
   /**
    * Generate the table descriptor for Map-side join key.
    */
   public static TableDesc getMapJoinKeyTableDesc(List<FieldSchema> fieldSchemas) {
-    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+    return new TableDesc(SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
         MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
         "columns.types", MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        serdeConstants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\",
+        serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
   }
 
   /**
@@ -392,12 +391,13 @@ public final class PlanUtils {
    */
   public static TableDesc getMapJoinValueTableDesc(
       List<FieldSchema> fieldSchemas) {
-    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+    return new TableDesc(SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
         MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
         "columns.types", MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        serdeConstants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\",
+        serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
   }
 
   /**
@@ -405,26 +405,28 @@ public final class PlanUtils {
    */
   public static TableDesc getIntermediateFileTableDesc(
       List<FieldSchema> fieldSchemas) {
-    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+    return new TableDesc(SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties(
         serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
         serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        serdeConstants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\",
+        serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
   }
 
   /**
    * Generate the table descriptor for intermediate files.
    */
   public static TableDesc getReduceValueTableDesc(List<FieldSchema> fieldSchemas) {
-    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+    return new TableDesc(SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties(
         serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
         serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
-        serdeConstants.ESCAPE_CHAR, "\\"));
+        serdeConstants.ESCAPE_CHAR, "\\",
+        serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
   }
 
   /**

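Each descriptor above now threads the serde class through the key/value list
handed to Utilities.makeProperties, which pairs alternating varargs into a
Properties object. A self-contained sketch of such a builder, written as a
hypothetical stand-in rather than Hive's implementation:

    import java.util.Properties;

    public class PropsBuilder {
      // Pairs alternating key/value varargs into a Properties object,
      // in the style of Utilities.makeProperties.
      static Properties makeProperties(String... kv) {
        if (kv.length % 2 != 0) {
          throw new IllegalArgumentException("odd number of arguments");
        }
        Properties props = new Properties();
        for (int i = 0; i < kv.length; i += 2) {
          props.setProperty(kv[i], kv[i + 1]);
        }
        return props;
      }

      public static void main(String[] args) {
        Properties p = makeProperties(
            "serialization.format", "1",
            "serialization.lib", "org.example.SomeSerDe");  // example values
        System.out.println(p);
      }
    }
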
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Fri Sep 27 17:41:42 2013
@@ -24,9 +24,11 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.mapred.InputFormat;
 
@@ -36,36 +38,35 @@ import org.apache.hadoop.mapred.InputFor
  */
 public class TableDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
-  private Class<? extends Deserializer> deserializerClass;
   private Class<? extends InputFormat> inputFileFormatClass;
   private Class<? extends HiveOutputFormat> outputFileFormatClass;
   private java.util.Properties properties;
-  private String serdeClassName;
   private Map<String, String> jobProperties;
 
   public TableDesc() {
   }
 
-  public TableDesc(final Class<? extends Deserializer> serdeClass,
-      final Class<? extends InputFormat> inputFileFormatClass,
-      final Class<?> class1, final java.util.Properties properties) {
-    deserializerClass = serdeClass;
-    this.inputFileFormatClass = inputFileFormatClass;
+  /**
+   * @param inputFormatClass
+   * @param outputFormatClass
+   * @param properties must contain the serde class name associated with this table.
+   */
+  public TableDesc(
+      final Class<? extends InputFormat> inputFormatClass,
+      final Class<?> outputFormatClass, final Properties properties) {
+    this.inputFileFormatClass = inputFormatClass;
     outputFileFormatClass = HiveFileFormatUtils
-        .getOutputFormatSubstitute(class1, false);
+        .getOutputFormatSubstitute(outputFormatClass, false);
     this.properties = properties;
-    serdeClassName = properties
-        .getProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB);
-    ;
   }
 
   public Class<? extends Deserializer> getDeserializerClass() {
-    return deserializerClass;
-  }
-
-  public void setDeserializerClass(
-      final Class<? extends Deserializer> serdeClass) {
-    deserializerClass = serdeClass;
+    try {
+      return (Class<? extends Deserializer>) Class.forName(getSerdeClassName());
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException(e);
+    }
   }
 
   public Class<? extends InputFormat> getInputFileFormatClass() {
@@ -76,7 +77,7 @@ public class TableDesc implements Serial
    * Return a deserializer object corresponding to the tableDesc.
    */
   public Deserializer getDeserializer() throws Exception {
-    Deserializer de = deserializerClass.newInstance();
+    Deserializer de = getDeserializerClass().newInstance();
     de.initialize(null, properties);
     return de;
   }
@@ -96,11 +97,11 @@ public class TableDesc implements Serial
   }
 
   @Explain(displayName = "properties", normalExplain = false)
-  public java.util.Properties getProperties() {
+  public Properties getProperties() {
     return properties;
   }
 
-  public void setProperties(final java.util.Properties properties) {
+  public void setProperties(final Properties properties) {
     this.properties = properties;
   }
 
@@ -118,21 +119,13 @@ public class TableDesc implements Serial
    */
   @Explain(displayName = "serde")
   public String getSerdeClassName() {
-    return serdeClassName;
-  }
-
-  /**
-   * @param serdeClassName
-   *          the serde Class Name to set
-   */
-  public void setSerdeClassName(String serdeClassName) {
-    this.serdeClassName = serdeClassName;
+    return properties.getProperty(serdeConstants.SERIALIZATION_LIB);
   }
 
   @Explain(displayName = "name")
   public String getTableName() {
     return properties
-        .getProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME);
+        .getProperty(hive_metastoreConstants.META_TABLE_NAME);
   }
 
   @Explain(displayName = "input format")
@@ -151,16 +144,12 @@ public class TableDesc implements Serial
   }
 
   public boolean isNonNative() {
-    return (properties.getProperty(
-        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE)
-      != null);
+    return (properties.getProperty(hive_metastoreConstants.META_TABLE_STORAGE) != null);
   }
 
   @Override
   public Object clone() {
     TableDesc ret = new TableDesc();
-    ret.setSerdeClassName(serdeClassName);
-    ret.setDeserializerClass(deserializerClass);
     ret.setInputFileFormatClass(inputFileFormatClass);
     ret.setOutputFileFormatClass(outputFileFormatClass);
     Properties newProp = new Properties();
@@ -181,13 +170,11 @@ public class TableDesc implements Serial
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + ((deserializerClass == null) ? 0 : deserializerClass.hashCode());
     result = prime * result +
         ((inputFileFormatClass == null) ? 0 : inputFileFormatClass.hashCode());
     result = prime * result +
         ((outputFileFormatClass == null) ? 0 : outputFileFormatClass.hashCode());
     result = prime * result + ((properties == null) ? 0 : properties.hashCode());
-    result = prime * result + ((serdeClassName == null) ? 0 : serdeClassName.hashCode());
     result = prime * result + ((jobProperties == null) ? 0 : jobProperties.hashCode());
     return result;
   }
@@ -200,16 +187,12 @@ public class TableDesc implements Serial
 
     TableDesc target = (TableDesc) o;
     boolean ret = true;
-    ret = ret && (deserializerClass == null ? target.deserializerClass == null :
-      deserializerClass.equals(target.deserializerClass));
     ret = ret && (inputFileFormatClass == null ? target.inputFileFormatClass == null :
       inputFileFormatClass.equals(target.inputFileFormatClass));
     ret = ret && (outputFileFormatClass == null ? target.outputFileFormatClass == null :
       outputFileFormatClass.equals(target.outputFileFormatClass));
     ret = ret && (properties == null ? target.properties == null :
       properties.equals(target.properties));
-    ret = ret && (serdeClassName == null ? target.serdeClassName == null :
-      serdeClassName.equals(target.serdeClassName));
     ret = ret && (jobProperties == null ? target.jobProperties == null :
       jobProperties.equals(target.jobProperties));
     return ret;

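TableDesc now keeps only the SERIALIZATION_LIB property and resolves the
deserializer class on demand, which is what lets clone(), hashCode(), and
equals() drop the two derived fields without changing equality semantics. The
patch performs the lookup with an unchecked cast; a sketch of the same
resolution using asSubclass to avoid it (the class name here is illustrative):

    import org.apache.hadoop.hive.serde2.Deserializer;

    public class DeserializerLookup {
      // On-demand resolution from the stored class name; asSubclass avoids
      // the unchecked cast and fails fast on a non-Deserializer class.
      static Class<? extends Deserializer> resolve(String className) {
        try {
          return Class.forName(className).asSubclass(Deserializer.class);
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
      }
    }
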
Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Fri Sep 27 17:41:42 2013
@@ -203,7 +203,7 @@ public class SessionState {
   }
 
   private static final SimpleDateFormat DATE_FORMAT =
-    new SimpleDateFormat("yyyyMMddHHmm");
+      new SimpleDateFormat("yyyyMMddHHmm");
 
   public void setCmd(String cmdString) {
     conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, cmdString);
@@ -448,7 +448,7 @@ public class SessionState {
     } catch (IOException e) {
       console.printError("Unable to validate " + newFile + "\nException: "
           + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+              + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return null;
     }
   }
@@ -465,7 +465,7 @@ public class SessionState {
     } catch (Exception e) {
       console.printError("Unable to register " + newJar + "\nException: "
           + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+              + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return false;
     }
   }
@@ -479,7 +479,7 @@ public class SessionState {
     } catch (Exception e) {
       console.printError("Unable to unregister " + jarsToUnregister
           + "\nException: " + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+              + org.apache.hadoop.util.StringUtils.stringifyException(e));
       return false;
     }
   }
@@ -565,7 +565,7 @@ public class SessionState {
   }
 
   private final HashMap<ResourceType, Set<String>> resource_map =
-    new HashMap<ResourceType, Set<String>>();
+      new HashMap<ResourceType, Set<String>>();
 
   public String add_resource(ResourceType t, String value) {
     // By default don't convert to unix
@@ -783,7 +783,7 @@ public class SessionState {
 
   public void close() throws IOException {
     File resourceDir =
-      new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
+        new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
     LOG.debug("Removing resource dir " + resourceDir);
     try {
       if (resourceDir.exists()) {

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCollectList.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCollectList.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCollectList.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCollectList.java Fri Sep 27 17:41:42 2013
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.commons.logging.Log;

Modified: hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java?rev=1526996&r1=1526995&r2=1526996&view=diff
==============================================================================
--- hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java (original)
+++ hive/branches/maven/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java Fri Sep 27 17:41:42 2013
@@ -1,5 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
@@ -12,14 +31,15 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
 
-import com.esotericsoftware.minlog.Log;
+public class GenericUDAFMkCollectionEvaluator extends GenericUDAFEvaluator
+    implements Serializable {
 
-public class GenericUDAFMkCollectionEvaluator extends GenericUDAFEvaluator {
+  private static final long serialVersionUID = 1L;
 
   enum BufferType { SET, LIST }
 
   // For PARTIAL1 and COMPLETE: ObjectInspectors for original data
-  private PrimitiveObjectInspector inputOI;
+  private transient PrimitiveObjectInspector inputOI;
   // For PARTIAL2 and FINAL: ObjectInspectors for partial aggregations (list
   // of objs)
   private transient StandardListObjectInspector loi;
@@ -29,8 +49,7 @@ public class GenericUDAFMkCollectionEval
   private BufferType bufferType;
 
   // needed by Kryo
-  public GenericUDAFMkCollectionEvaluator(){
-
+  public GenericUDAFMkCollectionEvaluator() {
   }
 
   public GenericUDAFMkCollectionEvaluator(BufferType bufferType){
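
The evaluator changes in this file follow the shape a Kryo-serialized plan
needs: the class implements Serializable with an explicit serialVersionUID, a
public no-argument constructor is available for the serializer, and
ObjectInspector fields are transient because inspectors are rebuilt during
init() after deserialization rather than shipped with the plan. A minimal
sketch of that shape (names are illustrative):

    import java.io.Serializable;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class KryoFriendlyEvaluator implements Serializable {
      private static final long serialVersionUID = 1L;

      // Inspectors are not serializable; they are rebuilt in init(),
      // so they must not travel with the serialized plan.
      private transient ObjectInspector inputOI;

      // Serializers such as Kryo need a no-arg constructor.
      public KryoFriendlyEvaluator() {
      }

      public void init(ObjectInspector oi) {
        this.inputOI = oi;  // re-established after deserialization
      }
    }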