Posted to commits@hive.apache.org by pr...@apache.org on 2015/12/11 22:49:59 UTC
hive git commit: HIVE-12609: Remove javaXML serialization (Prasanth Jayachandran reviewed by Ashutosh Chauhan)
Repository: hive
Updated Branches:
refs/heads/master 452c67f68 -> 3e3d966f3
HIVE-12609: Remove javaXML serialization (Prasanth Jayachandran reviewed by Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3e3d966f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3e3d966f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3e3d966f
Branch: refs/heads/master
Commit: 3e3d966f39f573247aad6e5694e3098ceb0101fc
Parents: 452c67f
Author: Prasanth Jayachandran <j....@gmail.com>
Authored: Fri Dec 11 15:49:32 2015 -0600
Committer: Prasanth Jayachandran <j....@gmail.com>
Committed: Fri Dec 11 15:49:32 2015 -0600
----------------------------------------------------------------------
.../org/apache/hadoop/hive/conf/HiveConf.java | 45 ++-
.../org/apache/hadoop/hive/ql/QTestUtil.java | 147 +--------
.../apache/hadoop/hive/ql/exec/PTFUtils.java | 180 -----------
.../hadoop/hive/ql/exec/ReduceSinkOperator.java | 4 -
.../hive/ql/exec/SerializationUtilities.java | 300 ++-----------------
.../org/apache/hadoop/hive/ql/exec/Task.java | 4 -
.../apache/hadoop/hive/ql/exec/Utilities.java | 18 +-
.../hadoop/hive/ql/exec/mr/ExecDriver.java | 5 +-
.../hadoop/hive/ql/exec/mr/MapRedTask.java | 2 +-
.../hadoop/hive/ql/exec/mr/MapredLocalTask.java | 2 +-
.../physical/GenSparkSkewJoinProcessor.java | 2 +-
.../hadoop/hive/ql/parse/GenTezUtils.java | 6 +-
.../hadoop/hive/ql/parse/TezCompiler.java | 2 +-
.../hive/ql/parse/spark/GenSparkUtils.java | 6 +-
.../hive/ql/parse/spark/SparkCompiler.java | 2 +-
.../hive/ql/parse/spark/SplitOpTreeForDPP.java | 3 +-
.../hive/ql/plan/AbstractOperatorDesc.java | 4 -
.../hadoop/hive/ql/plan/AggregationDesc.java | 4 -
.../hadoop/hive/ql/plan/LoadFileDesc.java | 3 -
.../hadoop/hive/ql/plan/LoadMultiFilesDesc.java | 3 -
.../org/apache/hadoop/hive/ql/plan/PTFDesc.java | 22 +-
.../hadoop/hive/ql/plan/TableScanDesc.java | 4 -
.../hive/ql/plan/ptf/PTFExpressionDef.java | 4 -
.../hadoop/hive/ql/plan/ptf/ShapeDetails.java | 4 -
.../hive/ql/udf/generic/GenericUDFLeadLag.java | 5 -
.../hive/ql/udf/ptf/TableFunctionEvaluator.java | 4 -
.../apache/hadoop/hive/ql/exec/TestPlan.java | 4 +-
.../hive/ql/io/orc/TestInputOutputFormat.java | 2 +-
.../clientpositive/cast_qualified_types.q | 2 -
29 files changed, 85 insertions(+), 708 deletions(-)
----------------------------------------------------------------------
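This commit removes the javaXML plan-serialization path entirely: Kryo becomes the only format, the Configuration parameter disappears from the serializePlan/deserializePlan entry points, and the XMLEncoder persistence delegates and their callers are deleted. A minimal sketch of the post-commit round trip, using the signatures shown in the SerializationUtilities hunk below (the class name PlanRoundTrip and the empty MapredWork stand-in are illustrative, not part of this commit):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
    import org.apache.hadoop.hive.ql.plan.MapredWork;

    public class PlanRoundTrip {
      public static void main(String[] args) {
        MapredWork work = new MapredWork();                 // stand-in for a real compiled plan
        ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
        SerializationUtilities.serializePlan(work, baos);   // Kryo only; no Configuration argument
        MapredWork copy = SerializationUtilities.deserializePlan(
            new ByteArrayInputStream(baos.toByteArray()), MapredWork.class);
        System.out.println("round trip ok: " + (copy != null));
      }
    }
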
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index ce6ad6b..182902e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -18,7 +18,26 @@
package org.apache.hadoop.hive.conf;
-import com.google.common.base.Joiner;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.security.auth.login.LoginException;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
@@ -38,26 +57,7 @@ import org.apache.hive.common.HiveCompat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.security.auth.login.LoginException;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.PrintStream;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.common.base.Joiner;
/**
* Hive Configuration.
@@ -252,9 +252,6 @@ public class HiveConf extends Configuration {
// QL execution stuff
SCRIPTWRAPPER("hive.exec.script.wrapper", null, ""),
PLAN("hive.exec.plan", "", ""),
- PLAN_SERIALIZATION("hive.plan.serialization.format", "kryo",
- "Query plan format serialization between client and task nodes. \n" +
- "Two supported values are : kryo and javaXML. Kryo is default."),
STAGINGDIR("hive.exec.stagingdir", ".hive-staging",
"Directory name that will be created inside table locations in order to support HDFS encryption. " +
"This is replaces ${hive.exec.scratchdir} for query results with the exception of read-only tables. " +
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 5b2c8c2..f805087 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -56,11 +56,8 @@ import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -109,6 +106,8 @@ import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
@@ -1328,49 +1327,6 @@ public class QTestUtil {
}
}
- private final Pattern[] xmlPlanMask = toPattern(new String[] {
- "<java version=\".*\" class=\"java.beans.XMLDecoder\">",
- "<string>.*/tmp/.*</string>",
- "<string>file:.*</string>",
- "<string>pfile:.*</string>",
- "<string>[0-9]{10}</string>",
- "<string>/.*/warehouse/.*</string>"
- });
-
- public int checkPlan(String tname, List<Task<? extends Serializable>> tasks) throws Exception {
-
- if (tasks == null) {
- throw new Exception("Plan is null");
- }
- File planDir = new File(outDir, "plan");
- String planFile = outPath(planDir.toString(), tname + ".xml");
-
- File outf = null;
- outf = new File(logDir);
- outf = new File(outf, tname.concat(".xml"));
-
- FileOutputStream ofs = new FileOutputStream(outf);
- try {
- conf.set(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "javaXML");
- for (Task<? extends Serializable> plan : tasks) {
- SerializationUtilities.serializePlan(plan, ofs, conf);
- }
- ofs.close();
- fixXml4JDK7(outf.getPath());
- maskPatterns(xmlPlanMask, outf.getPath());
-
- int exitVal = executeDiffCommand(outf.getPath(), planFile, true, false);
-
- if (exitVal != 0 && overWrite) {
- exitVal = overwriteResults(outf.getPath(), planFile);
- }
- return exitVal;
- } finally {
- conf.set(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
- IOUtils.closeQuietly(ofs);
- }
- }
-
/**
* Given the current configurations (e.g., hadoop version and execution mode), return
* the correct file name to compare with the current test run output.
@@ -1406,103 +1362,6 @@ public class QTestUtil {
return ret;
}
- /**
- * Fix the XML generated by JDK7 which is slightly different from what's generated by JDK6,
- * causing 40+ test failures. There are mainly two problems:
- *
- * 1. object element's properties, id and class, are in reverse order, i.e.
- * <object class="org.apache.hadoop.hive.ql.exec.MapRedTask" id="MapRedTask0">
- * which needs to be fixed to
- * <object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask">
- * 2. JDK introduces Enum as class, i.e.
- * <object id="GenericUDAFEvaluator$Mode0" class="java.lang.Enum">
- * <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator$Mode</class>
- * which needs to be fixed to
- * <object id="GenericUDAFEvaluator$Mode0" class="org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator$Mode"
- * method="valueOf">
- *
- * Though not elegant, this allows these test cases to pass until we have a better serialization mechanism.
- *
- * Did I mention this is test code?
- *
- * @param fname the name of the file to fix
- * @throws Exception in case of IO error
- */
- private static void fixXml4JDK7(String fname) throws Exception {
- String version = System.getProperty("java.version");
- if (!version.startsWith("1.7")) {
- return;
- }
-
- BufferedReader in = new BufferedReader(new FileReader(fname));
- BufferedWriter out = new BufferedWriter(new FileWriter(fname + ".orig"));
- String line = null;
- while (null != (line = in.readLine())) {
- out.write(line);
- out.write('\n');
- }
- in.close();
- out.close();
-
- in = new BufferedReader(new FileReader(fname + ".orig"));
- out = new BufferedWriter(new FileWriter(fname));
-
- while (null != (line = in.readLine())) {
- if (line.indexOf("<object ") == -1 || line.indexOf("class=") == -1) {
- out.write(line);
- } else {
- StringBuilder sb = new StringBuilder();
- String prefix = line.substring(0, line.indexOf("<object") + 7);
- sb.append( prefix );
- String postfix = line.substring(line.lastIndexOf('"') + 1);
- String id = getPropertyValue(line, "id");
- if (id != null) {
- sb.append(" id=" + id);
- }
- String cls = getPropertyValue(line, "class");
- assert(cls != null);
- if (cls.equals("\"java.lang.Enum\"")) {
- line = in.readLine();
- cls = "\"" + getElementValue(line, "class") + "\"";
- sb.append(" class=" + cls + " method=\"valueOf\"" );
- } else {
- sb.append(" class=" + cls);
- }
-
- sb.append(postfix);
- out.write(sb.toString());
- }
-
- out.write('\n');
- }
-
- in.close();
- out.close();
- }
-
- /**
- * Get the value of a property in line. The returned value has original quotes
- */
- private static String getPropertyValue(String line, String name) {
- int start = line.indexOf( name + "=" );
- if (start == -1) {
- return null;
- }
- start += name.length() + 1;
- int end = line.indexOf("\"", start + 1);
- return line.substring( start, end + 1 );
- }
-
- /**
- * Get the value of the element in input. (Note: the returned value has no quotes.)
- */
- private static String getElementValue(String line, String name) {
- assert(line.contains("<" + name + ">"));
- int start = line.indexOf("<" + name + ">") + name.length() + 2;
- int end = line.indexOf("</" + name + ">");
- return line.substring(start, end);
- }
-
private Pattern[] toPattern(String[] patternStrs) {
Pattern[] patterns = new Pattern[patternStrs.length];
for (int i = 0; i < patternStrs.length; i++) {
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java
index 721fbaa..7e5f950 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java
@@ -18,34 +18,11 @@
package org.apache.hadoop.hive.ql.exec;
-import java.beans.BeanInfo;
-import java.beans.Encoder;
-import java.beans.ExceptionListener;
-import java.beans.Expression;
-import java.beans.IntrospectionException;
-import java.beans.Introspector;
-import java.beans.PersistenceDelegate;
-import java.beans.PropertyDescriptor;
-import java.beans.Statement;
-import java.beans.XMLDecoder;
-import java.beans.XMLEncoder;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
-import org.antlr.runtime.CommonToken;
-import org.antlr.runtime.tree.BaseTree;
-import org.antlr.runtime.tree.CommonTree;
-import org.apache.hadoop.hive.ql.exec.SerializationUtilities.EnumDelegate;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
public class PTFUtils {
public static String toString(List<?> col)
@@ -127,162 +104,5 @@ public class PTFUtils {
public static abstract class Predicate<T>
{
public abstract boolean apply(T obj);
- };
-
-
-
- /*
- * serialization functions
- */
- public static void serialize(OutputStream out, Object o)
- {
- XMLEncoder e = new XMLEncoder(out);
- e.setExceptionListener(new EL());
- PTFUtils.addPersistenceDelegates(e);
- e.writeObject(o);
- e.close();
- }
-
- public static Object deserialize(InputStream in1)
- {
- XMLDecoder d = null;
- try
- {
- d = new XMLDecoder(in1, null, null);
- return d.readObject();
- }
- finally
- {
- if (null != d)
- {
- d.close();
- }
- }
- }
-
- public static void addPersistenceDelegates(XMLEncoder e)
- {
- addAntlrPersistenceDelegates(e);
- addHivePersistenceDelegates(e);
- addEnumDelegates(e);
- }
-
- public static void addEnumDelegates(XMLEncoder e)
- {
- e.setPersistenceDelegate(Direction.class, new EnumDelegate());
- }
-
- public static void addAntlrPersistenceDelegates(XMLEncoder e)
- {
- e.setPersistenceDelegate(ASTNode.class, new PersistenceDelegate()
- {
-
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out)
- {
- return new Expression(oldInstance, oldInstance.getClass(),
- "new", new Object[]
- { ((ASTNode) oldInstance).getToken() });
- }
- });
- e.setPersistenceDelegate(CommonTree.class, new PersistenceDelegate()
- {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out)
- {
- return new Expression(oldInstance, oldInstance.getClass(),
- "new", new Object[]
- { ((CommonTree) oldInstance).getToken() });
- }
- });
- e.setPersistenceDelegate(BaseTree.class, new PersistenceDelegate()
- {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out)
- {
- return new Expression(oldInstance, oldInstance.getClass(),
- "new", new Object[]
- {});
- }
-
- @Override
- @SuppressWarnings("rawtypes")
- protected void initialize(Class type, Object oldInstance,
- Object newInstance, Encoder out)
- {
- super.initialize(type, oldInstance, newInstance, out);
-
- BaseTree t = (BaseTree) oldInstance;
-
- for (int i = 0; i < t.getChildCount(); i++)
- {
- out.writeStatement(new Statement(oldInstance, "addChild",
- new Object[]
- { t.getChild(i) }));
- }
- }
- });
- e.setPersistenceDelegate(CommonToken.class, new PersistenceDelegate()
- {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out)
- {
- return new Expression(oldInstance, oldInstance.getClass(),
- "new", new Object[]
- { ((CommonToken) oldInstance).getType(),
- ((CommonToken) oldInstance).getText() });
- }
- });
- }
-
- public static void addHivePersistenceDelegates(XMLEncoder e)
- {
- e.setPersistenceDelegate(PrimitiveTypeInfo.class,
- new PersistenceDelegate()
- {
- @Override
- protected Expression instantiate(Object oldInstance,
- Encoder out)
- {
- return new Expression(oldInstance,
- TypeInfoFactory.class, "getPrimitiveTypeInfo",
- new Object[]
- { ((PrimitiveTypeInfo) oldInstance)
- .getTypeName() });
- }
- });
- }
-
- static class EL implements ExceptionListener
- {
- public void exceptionThrown(Exception e)
- {
- e.printStackTrace();
- throw new RuntimeException("Cannot serialize the query plan", e);
- }
- }
-
- public static void makeTransient(Class<?> beanClass, String... pdNames) {
- try {
- BeanInfo info = Introspector.getBeanInfo(beanClass);
- PropertyDescriptor[] descs = info.getPropertyDescriptors();
- if (descs == null) {
- throw new RuntimeException("Cannot access property descriptor for class " + beanClass);
- }
- Map<String, PropertyDescriptor> mapping = new HashMap<String, PropertyDescriptor>();
- for (PropertyDescriptor desc : descs) {
- mapping.put(desc.getName(), desc);
- }
- for (String pdName : pdNames) {
- PropertyDescriptor desc = mapping.get(pdName);
- if (desc == null) {
- throw new RuntimeException("Property " + pdName + " does not exist in " + beanClass);
- }
- desc.setValue("transient", Boolean.TRUE);
- }
- }
- catch (IntrospectionException ie) {
- throw new RuntimeException(ie);
- }
}
}
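With the XML path gone, PTFUtils.makeTransient (a JavaBeans Introspector trick that marked bean properties "transient" for XMLEncoder) is removed along with all of its static-initializer call sites in the hunks below. The replacement pattern, visible in the PTFDesc hunk where llInfo gains the keyword, is to declare runtime-only fields transient directly; a tiny illustrative sketch (ExampleDesc and its field are hypothetical):

    public class ExampleDesc implements java.io.Serializable {
      private static final long serialVersionUID = 1L;
      // Runtime-only state is now simply declared transient, which both Java
      // serialization and Kryo honor; no static { PTFUtils.makeTransient(...) } block.
      transient Object runtimeOnlyState;
    }
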
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
index ef5ee95..4b65952 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
@@ -64,10 +64,6 @@ import org.apache.hadoop.util.hash.MurmurHash;
public class ReduceSinkOperator extends TerminalOperator<ReduceSinkDesc>
implements Serializable, TopNHash.BinaryCollector {
- static {
- PTFUtils.makeTransient(ReduceSinkOperator.class, "inputAliases", "valueIndex");
- }
-
/**
* Counters.
*/
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
index d5e946e..803f492 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
@@ -17,14 +17,6 @@
*/
package org.apache.hadoop.hive.ql.exec;
-import java.beans.DefaultPersistenceDelegate;
-import java.beans.Encoder;
-import java.beans.ExceptionListener;
-import java.beans.Expression;
-import java.beans.PersistenceDelegate;
-import java.beans.Statement;
-import java.beans.XMLDecoder;
-import java.beans.XMLEncoder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
@@ -35,32 +27,20 @@ import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.net.URI;
import java.sql.Timestamp;
-import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.antlr.runtime.CommonToken;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.tuple.Pair;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.plan.AbstractOperatorDesc;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.SparkEdgeProperty;
import org.apache.hadoop.hive.ql.plan.SparkWork;
@@ -289,46 +269,37 @@ public class SerializationUtilities {
}
}
-
/**
* Serializes the plan.
*
* @param plan The plan, such as QueryPlan, MapredWork, etc.
* @param out The stream to write to.
- * @param conf to pick which serialization format is desired.
*/
- public static void serializePlan(Object plan, OutputStream out, Configuration conf) {
- serializePlan(plan, out, conf, false);
+ public static void serializePlan(Object plan, OutputStream out) {
+ serializePlan(plan, out, false);
}
- public static void serializePlan(Kryo kryo, Object plan, OutputStream out, Configuration conf) {
- serializePlan(kryo, plan, out, conf, false);
+ public static void serializePlan(Kryo kryo, Object plan, OutputStream out) {
+ serializePlan(kryo, plan, out, false);
}
- private static void serializePlan(Object plan, OutputStream out, Configuration conf,
- boolean cloningPlan) {
+ private static void serializePlan(Object plan, OutputStream out, boolean cloningPlan) {
Kryo kryo = borrowKryo();
try {
- serializePlan(kryo, plan, out, conf, cloningPlan);
+ serializePlan(kryo, plan, out, cloningPlan);
} finally {
releaseKryo(kryo);
}
}
- private static void serializePlan(Kryo kryo, Object plan, OutputStream out, Configuration conf,
- boolean cloningPlan) {
+ private static void serializePlan(Kryo kryo, Object plan, OutputStream out, boolean cloningPlan) {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SERIALIZE_PLAN);
- String serializationType = conf.get(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
- LOG.info("Serializing " + plan.getClass().getSimpleName() + " via " + serializationType);
- if ("javaXML".equalsIgnoreCase(serializationType)) {
- serializeObjectByJavaXML(plan, out);
+ LOG.info("Serializing " + plan.getClass().getSimpleName() + " using kryo");
+ if (cloningPlan) {
+ serializeObjectByKryo(kryo, plan, out);
} else {
- if (cloningPlan) {
- serializeObjectByKryo(kryo, plan, out);
- } else {
- serializeObjectByKryo(kryo, plan, out);
- }
+ serializeObjectByKryo(kryo, plan, out);
}
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SERIALIZE_PLAN);
}
@@ -338,24 +309,21 @@ public class SerializationUtilities {
*
* @param in The stream to read from.
* @param planClass class of plan
- * @param conf configuration
* @return The plan, such as QueryPlan, MapredWork, etc.
*/
- public static <T> T deserializePlan(InputStream in, Class<T> planClass, Configuration conf) {
- return deserializePlan(in, planClass, conf, false);
+ public static <T> T deserializePlan(InputStream in, Class<T> planClass) {
+ return deserializePlan(in, planClass, false);
}
- public static <T> T deserializePlan(Kryo kryo, InputStream in, Class<T> planClass,
- Configuration conf) {
- return deserializePlan(kryo, in, planClass, conf, false);
+ public static <T> T deserializePlan(Kryo kryo, InputStream in, Class<T> planClass) {
+ return deserializePlan(kryo, in, planClass, false);
}
- private static <T> T deserializePlan(InputStream in, Class<T> planClass, Configuration conf,
- boolean cloningPlan) {
+ private static <T> T deserializePlan(InputStream in, Class<T> planClass, boolean cloningPlan) {
Kryo kryo = borrowKryo();
T result = null;
try {
- result = deserializePlan(kryo, in, planClass, conf, cloningPlan);
+ result = deserializePlan(kryo, in, planClass, cloningPlan);
} finally {
releaseKryo(kryo);
}
@@ -363,20 +331,15 @@ public class SerializationUtilities {
}
private static <T> T deserializePlan(Kryo kryo, InputStream in, Class<T> planClass,
- Configuration conf, boolean cloningPlan) {
+ boolean cloningPlan) {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DESERIALIZE_PLAN);
T plan;
- String serializationType = conf.get(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
- LOG.info("Deserializing " + planClass.getSimpleName() + " via " + serializationType);
- if ("javaXML".equalsIgnoreCase(serializationType)) {
- plan = deserializeObjectByJavaXML(in);
+ LOG.info("Deserializing " + planClass.getSimpleName() + " using kryo");
+ if (cloningPlan) {
+ plan = deserializeObjectByKryo(kryo, in, planClass);
} else {
- if (cloningPlan) {
- plan = deserializeObjectByKryo(kryo, in, planClass);
- } else {
- plan = deserializeObjectByKryo(kryo, in, planClass);
- }
+ plan = deserializeObjectByKryo(kryo, in, planClass);
}
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DESERIALIZE_PLAN);
return plan;
@@ -392,10 +355,9 @@ public class SerializationUtilities {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.CLONE_PLAN);
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
- Configuration conf = new HiveConf();
- serializePlan(plan, baos, conf, true);
+ serializePlan(plan, baos, true);
MapredWork newPlan = deserializePlan(new ByteArrayInputStream(baos.toByteArray()),
- MapredWork.class, conf, true);
+ MapredWork.class, true);
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.CLONE_PLAN);
return newPlan;
}
@@ -409,201 +371,14 @@ public class SerializationUtilities {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.CLONE_PLAN);
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
- Configuration conf = new HiveConf();
- serializePlan(plan, baos, conf, true);
+ serializePlan(plan, baos, true);
BaseWork newPlan = deserializePlan(new ByteArrayInputStream(baos.toByteArray()),
- plan.getClass(), conf, true);
+ plan.getClass(), true);
perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.CLONE_PLAN);
return newPlan;
}
/**
- * Serialize the object. This helper function mainly makes sure that enums,
- * counters, etc are handled properly.
- */
- private static void serializeObjectByJavaXML(Object plan, OutputStream out) {
- XMLEncoder e = new XMLEncoder(out);
- e.setExceptionListener(new ExceptionListener() {
- @Override
- public void exceptionThrown(Exception e) {
- LOG.warn(org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw new RuntimeException("Cannot serialize object", e);
- }
- });
- // workaround for java 1.5
- e.setPersistenceDelegate(PlanUtils.ExpressionTypes.class, new EnumDelegate());
- e.setPersistenceDelegate(GroupByDesc.Mode.class, new EnumDelegate());
- e.setPersistenceDelegate(java.sql.Date.class, new DatePersistenceDelegate());
- e.setPersistenceDelegate(Timestamp.class, new TimestampPersistenceDelegate());
-
- e.setPersistenceDelegate(org.datanucleus.store.types.backed.Map.class, new MapDelegate());
- e.setPersistenceDelegate(org.datanucleus.store.types.backed.List.class, new ListDelegate());
- e.setPersistenceDelegate(CommonToken.class, new CommonTokenDelegate());
- e.setPersistenceDelegate(Path.class, new PathDelegate());
-
- e.writeObject(plan);
- e.close();
- }
-
-
- /**
- * Java 1.5 workaround. From http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5015403
- */
- public static class EnumDelegate extends DefaultPersistenceDelegate {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- return new Expression(Enum.class, "valueOf", new Object[] {oldInstance.getClass(),
- ((Enum<?>) oldInstance).name()});
- }
-
- @Override
- protected boolean mutatesTo(Object oldInstance, Object newInstance) {
- return oldInstance == newInstance;
- }
- }
-
- public static class MapDelegate extends DefaultPersistenceDelegate {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- Map oldMap = (Map) oldInstance;
- HashMap newMap = new HashMap(oldMap);
- return new Expression(newMap, HashMap.class, "new", new Object[] {});
- }
-
- @Override
- protected boolean mutatesTo(Object oldInstance, Object newInstance) {
- return false;
- }
-
- @Override
- protected void initialize(Class<?> type, Object oldInstance, Object newInstance, Encoder out) {
- java.util.Collection oldO = (java.util.Collection) oldInstance;
- java.util.Collection newO = (java.util.Collection) newInstance;
-
- if (newO.size() != 0) {
- out.writeStatement(new Statement(oldInstance, "clear", new Object[] {}));
- }
- for (Iterator i = oldO.iterator(); i.hasNext();) {
- out.writeStatement(new Statement(oldInstance, "add", new Object[] {i.next()}));
- }
- }
- }
-
- public static class SetDelegate extends DefaultPersistenceDelegate {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- Set oldSet = (Set) oldInstance;
- HashSet newSet = new HashSet(oldSet);
- return new Expression(newSet, HashSet.class, "new", new Object[] {});
- }
-
- @Override
- protected boolean mutatesTo(Object oldInstance, Object newInstance) {
- return false;
- }
-
- @Override
- protected void initialize(Class<?> type, Object oldInstance, Object newInstance, Encoder out) {
- java.util.Collection oldO = (java.util.Collection) oldInstance;
- java.util.Collection newO = (java.util.Collection) newInstance;
-
- if (newO.size() != 0) {
- out.writeStatement(new Statement(oldInstance, "clear", new Object[] {}));
- }
- for (Iterator i = oldO.iterator(); i.hasNext();) {
- out.writeStatement(new Statement(oldInstance, "add", new Object[] {i.next()}));
- }
- }
-
- }
-
- public static class ListDelegate extends DefaultPersistenceDelegate {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- List oldList = (List) oldInstance;
- ArrayList newList = new ArrayList(oldList);
- return new Expression(newList, ArrayList.class, "new", new Object[] {});
- }
-
- @Override
- protected boolean mutatesTo(Object oldInstance, Object newInstance) {
- return false;
- }
-
- @Override
- protected void initialize(Class<?> type, Object oldInstance, Object newInstance, Encoder out) {
- java.util.Collection oldO = (java.util.Collection) oldInstance;
- java.util.Collection newO = (java.util.Collection) newInstance;
-
- if (newO.size() != 0) {
- out.writeStatement(new Statement(oldInstance, "clear", new Object[] {}));
- }
- for (Iterator i = oldO.iterator(); i.hasNext();) {
- out.writeStatement(new Statement(oldInstance, "add", new Object[] {i.next()}));
- }
- }
-
- }
-
- /**
- * DatePersistenceDelegate. Needed to serialize java.util.Date
- * since it is not serialization friendly.
- * Also works for java.sql.Date since it derives from java.util.Date.
- */
- public static class DatePersistenceDelegate extends PersistenceDelegate {
-
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- Date dateVal = (Date)oldInstance;
- Object[] args = { dateVal.getTime() };
- return new Expression(dateVal, dateVal.getClass(), "new", args);
- }
-
- @Override
- protected boolean mutatesTo(Object oldInstance, Object newInstance) {
- if (oldInstance == null || newInstance == null) {
- return false;
- }
- return oldInstance.getClass() == newInstance.getClass();
- }
- }
-
- /**
- * TimestampPersistenceDelegate. Needed to serialize java.sql.Timestamp since
- * it is not serialization friendly.
- */
- public static class TimestampPersistenceDelegate extends DatePersistenceDelegate {
- @Override
- protected void initialize(Class<?> type, Object oldInstance, Object newInstance, Encoder out) {
- Timestamp ts = (Timestamp)oldInstance;
- Object[] args = { ts.getNanos() };
- Statement stmt = new Statement(oldInstance, "setNanos", args);
- out.writeStatement(stmt);
- }
- }
-
- /**
- * Need to serialize org.antlr.runtime.CommonToken
- */
- public static class CommonTokenDelegate extends PersistenceDelegate {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- CommonToken ct = (CommonToken)oldInstance;
- Object[] args = {ct.getType(), ct.getText()};
- return new Expression(ct, ct.getClass(), "new", args);
- }
- }
-
- public static class PathDelegate extends PersistenceDelegate {
- @Override
- protected Expression instantiate(Object oldInstance, Encoder out) {
- Path p = (Path)oldInstance;
- Object[] args = {p.toString()};
- return new Expression(p, p.getClass(), "new", args);
- }
- }
-
- /**
* @param plan Usually of type MapredWork, MapredLocalWork etc.
* @param out stream in which serialized plan is written into
*/
@@ -614,23 +389,6 @@ public class SerializationUtilities {
output.close();
}
- /**
- * De-serialize an object. This helper function mainly makes sure that enums,
- * counters, etc are handled properly.
- */
- @SuppressWarnings("unchecked")
- private static <T> T deserializeObjectByJavaXML(InputStream in) {
- XMLDecoder d = null;
- try {
- d = new XMLDecoder(in, null, null);
- return (T) d.readObject();
- } finally {
- if (null != d) {
- d.close();
- }
- }
- }
-
private static <T> T deserializeObjectByKryo(Kryo kryo, InputStream in, Class<T> clazz ) {
Input inp = new Input(in);
kryo.setClassLoader(Utilities.getSessionSpecifiedClassLoader());
@@ -639,13 +397,13 @@ public class SerializationUtilities {
return t;
}
- public static List<Operator<?>> cloneOperatorTree(Configuration conf, List<Operator<?>> roots) {
+ public static List<Operator<?>> cloneOperatorTree(List<Operator<?>> roots) {
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
- serializePlan(roots, baos, conf, true);
+ serializePlan(roots, baos, true);
@SuppressWarnings("unchecked")
List<Operator<?>> result =
deserializePlan(new ByteArrayInputStream(baos.toByteArray()),
- roots.getClass(), conf, true);
+ roots.getClass(), true);
return result;
}
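The cloning helpers shed their Configuration parameter as well; GenTezUtils, GenSparkUtils, GenSparkSkewJoinProcessor and SplitOpTreeForDPP in the hunks below are updated to match. A minimal sketch of the new call shape (the empty roots list is a placeholder for real operator roots):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.exec.SerializationUtilities;

    public class CloneTreeSketch {
      public static void main(String[] args) {
        List<Operator<?>> roots = new ArrayList<Operator<?>>();  // placeholder roots
        List<Operator<?>> clones = SerializationUtilities.cloneOperatorTree(roots);
        System.out.println("cloned " + clones.size() + " root(s)");
      }
    }
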
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index c8e7549..0eab63e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -48,10 +48,6 @@ import org.apache.hadoop.util.StringUtils;
public abstract class Task<T extends Serializable> implements Serializable, Node {
- static {
- PTFUtils.makeTransient(Task.class, "fetchSource");
- }
-
private static final long serialVersionUID = 1L;
public transient HashMap<String, Long> taskCounters;
public transient TaskHandle taskHandle;
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 54abdef..9491015 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -407,29 +407,29 @@ public final class Utilities {
if(MAP_PLAN_NAME.equals(name)){
if (ExecMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))){
- gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class, conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
} else if(MergeFileMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
- gWork = SerializationUtilities.deserializePlan(kryo, in, MergeFileWork.class, conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, MergeFileWork.class);
} else if(ColumnTruncateMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
- gWork = SerializationUtilities.deserializePlan(kryo, in, ColumnTruncateWork.class, conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, ColumnTruncateWork.class);
} else if(PartialScanMapper.class.getName().equals(conf.get(MAPRED_MAPPER_CLASS))) {
- gWork = SerializationUtilities.deserializePlan(kryo, in, PartialScanWork.class,conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, PartialScanWork.class);
} else {
throw new RuntimeException("unable to determine work from configuration ."
+ MAPRED_MAPPER_CLASS + " was "+ conf.get(MAPRED_MAPPER_CLASS)) ;
}
} else if (REDUCE_PLAN_NAME.equals(name)) {
if(ExecReducer.class.getName().equals(conf.get(MAPRED_REDUCER_CLASS))) {
- gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class, conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
} else {
throw new RuntimeException("unable to determine work from configuration ."
+ MAPRED_REDUCER_CLASS +" was "+ conf.get(MAPRED_REDUCER_CLASS)) ;
}
} else if (name.contains(MERGE_PLAN_NAME)) {
if (name.startsWith(MAPNAME)) {
- gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class, conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
} else if (name.startsWith(REDUCENAME)) {
- gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class, conf);
+ gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
} else {
throw new RuntimeException("Unknown work type: " + name);
}
@@ -528,7 +528,7 @@ public final class Utilities {
ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
try {
out = new DeflaterOutputStream(byteOut, new Deflater(Deflater.BEST_SPEED));
- SerializationUtilities.serializePlan(kryo, w, out, conf);
+ SerializationUtilities.serializePlan(kryo, w, out);
out.close();
out = null;
} finally {
@@ -542,7 +542,7 @@ public final class Utilities {
FileSystem fs = planPath.getFileSystem(conf);
try {
out = fs.create(planPath);
- SerializationUtilities.serializePlan(kryo, w, out, conf);
+ SerializationUtilities.serializePlan(kryo, w, out);
out.close();
out = null;
} finally {
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 2129bda..971dac9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -741,13 +741,12 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
int ret;
if (localtask) {
memoryMXBean = ManagementFactory.getMemoryMXBean();
- MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class,
- conf);
+ MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class);
MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
ret = ed.executeInProcess(new DriverContext());
} else {
- MapredWork plan = SerializationUtilities.deserializePlan(pathData, MapredWork.class, conf);
+ MapredWork plan = SerializationUtilities.deserializePlan(pathData, MapredWork.class);
ExecDriver ed = new ExecDriver(plan, conf, isSilent);
ret = ed.execute(new DriverContext());
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index cb70ac8..7ec5ed1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -178,7 +178,7 @@ public class MapRedTask extends ExecDriver implements Serializable {
OutputStream out = null;
try {
out = FileSystem.getLocal(conf).create(planPath);
- SerializationUtilities.serializePlan(plan, out, conf);
+ SerializationUtilities.serializePlan(plan, out);
out.close();
out = null;
} finally {
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
index cb7dfa1..f191572 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
@@ -160,7 +160,7 @@ public class MapredLocalTask extends Task<MapredLocalWork> implements Serializab
OutputStream out = null;
try {
out = FileSystem.getLocal(conf).create(planPath);
- SerializationUtilities.serializePlan(plan, out, conf);
+ SerializationUtilities.serializePlan(plan, out);
out.close();
out = null;
} finally {
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
index 41d3522..4805162 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
@@ -256,7 +256,7 @@ public class GenSparkSkewJoinProcessor {
List<Operator<?>> reducerList = new ArrayList<Operator<?>>();
reducerList.add(reduceWork.getReducer());
Operator<? extends OperatorDesc> reducer = SerializationUtilities.cloneOperatorTree(
- parseCtx.getConf(), reducerList).get(0);
+ reducerList).get(0);
Preconditions.checkArgument(reducer instanceof JoinOperator,
"Reducer should be join operator, but actually is " + reducer.getName());
JoinOperator cloneJoinOp = (JoinOperator) reducer;
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
index f656998..70912e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
@@ -28,7 +28,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Set;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator;
@@ -204,8 +203,7 @@ public class GenTezUtils {
}
// removes any union operator and clones the plan
- public static void removeUnionOperators(Configuration conf, GenTezProcContext context,
- BaseWork work)
+ public static void removeUnionOperators(GenTezProcContext context, BaseWork work)
throws SemanticException {
List<Operator<?>> roots = new ArrayList<Operator<?>>();
@@ -216,7 +214,7 @@ public class GenTezUtils {
roots.addAll(context.eventOperatorSet);
// need to clone the plan.
- List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(conf, roots);
+ List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(roots);
// we're cloning the operator plan but we're retaining the original work. That means
// that root operators have to be replaced with the cloned ops. The replacement map
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
index f563b99..147ff7e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
@@ -371,7 +371,7 @@ public class TezCompiler extends TaskCompiler {
// we need to clone some operator plans and remove union operators still
for (BaseWork w: procCtx.workWithUnionOperators) {
- GenTezUtils.removeUnionOperators(conf, procCtx, w);
+ GenTezUtils.removeUnionOperators(procCtx, w);
}
// then we make sure the file sink operators are set up right
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
index 924848f..aa33103 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
@@ -28,7 +28,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
@@ -188,8 +187,7 @@ public class GenSparkUtils {
}
// removes any union operator and clones the plan
- public void removeUnionOperators(Configuration conf, GenSparkProcContext context,
- BaseWork work)
+ public void removeUnionOperators(GenSparkProcContext context, BaseWork work)
throws SemanticException {
List<Operator<?>> roots = new ArrayList<Operator<?>>();
@@ -207,7 +205,7 @@ public class GenSparkUtils {
}
// need to clone the plan.
- List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(conf, roots);
+ List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(roots);
// Build a map to map the original FileSinkOperator and the cloned FileSinkOperators
// This map is used for set the stats flag for the cloned FileSinkOperators in later process
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 3673da4..08278de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -238,7 +238,7 @@ public class SparkCompiler extends TaskCompiler {
// we need to clone some operator plans and remove union operators still
for (BaseWork w : procCtx.workWithUnionOperators) {
- GenSparkUtils.getUtils().removeUnionOperators(conf, procCtx, w);
+ GenSparkUtils.getUtils().removeUnionOperators(procCtx, w);
}
// we need to fill MapWork with 'local' work and bucket information for SMB Join.
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
index 4bb661a..d4f58be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SplitOpTreeForDPP.java
@@ -107,8 +107,7 @@ public class SplitOpTreeForDPP implements NodeProcessor {
filterOp.setChildOperators(Utilities.makeList(selOp));
// Now clone the tree above selOp
- List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(
- context.parseContext.getConf(), roots);
+ List<Operator<?>> newRoots = SerializationUtilities.cloneOperatorTree(roots);
for (int i = 0; i < roots.size(); i++) {
TableScanOperator newTs = (TableScanOperator) newRoots.get(i);
TableScanOperator oldTs = (TableScanOperator) roots.get(i);
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
index 463da5d..bc67e5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
@@ -32,10 +32,6 @@ public class AbstractOperatorDesc implements OperatorDesc {
protected transient Map<String, String> opProps;
protected long memNeeded = 0;
- static {
- PTFUtils.makeTransient(AbstractOperatorDesc.class, "opProps");
- }
-
@Override
@Explain(skipHeader = true, displayName = "Statistics", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public Statistics getStatistics() {
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
index 1a0cdf8..1ecbaad 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
@@ -33,10 +33,6 @@ import org.apache.hadoop.util.ReflectionUtils;
*/
public class AggregationDesc implements java.io.Serializable {
- static {
- PTFUtils.makeTransient(AggregationDesc.class, "genericUDAFEvaluator");
- }
-
private static final long serialVersionUID = 1L;
private String genericUDAFName;
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
index 0226278..df153a2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
@@ -36,9 +36,6 @@ public class LoadFileDesc extends LoadDesc implements Serializable {
private String columnTypes;
private String destinationCreateTable;
- static {
- PTFUtils.makeTransient(LoadFileDesc.class, "targetDir");
- }
public LoadFileDesc() {
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
index 0627b9f..2b01712 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
@@ -37,9 +37,6 @@ public class LoadMultiFilesDesc implements Serializable {
private String columnTypes;
private transient List<Path> srcDirs;
- static {
- PTFUtils.makeTransient(LoadMultiFilesDesc.class, "targetDirs");
- }
public LoadMultiFilesDesc() {
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
index 3ec3b1f..c4b49b6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
@@ -19,20 +19,19 @@
package org.apache.hadoop.hive.ql.plan;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.exec.PTFUtils;
import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
import org.apache.hadoop.hive.ql.plan.ptf.PTFInputDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
import org.apache.hadoop.hive.ql.udf.ptf.Noop;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Explain(displayName = "PTF Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -42,7 +41,7 @@ public class PTFDesc extends AbstractOperatorDesc {
private static final Logger LOG = LoggerFactory.getLogger(PTFDesc.class.getName());
PartitionedTableFunctionDef funcDef;
- LeadLagInfo llInfo;
+ transient LeadLagInfo llInfo;
/*
* is this PTFDesc for a Map-Side PTF Operation?
*/
@@ -50,11 +49,6 @@ public class PTFDesc extends AbstractOperatorDesc {
transient Configuration cfg;
- static{
- PTFUtils.makeTransient(PTFDesc.class, "llInfo");
- PTFUtils.makeTransient(PTFDesc.class, "cfg");
- }
-
public PartitionedTableFunctionDef getFuncDef() {
return funcDef;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
index 43bf7c5..098aa89 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
@@ -41,10 +41,6 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
public class TableScanDesc extends AbstractOperatorDesc {
private static final long serialVersionUID = 1L;
- static {
- PTFUtils.makeTransient(TableScanDesc.class, "filterObject", "referencedColumns", "tableMetadata");
- }
-
private String alias;
private List<VirtualColumn> virtualCols;
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
index 02664a8..fa7fc76 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
@@ -31,10 +31,6 @@ public class PTFExpressionDef {
transient ExprNodeEvaluator exprEvaluator;
transient ObjectInspector OI;
- static{
- PTFUtils.makeTransient(PTFExpressionDef.class, "exprEvaluator", "OI");
- }
-
public PTFExpressionDef() {}
public PTFExpressionDef(PTFExpressionDef e) {
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
index 85d7f61..bc2ee83 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java
@@ -36,10 +36,6 @@ public class ShapeDetails {
transient RowResolver rr;
transient TypeCheckCtx typeCheckCtx;
- static {
- PTFUtils.makeTransient(ShapeDetails.class, "OI", "serde", "rr", "typeCheckCtx");
- }
-
public String getSerdeClassName() {
return serdeClassName;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
index 4740b81..351b593 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
@@ -41,11 +41,6 @@ public abstract class GenericUDFLeadLag extends GenericUDF {
transient Converter defaultValueConverter;
int amt;
- static {
- PTFUtils.makeTransient(GenericUDFLeadLag.class, "exprEvaluator", "pItr", "firstArgOI",
- "defaultArgOI", "defaultValueConverter");
- }
-
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
Object defaultVal = null;
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
index a0f5a7a..c76118b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
@@ -94,10 +94,6 @@ public abstract class TableFunctionEvaluator {
transient protected PTFPartition outputPartition;
transient protected boolean canAcceptInputAsStream;
- static {
- PTFUtils.makeTransient(TableFunctionEvaluator.class, "outputOI", "rawInputOI");
- }
-
public StructObjectInspector getOutputOI() {
return OI;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
index c1667c2..64db486 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
@@ -83,7 +83,7 @@ public class TestPlan extends TestCase {
JobConf job = new JobConf(TestPlan.class);
// serialize the configuration once ..
ByteArrayOutputStream baos = new ByteArrayOutputStream();
- SerializationUtilities.serializePlan(mrwork, baos, job);
+ SerializationUtilities.serializePlan(mrwork, baos);
baos.close();
String v1 = baos.toString();
@@ -101,7 +101,7 @@ public class TestPlan extends TestCase {
// serialize again
baos.reset();
- SerializationUtilities.serializePlan(mrwork2, baos, job);
+ SerializationUtilities.serializePlan(mrwork2, baos);
baos.close();
// verify that the two are equal
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 1ff7eb5..a4b948a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -1615,7 +1615,7 @@ public class TestInputOutputFormat {
Path mapXml = new Path(workDir, "map.xml");
localFs.delete(mapXml, true);
FSDataOutputStream planStream = localFs.create(mapXml);
- SerializationUtilities.serializePlan(mapWork, planStream, conf);
+ SerializationUtilities.serializePlan(mapWork, planStream);
planStream.close();
return conf;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/3e3d966f/ql/src/test/queries/clientpositive/cast_qualified_types.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cast_qualified_types.q b/ql/src/test/queries/clientpositive/cast_qualified_types.q
index fe0abd4..4f977c1 100644
--- a/ql/src/test/queries/clientpositive/cast_qualified_types.q
+++ b/ql/src/test/queries/clientpositive/cast_qualified_types.q
@@ -1,5 +1,3 @@
-set hive.plan.serialization.format=javaXML;
-
select
cast(key as decimal(10,2)) as c1,
cast(key as char(10)) as c2,