Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/09 15:53:50 UTC
svn commit: r1521110 [1/5] - in /hive/trunk:
common/src/java/org/apache/hadoop/hive/conf/ conf/
contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/ hcatalog/
ivy/ ql/ ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/q...
Author: hashutosh
Date: Mon Sep 9 13:53:47 2013
New Revision: 1521110
URL: http://svn.apache.org/r1521110
Log:
HIVE-1511
Summary: Hive Plan Serialization
Test Plan: Regression test suite
Reviewers: brock
Reviewed By: brock
Differential Revision: https://reviews.facebook.net/D12789
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/conf/hive-default.xml.template
hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java
hive/trunk/hcatalog/pom.xml
hive/trunk/ivy/ivysettings.xml
hive/trunk/ivy/libraries.properties
hive/trunk/ql/build.xml
hive/trunk/ql/ivy.xml
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input7.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf_case.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
hive/trunk/ql/src/test/results/compiler/plan/union.q.xml
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon Sep 9 13:53:47 2013
@@ -177,6 +177,7 @@ public class HiveConf extends Configurat
// QL execution stuff
SCRIPTWRAPPER("hive.exec.script.wrapper", null),
PLAN("hive.exec.plan", ""),
+ PLAN_SERIALIZATION("hive.plan.serialization.format","kryo"),
SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive-" + System.getProperty("user.name")),
LOCALSCRATCHDIR("hive.exec.local.scratchdir", System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")),
SUBMITVIACHILD("hive.exec.submitviachild", false),
Modified: hive/trunk/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Mon Sep 9 13:53:47 2013
@@ -1963,4 +1963,13 @@
</description>
</property>
+<property>
+ <name>hive.plan.serialization.format</name>
+ <value>kryo</value>
+ <description>
+ Query plan format serialization between client and task nodes.
+ Two supported values are : kryo and javaXML. Kryo is default.
+ </description>
+</property>
+
</configuration>
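For readers who want to flip the new switch without editing hive-default.xml, here is a minimal sketch of doing it on a Hadoop Configuration. The property name and the kryo/javaXML values come from the template above; the class name PlanFormatDemo is purely illustrative:

    import org.apache.hadoop.conf.Configuration;

    public class PlanFormatDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Kryo is the default; set javaXML to get the pre-commit behavior.
        conf.set("hive.plan.serialization.format", "javaXML");
        // serializePlan() in Utilities reads the value the same way, with
        // "kryo" as the fallback when the property is unset.
        String format = conf.get("hive.plan.serialization.format", "kryo");
        System.out.println("plan format: " + format);
      }
    }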
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java Mon Sep 9 13:53:47 2013
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.serde2.obj
*/
public class GenericUDTFCount2 extends GenericUDTF {
- Integer count = Integer.valueOf(0);
- Object forwardObj[] = new Object[1];
+ private transient Integer count = Integer.valueOf(0);
+ private transient Object forwardObj[] = new Object[1];
@Override
public void close() throws HiveException {
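The transient markers above matter because Kryo's default FieldSerializer skips transient fields, so per-row scratch state such as count and forwardObj is rebuilt on the task side instead of being shipped inside the plan. A minimal, self-contained sketch of that behavior (all names here are illustrative, not from the commit):

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    public class TransientDemo {
      static class Holder {
        int kept;              // serialized by Kryo
        transient int scratch; // skipped by Kryo's default FieldSerializer
      }

      public static void main(String[] args) {
        Holder h = new Holder();
        h.kept = 42;
        h.scratch = 99;
        Kryo kryo = new Kryo();
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Output out = new Output(bos);
        kryo.writeObject(out, h);
        out.close();
        Holder back = kryo.readObject(
            new Input(new ByteArrayInputStream(bos.toByteArray())), Holder.class);
        System.out.println(back.kept + " " + back.scratch); // prints: 42 0
      }
    }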
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java Mon Sep 9 13:53:47 2013
@@ -67,7 +67,7 @@ public class GenericUDTFExplode2 extends
fieldOIs);
}
- Object forwardObj[] = new Object[2];
+ private transient Object forwardObj[] = new Object[2];
@Override
public void process(Object[] o) throws HiveException {
Modified: hive/trunk/hcatalog/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/pom.xml?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/hcatalog/pom.xml (original)
+++ hive/trunk/hcatalog/pom.xml Mon Sep 9 13:53:47 2013
@@ -215,7 +215,16 @@
<enabled>false</enabled>
</snapshots>
</repository>
-
+ <repository>
+ <id>sonatype-snapshots</id>
+ <url>https://oss.sonatype.org/content/repositories/snapshots/</url>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ <snapshots>
+ <enabled>true</enabled>
+ </snapshots>
+ </repository>
</repositories>
<dependencies>
Modified: hive/trunk/ivy/ivysettings.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ivy/ivysettings.xml?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ivy/ivysettings.xml (original)
+++ hive/trunk/ivy/ivysettings.xml Mon Sep 9 13:53:47 2013
@@ -47,6 +47,10 @@
checkmodified="${ivy.checkmodified}"
changingPattern="${ivy.changingPattern}"/>
+ <ibiblio name="sonatype-snapshot" root="https://oss.sonatype.org/content/repositories/snapshots/" m2compatible="true"
+ checkmodified="${ivy.checkmodified}"
+ changingPattern="${ivy.changingPattern}"/>
+
<url name="datanucleus-repo" m2compatible="true">
<artifact pattern="${datanucleus.repo}/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]"/>
</url>
@@ -68,6 +72,7 @@
<resolver ref="maven2"/>
<resolver ref="datanucleus-repo"/>
<resolver ref="sourceforge"/>
+ <resolver ref="sonatype-snapshot"/>
</chain>
<chain name="internal" dual="true">
@@ -77,11 +82,13 @@
<resolver ref="maven2"/>
<resolver ref="datanucleus-repo"/>
<resolver ref="sourceforge"/>
+ <resolver ref="sonatype-snapshot"/>
</chain>
<chain name="external">
<resolver ref="maven2"/>
<resolver ref="datanucleus-repo"/>
+ <resolver ref="sonatype-snapshot"/>
</chain>
</resolvers>
Modified: hive/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/trunk/ivy/libraries.properties?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ivy/libraries.properties (original)
+++ hive/trunk/ivy/libraries.properties Mon Sep 9 13:53:47 2013
@@ -51,6 +51,7 @@ jetty.version=6.1.26
jline.version=0.9.94
json.version=20090211
junit.version=4.10
+kryo.version=2.22-SNAPSHOT
libfb303.version=0.9.0
libthrift.version=0.9.0
log4j.version=1.2.16
Modified: hive/trunk/ql/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/build.xml?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/build.xml (original)
+++ hive/trunk/ql/build.xml Mon Sep 9 13:53:47 2013
@@ -250,6 +250,15 @@
<exclude name="META-INF/MANIFEST.MF"/>
</patternset>
</unzip>
+ <unzip
+ src="${build.ivy.lib.dir}/default/kryo-${kryo.version}.jar"
+ dest="${build.dir.hive}/kryo/classes">
+ <patternset>
+ <exclude name="META-INF"/>
+ <exclude name="META-INF/MANIFEST.MF"/>
+ </patternset>
+ </unzip>
+
<unzip
src="${build.ivy.lib.dir}/default/protobuf-java-${protobuf.version}.jar"
dest="${build.dir.hive}/protobuf-java/classes">
@@ -298,7 +307,9 @@
<fileset dir="${build.dir.hive}/ql/classes" includes="**/*.class,**/*.properties"/>
<fileset dir="${build.dir.hive}/serde/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/thrift/classes" includes="**/*.class"/>
- <fileset dir="${build.dir.hive}/commons-lang/classes" includes="**/StringUtils.class,**/WordUtils.class"/>
+
+ <fileset dir="${build.dir.hive}/kryo/classes" includes="**/*.class"/>
+ <fileset dir="${build.dir.hive}/commons-lang/classes" includes="**/StringUtils.class,**/WordUtils.class"/>
<fileset dir="${build.dir.hive}/json/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/avro/classes" includes="**/*.class"/>
<fileset dir="${build.dir.hive}/avro-mapred/classes" includes="**/*.class"/>
Modified: hive/trunk/ql/ivy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/ivy.xml?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/ivy.xml (original)
+++ hive/trunk/ql/ivy.xml Mon Sep 9 13:53:47 2013
@@ -46,6 +46,8 @@
rev="${protobuf.version}" transitive="false"/>
<dependency org="org.iq80.snappy" name="snappy"
rev="${snappy.version}" transitive="false"/>
+ <dependency org="com.esotericsoftware.kryo" name="kryo"
+ rev="${kryo.version}" />
<dependency org="org.json" name="json" rev="${json.version}"/>
<dependency org="commons-collections" name="commons-collections" rev="${commons-collections.version}"/>
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Mon Sep 9 13:53:47 2013
@@ -465,12 +465,12 @@ public class Driver implements CommandPr
// serialize the queryPlan
FileOutputStream fos = new FileOutputStream(queryPlanFileName);
- Utilities.serializeObject(plan, fos);
+ Utilities.serializePlan(plan, fos, conf);
fos.close();
// deserialize the queryPlan
FileInputStream fis = new FileInputStream(queryPlanFileName);
- QueryPlan newPlan = Utilities.deserializeObject(fis);
+ QueryPlan newPlan = Utilities.deserializePlan(fis, QueryPlan.class, conf);
fis.close();
// Use the deserialized plan
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java Mon Sep 9 13:53:47 2013
@@ -58,6 +58,8 @@ public class ColumnInfo implements Seria
private boolean isHiddenVirtualCol;
+ private String typeName;
+
public ColumnInfo() {
}
@@ -94,6 +96,7 @@ public class ColumnInfo implements Seria
this.tabAlias = tabAlias;
this.isVirtualCol = isVirtualCol;
this.isHiddenVirtualCol = isHiddenVirtualCol;
+ this.typeName = getType().getTypeName();
}
public ColumnInfo(ColumnInfo columnInfo) {
@@ -104,6 +107,15 @@ public class ColumnInfo implements Seria
this.isVirtualCol = columnInfo.getIsVirtualCol();
this.isHiddenVirtualCol = columnInfo.isHiddenVirtualCol();
this.setType(columnInfo.getType());
+ this.typeName = columnInfo.getType().getTypeName();
+ }
+
+ public String getTypeName() {
+ return this.typeName;
+ }
+
+ public void setTypeName(String typeName) {
+ this.typeName = typeName;
}
public TypeInfo getType() {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java Mon Sep 9 13:53:47 2013
@@ -31,12 +31,12 @@ import org.apache.hadoop.hive.serde2.obj
*/
public class ExprNodeColumnEvaluator extends ExprNodeEvaluator<ExprNodeColumnDesc> {
- transient boolean simpleCase;
- transient StructObjectInspector inspector;
- transient StructField field;
- transient StructObjectInspector[] inspectors;
- transient StructField[] fields;
- transient boolean[] unionField;
+ private transient boolean simpleCase;
+ private transient StructObjectInspector inspector;
+ private transient StructField field;
+ private transient StructObjectInspector[] inspectors;
+ private transient StructField[] fields;
+ private transient boolean[] unionField;
public ExprNodeColumnEvaluator(ExprNodeColumnDesc expr) {
super(expr);
@@ -61,18 +61,17 @@ public class ExprNodeColumnEvaluator ext
fields = new StructField[names.length];
unionField = new boolean[names.length];
int unionIndex = -1;
-
for (int i = 0; i < names.length; i++) {
if (i == 0) {
inspectors[0] = (StructObjectInspector) rowInspector;
} else {
- if (unionIndex != -1) {
+ if (unionIndex == -1) {
+ inspectors[i] = (StructObjectInspector) fields[i - 1]
+ .getFieldObjectInspector();
+ } else {
inspectors[i] = (StructObjectInspector) (
(UnionObjectInspector)fields[i-1].getFieldObjectInspector()).
getObjectInspectors().get(unionIndex);
- } else {
- inspectors[i] = (StructObjectInspector) fields[i - 1]
- .getFieldObjectInspector();
}
}
// to support names like _colx:1._coly
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Mon Sep 9 13:53:47 2013
@@ -482,7 +482,7 @@ public final class FunctionRegistry {
Class<? extends UDF> UDFClass, boolean isOperator, String displayName) {
if (UDF.class.isAssignableFrom(UDFClass)) {
FunctionInfo fI = new FunctionInfo(isNative, displayName,
- new GenericUDFBridge(displayName, isOperator, UDFClass));
+ new GenericUDFBridge(displayName, isOperator, UDFClass.getName()));
mFunctions.put(functionName.toLowerCase(), fI);
} else {
throw new RuntimeException("Registering UDF Class " + UDFClass
@@ -1284,7 +1284,7 @@ public final class FunctionRegistry {
if (genericUDF instanceof GenericUDFBridge) {
GenericUDFBridge bridge = (GenericUDFBridge) genericUDF;
return new GenericUDFBridge(bridge.getUdfName(), bridge.isOperator(),
- bridge.getUdfClass());
+ bridge.getUdfClassName());
} else if (genericUDF instanceof GenericUDFMacro) {
GenericUDFMacro bridge = (GenericUDFMacro) genericUDF;
return new GenericUDFMacro(bridge.getMacroName(), bridge.getBody(),
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java Mon Sep 9 13:53:47 2013
@@ -38,7 +38,7 @@ public class HashTableDummyOperator exte
this.outputObjInspector = serde.getObjectInspector();
initializeChildren(hconf);
} catch (Exception e) {
- LOG.error("Generating output obj inspector from dummy object error");
+ LOG.error("Generating output obj inspector from dummy object error", e);
e.printStackTrace();
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Mon Sep 9 13:53:47 2013
@@ -58,7 +58,7 @@ public abstract class Operator<T extends
private static final long serialVersionUID = 1L;
- private Configuration configuration;
+ private transient Configuration configuration;
protected List<Operator<? extends OperatorDesc>> childOperators;
protected List<Operator<? extends OperatorDesc>> parentOperators;
protected String operatorId;
@@ -196,7 +196,7 @@ public abstract class Operator<T extends
}
// non-bean fields needed during compilation
- private transient RowSchema rowSchema;
+ private RowSchema rowSchema;
public void setSchema(RowSchema rowSchema) {
this.rowSchema = rowSchema;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java Mon Sep 9 13:53:47 2013
@@ -51,8 +51,10 @@ public class RowSchema implements Serial
@Override
public String toString() {
StringBuilder sb = new StringBuilder('(');
- for (ColumnInfo col: signature) {
- sb.append(col.toString());
+ if (signature != null) {
+ for (ColumnInfo col : signature) {
+ sb.append(col.toString());
+ }
}
sb.append(')');
return sb.toString();
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Mon Sep 9 13:53:47 2013
@@ -102,6 +102,8 @@ import org.apache.hadoop.hive.ql.ErrorMs
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
+import org.apache.hadoop.hive.ql.exec.mr.ExecReducer;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
@@ -112,6 +114,12 @@ import org.apache.hadoop.hive.ql.io.Hive
import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat;
import org.apache.hadoop.hive.ql.io.RCFile;
import org.apache.hadoop.hive.ql.io.ReworkMapredInputFormat;
+import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork;
+import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileMergeMapper;
+import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanMapper;
+import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanWork;
+import org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateMapper;
+import org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -166,6 +174,10 @@ import org.apache.hadoop.mapred.Sequence
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
+import com.esotericsoftware.kryo.Kryo;
+import com.esotericsoftware.kryo.io.Input;
+import com.esotericsoftware.kryo.io.Output;
+
/**
* Utilities.
*
@@ -253,7 +265,7 @@ public final class Utilities {
return (ReduceWork) getBaseWork(conf, REDUCE_PLAN_NAME);
}
- public static BaseWork getBaseWork(Configuration conf, String name) {
+ private static BaseWork getBaseWork(Configuration conf, String name) {
BaseWork gWork = null;
Path path = null;
try {
@@ -269,8 +281,25 @@ public final class Utilities {
localPath = new Path(name);
}
InputStream in = new FileInputStream(localPath.toUri().getPath());
- BaseWork ret = deserializePlan(in);
- gWork = ret;
+ if(MAP_PLAN_NAME.equals(name)){
+ if (ExecMapper.class.getName().equals(conf.get("mapred.mapper.class"))){
+ gWork = deserializePlan(in, MapWork.class, conf);
+ } else if(RCFileMergeMapper.class.getName().equals(conf.get("mapred.mapper.class"))) {
+ gWork = deserializePlan(in, MergeWork.class, conf);
+ } else if(ColumnTruncateMapper.class.getName().equals(conf.get("mapred.mapper.class"))) {
+ gWork = deserializePlan(in, ColumnTruncateWork.class, conf);
+ } else if(PartialScanMapper.class.getName().equals(conf.get("mapred.mapper.class"))) {
+ gWork = deserializePlan(in, PartialScanWork.class,conf);
+ } else {
+ assert false;
+ }
+ } else {
+ if(ExecReducer.class.getName().equals(conf.get("mapred.reducer.class"))) {
+ gWork = deserializePlan(in, ReduceWork.class, conf);
+ } else {
+ assert false;
+ }
+ }
gWorkMap.put(path, gWork);
}
return gWork;
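The mapper/reducer-class dispatch above exists because Kryo, unlike XMLDecoder, does not record the root object's type in the stream: readObject(Input, Class) must be told the concrete class up front, which is also why deserializePlan gains a planClass parameter in this commit. A stripped-down sketch of the shape of such a reader (the PlanReader/readWork names are ours, not Hive's):

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import java.io.InputStream;

    public final class PlanReader {
      // Caller must supply the concrete plan class, e.g. MapWork.class.
      static <T> T readWork(InputStream in, Class<T> planClass) {
        Input input = new Input(in);
        try {
          return new Kryo().readObject(input, planClass);
        } finally {
          input.close();
        }
      }
    }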
@@ -480,7 +509,7 @@ public final class Utilities {
// use the default file system of the conf
FileSystem fs = planPath.getFileSystem(conf);
FSDataOutputStream out = fs.create(planPath);
- serializePlan(w, out);
+ serializePlan(w, out, conf);
// Serialize the plan to the default hdfs instance
// Except for hadoop local mode execution where we should be
@@ -587,15 +616,38 @@ public final class Utilities {
}
}
+ /** Custom Kryo serializer for sql date, otherwise Kryo gets confused between
+ java.sql.Date and java.util.Date while deserializing
+ */
+ private static class SqlDateSerializer extends
+ com.esotericsoftware.kryo.Serializer<java.sql.Date> {
+
+ @Override
+ public java.sql.Date read(Kryo kryo, Input input, Class<java.sql.Date> clazz) {
+ return new java.sql.Date(input.readLong());
+ }
+
+ @Override
+ public void write(Kryo kryo, Output output, java.sql.Date sqlDate) {
+ output.writeLong(sqlDate.getTime());
+ }
+
+ }
+
/**
* Serializes the plan.
* @param plan The plan, such as QueryPlan, MapredWork, etc.
* @param out The stream to write to.
+ * @param conf to pick which serialization format is desired.
*/
- public static void serializePlan(Object plan, OutputStream out) {
+ public static void serializePlan(Object plan, OutputStream out, Configuration conf) {
PerfLogger perfLogger = PerfLogger.getPerfLogger();
perfLogger.PerfLogBegin(LOG, PerfLogger.SERIALIZE_PLAN);
- serializeObject(plan, out);
+ if(conf.get(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo").equals("javaXML")) {
+ serializeObjectByJavaXML(plan, out);
+ } else {
+ serializeObjectByKryo(plan, out);
+ }
perfLogger.PerfLogEnd(LOG, PerfLogger.SERIALIZE_PLAN);
}
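To see the SqlDateSerializer above doing its job, here is a minimal round-trip sketch; registering the serializer, as Utilities does in its thread-local Kryo below, keeps java.sql.Date from coming back as a java.util.Date. Class names here are illustrative:

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.Serializer;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    public class SqlDateRoundTrip {
      public static void main(String[] args) {
        Kryo kryo = new Kryo();
        kryo.register(java.sql.Date.class, new Serializer<java.sql.Date>() {
          @Override
          public void write(Kryo k, Output out, java.sql.Date d) {
            out.writeLong(d.getTime());
          }
          @Override
          public java.sql.Date read(Kryo k, Input in, Class<java.sql.Date> type) {
            return new java.sql.Date(in.readLong());
          }
        });
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Output out = new Output(bos);
        kryo.writeObject(out, new java.sql.Date(1378734827000L));
        out.close();
        java.sql.Date back = kryo.readObject(
            new Input(new ByteArrayInputStream(bos.toByteArray())), java.sql.Date.class);
        // back is a java.sql.Date with the exact same epoch millis.
        System.out.println(back.getTime());
      }
    }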
@@ -603,13 +655,19 @@ public final class Utilities {
* Deserializes the plan.
* @param in The stream to read from.
* @return The plan, such as QueryPlan, MapredWork, etc.
+ * @param To know what serialization format plan is in
*/
- public static <T> T deserializePlan(InputStream in) {
+ public static <T> T deserializePlan(InputStream in, Class<T> planClass, Configuration conf) {
PerfLogger perfLogger = PerfLogger.getPerfLogger();
perfLogger.PerfLogBegin(LOG, PerfLogger.DESERIALIZE_PLAN);
- T result = deserializeObject(in);
+ T plan;
+ if(conf.get(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo").equals("javaXML")) {
+ plan = deserializeObjectByJavaXML(in);
+ } else {
+ plan = deserializeObjectByKryo(in, planClass);
+ }
perfLogger.PerfLogEnd(LOG, PerfLogger.DESERIALIZE_PLAN);
- return result;
+ return plan;
}
/**
@@ -617,22 +675,23 @@ public final class Utilities {
* @param plan The plan.
* @return The clone.
*/
- public static <T> T clonePlan(T plan) {
- // TODO: need proper clone. Meanwhiel, let's at least keep this horror in one place
+ public static MapredWork clonePlan(MapredWork plan) {
+ // TODO: need proper clone. Meanwhile, let's at least keep this horror in one place
PerfLogger perfLogger = PerfLogger.getPerfLogger();
perfLogger.PerfLogBegin(LOG, PerfLogger.CLONE_PLAN);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
- Utilities.serializeObject(plan, baos);
- T copy = Utilities.deserializeObject(new ByteArrayInputStream(baos.toByteArray()));
+ Utilities.serializeObjectByJavaXML(plan, baos);
+ MapredWork newPlan = Utilities.deserializeObjectByJavaXML(
+ new ByteArrayInputStream(baos.toByteArray()));
perfLogger.PerfLogEnd(LOG, PerfLogger.CLONE_PLAN);
- return copy;
+ return newPlan;
}
/**
* Serialize the object. This helper function mainly makes sure that enums,
* counters, etc are handled properly.
*/
- public static void serializeObject(Object plan, OutputStream out) {
+ private static void serializeObjectByJavaXML(Object plan, OutputStream out) {
XMLEncoder e = new XMLEncoder(out);
e.setExceptionListener(new ExceptionListener() {
public void exceptionThrown(Exception e) {
@@ -655,11 +714,21 @@ public final class Utilities {
}
/**
+ * @param plan Usually of type MapredWork, MapredLocalWork etc.
+ * @param out stream in which serialized plan is written into
+ */
+ private static void serializeObjectByKryo(Object plan, OutputStream out) {
+ Output output = new Output(out);
+ kryo.get().writeObject(output, plan);
+ output.close();
+ }
+
+ /**
* De-serialize an object. This helper function mainly makes sure that enums,
* counters, etc are handled properly.
*/
@SuppressWarnings("unchecked")
- public static <T> T deserializeObject(InputStream in) {
+ private static <T> T deserializeObjectByJavaXML(InputStream in) {
XMLDecoder d = null;
try {
d = new XMLDecoder(in, null, null);
@@ -671,6 +740,27 @@ public final class Utilities {
}
}
+ private static <T> T deserializeObjectByKryo(InputStream in, Class<T> clazz ) {
+ Input inp = new Input(in);
+ T t = kryo.get().readObject(inp,clazz);
+ inp.close();
+ return t;
+ }
+
+ // Kryo is not thread-safe,
+ // Also new Kryo() is expensive, so we want to do it just once.
+ private static ThreadLocal<Kryo> kryo = new ThreadLocal<Kryo>() {
+
+ @Override
+ protected synchronized Kryo initialValue() {
+ Kryo kryo = new Kryo();
+ kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
+ kryo.register(java.sql.Date.class, new SqlDateSerializer());
+ return kryo;
+ };
+ };
+
+
public static TableDesc defaultTd;
static {
// by default we expect ^A separated strings
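Since Kryo instances are not thread-safe and are expensive to construct, the thread-local above gives each thread one lazily built, reusable instance. A small sketch demonstrating that two threads see distinct instances through the same accessor (names illustrative):

    import com.esotericsoftware.kryo.Kryo;

    public class KryoPerThread {
      private static final ThreadLocal<Kryo> KRYO = new ThreadLocal<Kryo>() {
        @Override
        protected Kryo initialValue() {
          Kryo k = new Kryo();
          k.setClassLoader(Thread.currentThread().getContextClassLoader());
          return k;
        }
      };

      public static void main(String[] args) throws InterruptedException {
        Runnable r = new Runnable() {
          public void run() {
            // Each thread builds and caches its own Kryo; no locking needed.
            System.out.println(Thread.currentThread().getName() + " -> "
                + System.identityHashCode(KRYO.get()));
          }
        };
        Thread t1 = new Thread(r, "t1");
        Thread t2 = new Thread(r, "t2");
        t1.start(); t2.start();
        t1.join(); t2.join();
      }
    }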
@@ -2135,7 +2225,7 @@ public final class Utilities {
if (columnTypes.length() > 0) {
columnTypes.append(",");
}
- columnTypes.append(colInfo.getType().getTypeName());
+ columnTypes.append(colInfo.getTypeName());
}
String columnTypesString = columnTypes.toString();
jobConf.set(serdeConstants.LIST_COLUMN_TYPES, columnTypesString);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Mon Sep 9 13:53:47 2013
@@ -716,12 +716,12 @@ public class ExecDriver extends Task<Map
int ret;
if (localtask) {
memoryMXBean = ManagementFactory.getMemoryMXBean();
- MapredLocalWork plan = (MapredLocalWork) Utilities.deserializePlan(pathData);
+ MapredLocalWork plan = Utilities.deserializePlan(pathData, MapredLocalWork.class, conf);
MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
ret = ed.executeFromChildJVM(new DriverContext());
} else {
- MapredWork plan = (MapredWork) Utilities.deserializePlan(pathData);
+ MapredWork plan = Utilities.deserializePlan(pathData, MapredWork.class, conf);
ExecDriver ed = new ExecDriver(plan, conf, isSilent);
ret = ed.execute(new DriverContext());
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java Mon Sep 9 13:53:47 2013
@@ -35,10 +35,10 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.MapRedStats;
+import org.apache.hadoop.hive.ql.exec.Operator.ProgressCounter;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskHandle;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.Operator.ProgressCounter;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.plan.ReducerTimeStatsPerJob;
import org.apache.hadoop.hive.ql.session.SessionState;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java Mon Sep 9 13:53:47 2013
@@ -39,14 +39,11 @@ import org.apache.hadoop.hive.ql.DriverC
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.mapred.JobConf;
/**
* Extension of ExecDriver:
* - can optionally spawn a map-reduce task from a separate jvm
@@ -69,7 +66,6 @@ public class MapRedTask extends ExecDriv
private transient ContentSummary inputSummary = null;
private transient boolean runningViaChild = false;
- private transient boolean inputSizeEstimated = false;
private transient long totalInputFileSize;
private transient long totalInputNumFiles;
@@ -79,10 +75,6 @@ public class MapRedTask extends ExecDriv
super();
}
- public MapRedTask(MapredWork plan, JobConf job, boolean isSilent) throws HiveException {
- throw new RuntimeException("Illegal Constructor call");
- }
-
@Override
public int execute(DriverContext driverContext) {
@@ -181,7 +173,7 @@ public class MapRedTask extends ExecDriv
OutputStream out = FileSystem.getLocal(conf).create(planPath);
MapredWork plan = getWork();
LOG.info("Generating plan file " + planPath.toString());
- Utilities.serializePlan(plan, out);
+ Utilities.serializePlan(plan, out, conf);
String isSilent = "true".equalsIgnoreCase(System
.getProperty("test.silent")) ? "-nolog" : "";
@@ -408,7 +400,7 @@ public class MapRedTask extends ExecDriv
if (inputSummary == null) {
inputSummary = Utilities.getInputSummary(driverContext.getCtx(), work.getMapWork(), null);
}
- int reducers = Utilities.estimateNumberOfReducers(conf, inputSummary, work.getMapWork(),
+ int reducers = Utilities.estimateNumberOfReducers(conf, inputSummary, work.getMapWork(),
work.isFinalMapRed());
rWork.setNumReduceTasks(reducers);
console
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java Mon Sep 9 13:53:47 2013
@@ -134,14 +134,13 @@ public class MapredLocalTask extends Tas
String hiveJar = conf.getJar();
String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
- String libJarsOption;
// write out the plan to a local file
Path planPath = new Path(ctx.getLocalTmpFileURI(), "plan.xml");
OutputStream out = FileSystem.getLocal(conf).create(planPath);
MapredLocalWork plan = getWork();
LOG.info("Generating plan file " + planPath.toString());
- Utilities.serializePlan(plan, out);
+ Utilities.serializePlan(plan, out, conf);
String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent")) ? "-nolog" : "";
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingCtx.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingCtx.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingCtx.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingCtx.java Mon Sep 9 13:53:47 2013
@@ -36,19 +36,19 @@ import org.apache.hadoop.hive.ql.plan.Op
*/
public class BucketingSortingCtx implements NodeProcessorCtx {
- boolean disableBucketing;
+ private boolean disableBucketing;
// A mapping from an operator to the columns by which it's output is bucketed
- Map<Operator<? extends OperatorDesc>, List<BucketCol>> bucketedColsByOp;
+ private Map<Operator<? extends OperatorDesc>, List<BucketCol>> bucketedColsByOp;
// A mapping from a directory which a FileSinkOperator writes into to the columns by which that
// output is bucketed
- Map<String, List<BucketCol>> bucketedColsByDirectory;
+ private Map<String, List<BucketCol>> bucketedColsByDirectory;
// A mapping from an operator to the columns by which it's output is sorted
- Map<Operator<? extends OperatorDesc>, List<SortCol>> sortedColsByOp;
+ private Map<Operator<? extends OperatorDesc>, List<SortCol>> sortedColsByOp;
// A mapping from a directory which a FileSinkOperator writes into to the columns by which that
// output is sorted
- Map<String, List<SortCol>> sortedColsByDirectory;
+ private Map<String, List<SortCol>> sortedColsByDirectory;
public BucketingSortingCtx(boolean disableBucketing) {
this.disableBucketing = disableBucketing;
@@ -189,13 +189,18 @@ public class BucketingSortingCtx impleme
* to be constant for all equivalent columns.
*/
public static final class SortCol implements BucketSortCol, Serializable {
+
+ public SortCol() {
+ super();
+ }
+
private static final long serialVersionUID = 1L;
// Equivalent aliases for the column
- private final List<String> names = new ArrayList<String>();
+ private List<String> names = new ArrayList<String>();
// Indexes of those equivalent columns
- private final List<Integer> indexes = new ArrayList<Integer>();
+ private List<Integer> indexes = new ArrayList<Integer>();
// Sort order (+|-)
- private final char sortOrder;
+ private char sortOrder;
public SortCol(String name, int index, char sortOrder) {
this(sortOrder);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java Mon Sep 9 13:53:47 2013
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hive.ql.optimizer.physical;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java Mon Sep 9 13:53:47 2013
@@ -18,9 +18,7 @@
package org.apache.hadoop.hive.ql.optimizer.physical;
-import java.io.ByteArrayInputStream;
import java.io.Serializable;
-import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java Mon Sep 9 13:53:47 2013
@@ -163,7 +163,8 @@ public final class LocalMapJoinProcFacto
if (parent instanceof TableScanOperator) {
tbl = ((TableScanOperator) parent).getTableDesc();
} else {
- throw new SemanticException();
+ throw new SemanticException("Expected parent operator of type TableScanOperator." +
+ "Found " + parent.getClass().getName() + " instead.");
}
} else {
// get parent schema
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java Mon Sep 9 13:53:47 2013
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hive.ql.optimizer.physical;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
@@ -236,7 +234,6 @@ public class SortMergeJoinTaskDispatcher
public Task<? extends Serializable> processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask, Context context)
throws SemanticException {
-
// whether it contains a sort merge join operator
MapredWork currWork = currTask.getWork();
SMBMapJoinOperator originalSMBJoinOp = getSMBMapJoinOp(currWork);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java Mon Sep 9 13:53:47 2013
@@ -51,22 +51,22 @@ public class QBJoinTree implements Seria
// keeps track of the right-hand-side table name of the left-semi-join, and
// its list of join keys
- private final HashMap<String, ArrayList<ASTNode>> rhsSemijoin;
+ private transient final HashMap<String, ArrayList<ASTNode>> rhsSemijoin;
// join conditions
- private ArrayList<ArrayList<ASTNode>> expressions;
+ private transient ArrayList<ArrayList<ASTNode>> expressions;
// key index to nullsafe join flag
private ArrayList<Boolean> nullsafes;
// filters
- private ArrayList<ArrayList<ASTNode>> filters;
+ private transient ArrayList<ArrayList<ASTNode>> filters;
// outerjoin-pos = other-pos:filter-len, other-pos:filter-len, ...
private int[][] filterMap;
// filters for pushing
- private ArrayList<ArrayList<ASTNode>> filtersForPushing;
+ private transient ArrayList<ArrayList<ASTNode>> filtersForPushing;
// user asked for map-side join
private boolean mapSideJoin;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java Mon Sep 9 13:53:47 2013
@@ -249,7 +249,7 @@ public class ExprNodeGenericFuncDesc ext
if (genericUDF instanceof GenericUDFBridge) {
GenericUDFBridge bridge = (GenericUDFBridge) genericUDF;
GenericUDFBridge bridge2 = (GenericUDFBridge) dest.getGenericUDF();
- if (!bridge.getUdfClass().equals(bridge2.getUdfClass())
+ if (!bridge.getUdfClassName().equals(bridge2.getUdfClassName())
|| !bridge.getUdfName().equals(bridge2.getUdfName())
|| bridge.isOperator() != bridge2.isOperator()) {
return false;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java Mon Sep 9 13:53:47 2013
@@ -52,13 +52,13 @@ import org.apache.hadoop.mapred.JobConf;
* distributed on the cluster. The ExecMapper will ultimately deserialize this
* class on the data nodes and setup it's operator pipeline accordingly.
*
- * This class is also used in the explain command any property with the
+ * This class is also used in the explain command any property with the
* appropriate annotation will be displayed in the explain output.
*/
@SuppressWarnings({"serial", "deprecation"})
public class MapWork extends BaseWork {
- private static transient final Log LOG = LogFactory.getLog(MapWork.class);
+ private static final Log LOG = LogFactory.getLog(MapWork.class);
private boolean hadoopSupportsSplittable;
@@ -102,15 +102,15 @@ public class MapWork extends BaseWork {
public static final int SAMPLING_ON_START = 2; // sampling on task running
// the following two are used for join processing
- private QBJoinTree joinTree;
- private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap;
+ private transient QBJoinTree joinTree;
+ private transient LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap;
private boolean mapperCannotSpanPartns;
// used to indicate the input is sorted, and so a BinarySearchRecordReader shoudl be used
private boolean inputFormatSorted = false;
- private transient boolean useBucketizedHiveInputFormat;
+ private boolean useBucketizedHiveInputFormat;
public MapWork() {
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java Mon Sep 9 13:53:47 2013
@@ -18,12 +18,10 @@
package org.apache.hadoop.hive.ql.plan;
-import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java Mon Sep 9 13:53:47 2013
@@ -52,7 +52,7 @@ public class PTFDesc extends AbstractOpe
*/
boolean isMapSide = false;
- HiveConf cfg;
+ transient HiveConf cfg;
static{
PTFUtils.makeTransient(PTFDesc.class, "llInfo");
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Mon Sep 9 13:53:47 2013
@@ -47,7 +47,7 @@ public class PartitionDesc implements Se
private java.util.Properties properties;
private String serdeClassName;
- private transient String baseFileName;
+ private String baseFileName;
public void setBaseFileName(String baseFileName) {
this.baseFileName = baseFileName;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java Mon Sep 9 13:53:47 2013
@@ -160,15 +160,15 @@ public class GenericUDAFnGrams implement
*/
public static class GenericUDAFnGramEvaluator extends GenericUDAFEvaluator {
// For PARTIAL1 and COMPLETE: ObjectInspectors for original data
- private StandardListObjectInspector outerInputOI;
- private StandardListObjectInspector innerInputOI;
- private PrimitiveObjectInspector inputOI;
- private PrimitiveObjectInspector nOI;
- private PrimitiveObjectInspector kOI;
- private PrimitiveObjectInspector pOI;
+ private transient StandardListObjectInspector outerInputOI;
+ private transient StandardListObjectInspector innerInputOI;
+ private transient PrimitiveObjectInspector inputOI;
+ private transient PrimitiveObjectInspector nOI;
+ private transient PrimitiveObjectInspector kOI;
+ private transient PrimitiveObjectInspector pOI;
// For PARTIAL2 and FINAL: ObjectInspectors for partial aggregations
- private StandardListObjectInspector loi;
+ private transient StandardListObjectInspector loi;
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java Mon Sep 9 13:53:47 2013
@@ -38,8 +38,8 @@ import org.apache.hadoop.hive.serde2.obj
@Description(name = "array",
value = "_FUNC_(n0, n1...) - Creates an array with the given elements ")
public class GenericUDFArray extends GenericUDF {
- private Converter[] converters;
- private ArrayList<Object> ret = new ArrayList<Object>();
+ private transient Converter[] converters;
+ private transient ArrayList<Object> ret = new ArrayList<Object>();
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java Mon Sep 9 13:53:47 2013
@@ -22,6 +22,7 @@ import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.ArrayList;
+import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -32,7 +33,6 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.util.ReflectionUtils;
/**
* GenericUDFBridge encapsulates UDF to provide the same interface as
@@ -55,29 +55,23 @@ public class GenericUDFBridge extends Ge
boolean isOperator;
/**
- * The underlying UDF class.
- */
- Class<? extends UDF> udfClass;
-
- /**
* The underlying UDF class Name.
*/
String udfClassName;
/**
- * Greate a new GenericUDFBridge object.
+ * Create a new GenericUDFBridge object.
*
* @param udfName
* The name of the corresponding udf.
* @param isOperator
- * @param udfClass
+ * @param udfClassName java class name of UDF
*/
public GenericUDFBridge(String udfName, boolean isOperator,
- Class<? extends UDF> udfClass) {
+ String udfClassName) {
this.udfName = udfName;
this.isOperator = isOperator;
- this.udfClass = udfClass;
- this.udfClassName = udfClass != null ? udfClass.getName() : null;
+ this.udfClassName = udfClassName;
}
// For Java serialization only
@@ -108,12 +102,12 @@ public class GenericUDFBridge extends Ge
this.isOperator = isOperator;
}
- public void setUdfClass(Class<? extends UDF> udfClass) {
- this.udfClass = udfClass;
- }
-
public Class<? extends UDF> getUdfClass() {
- return udfClass;
+ try {
+ return (Class<? extends UDF>) Class.forName(udfClassName, true, JavaUtils.getClassLoader());
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -137,12 +131,13 @@ public class GenericUDFBridge extends Ge
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
- if (udfClass == null) {
+ try {
+ udf = (UDF) Class.forName(udfClassName, true, JavaUtils.getClassLoader()).newInstance();
+ } catch (Exception e) {
throw new UDFArgumentException(
"The UDF implementation class '" + udfClassName
+ "' is not present in the class path");
}
- udf = (UDF) ReflectionUtils.newInstance(udfClass, null);
// Resolve for the method based on argument types
ArrayList<TypeInfo> argumentTypeInfos = new ArrayList<TypeInfo>(
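With the Class<? extends UDF> field removed, the bridge keeps only the class name and resolves
the implementation lazily through the session-aware classloader, so jars registered via ADD JAR
remain visible when the plan is deserialized inside a task. A hedged sketch of that lookup as a
standalone helper (UdfClassByName is hypothetical; only JavaUtils.getClassLoader() comes from
this commit):

    import org.apache.hadoop.hive.common.JavaUtils;

    public final class UdfClassByName {
      private UdfClassByName() {
      }

      // Resolve a class by name against the classloader Hive considers
      // current; it follows the thread context classloader and so sees
      // jars added to the session after the plan was compiled.
      public static Class<?> resolve(String className) {
        try {
          return Class.forName(className, true, JavaUtils.getClassLoader());
        } catch (ClassNotFoundException e) {
          throw new RuntimeException("class not found: " + className, e);
        }
      }
    }

Dropping the Class reference also keeps java.lang.Class instances out of the serialized plan
entirely, sparing both serialization formats from encoding a live class object tied to the
compiling JVM's classloader.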
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java Mon Sep 9 13:53:47 2013
@@ -54,10 +54,10 @@ import org.apache.hadoop.io.Text;
+ " '12,332.1235'")
public class GenericUDFFormatNumber extends GenericUDF {
private transient ObjectInspector[] argumentOIs;
- private final Text resultText = new Text();
- private final StringBuilder pattern = new StringBuilder("");
- private final DecimalFormat numberFormat = new DecimalFormat("");
- private int lastDValue = -1;
+ private transient final Text resultText = new Text();
+ private transient final StringBuilder pattern = new StringBuilder("");
+ private transient final DecimalFormat numberFormat = new DecimalFormat("");
+ private transient int lastDValue = -1;
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java Mon Sep 9 13:53:47 2013
@@ -36,11 +36,11 @@ import org.apache.hadoop.hive.serde2.obj
*/
@Description(name = "index", value = "_FUNC_(a, n) - Returns the n-th element of a ")
public class GenericUDFIndex extends GenericUDF {
- private MapObjectInspector mapOI;
+ private transient MapObjectInspector mapOI;
private boolean mapKeyPreferWritable;
- private ListObjectInspector listOI;
- private PrimitiveObjectInspector indexOI;
- private ObjectInspector returnOI;
+ private transient ListObjectInspector listOI;
+ private transient PrimitiveObjectInspector indexOI;
+ private transient ObjectInspector returnOI;
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java Mon Sep 9 13:53:47 2013
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.serde2.obj
value = "_FUNC_(name1, val1, name2, val2, ...) - Creates a struct with the given " +
"field names and values")
public class GenericUDFNamedStruct extends GenericUDF {
- Object[] ret;
+ private transient Object[] ret;
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStruct.java Mon Sep 9 13:53:47 2013
@@ -31,7 +31,7 @@ import org.apache.hadoop.hive.serde2.obj
@Description(name = "struct",
value = "_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field values")
public class GenericUDFStruct extends GenericUDF {
- Object[] ret;
+ private transient Object[] ret;
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java Mon Sep 9 13:53:47 2013
@@ -47,7 +47,7 @@ public class GenericUDFToUnixTimeStamp e
private transient TimestampObjectInspector inputTimestampOI;
private transient StringObjectInspector patternOI;
- private String lasPattern = "yyyy-MM-dd HH:mm:ss";
+ private transient String lasPattern = "yyyy-MM-dd HH:mm:ss";
private transient final SimpleDateFormat formatter = new SimpleDateFormat(lasPattern);
@Override
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java Mon Sep 9 13:53:47 2013
@@ -77,8 +77,8 @@ public class GenericUDTFExplode extends
fieldOIs);
}
- private final Object[] forwardListObj = new Object[1];
- private final Object[] forwardMapObj = new Object[2];
+ private transient final Object[] forwardListObj = new Object[1];
+ private transient final Object[] forwardMapObj = new Object[2];
@Override
public void process(Object[] o) throws HiveException {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java Mon Sep 9 13:53:47 2013
@@ -63,9 +63,10 @@ public class GenericUDTFJSONTuple extend
int numCols; // number of output columns
String[] paths; // array of path expressions, each of which corresponds to a column
- Text[] retCols; // array of returned column values
- Text[] cols; // object pool of non-null Text, avoid creating objects all the time
- Object[] nullCols; // array of null column values
+ private transient Text[] retCols; // array of returned column values
+ //object pool of non-null Text, avoid creating objects all the time
+ private transient Text[] cols;
+ private transient Object[] nullCols; // array of null column values
private transient ObjectInspector[] inputOIs; // input ObjectInspectors
boolean pathParsed = false;
boolean seenErrors = false;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java Mon Sep 9 13:53:47 2013
@@ -18,11 +18,11 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.net.URL;
import java.net.MalformedURLException;
+import java.net.URL;
import java.util.ArrayList;
-import java.util.regex.Pattern;
import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -65,13 +65,13 @@ public class GenericUDTFParseUrlTuple ex
PARTNAME[] partnames; // mapping from pathnames to enum PARTNAME
Text[] retCols; // array of returned column values
Text[] cols; // object pool of non-null Text, avoid creating objects all the time
- Object[] nullCols; // array of null column values
- ObjectInspector[] inputOIs; // input ObjectInspectors
+ private transient Object[] nullCols; // array of null column values
+ private transient ObjectInspector[] inputOIs; // input ObjectInspectors
boolean pathParsed = false;
boolean seenErrors = false;
- URL url = null;
- Pattern p = null;
- String lastKey = null;
+ private transient URL url = null;
+ private transient Pattern p = null;
+ private transient String lastKey = null;
@Override
public void close() throws HiveException {
@@ -122,7 +122,7 @@ public class GenericUDTFParseUrlTuple ex
// all returned type will be Text
fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
-
+
return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
@@ -137,7 +137,7 @@ public class GenericUDTFParseUrlTuple ex
if (!pathParsed) {
for (int i = 0;i < numCols; ++i) {
paths[i] = ((StringObjectInspector) inputOIs[i+1]).getPrimitiveJavaObject(o[i+1]);
-
+
if (paths[i] == null) {
partnames[i] = PARTNAME.NULLNAME;
} else if (paths[i].equals("HOST")) {
@@ -158,11 +158,11 @@ public class GenericUDTFParseUrlTuple ex
partnames[i] = PARTNAME.USERINFO;
} else if (paths[i].startsWith("QUERY:")) {
partnames[i] = PARTNAME.QUERY_WITH_KEY;
- paths[i] = paths[i].substring(6); // update paths[i], e.g., from "QUERY:id" to "id"
+ paths[i] = paths[i].substring(6); // update paths[i], e.g., from "QUERY:id" to "id"
} else {
partnames[i] = PARTNAME.NULLNAME;
- }
- }
+ }
+ }
pathParsed = true;
}
@@ -171,9 +171,9 @@ public class GenericUDTFParseUrlTuple ex
forward(nullCols);
return;
}
-
+
try {
- String ret = null;
+ String ret = null;
url = new URL(urlStr);
for (int i = 0; i < numCols; ++i) {
ret = evaluate(url, i);
@@ -188,7 +188,7 @@ public class GenericUDTFParseUrlTuple ex
}
forward(retCols);
- return;
+ return;
} catch (MalformedURLException e) {
// parsing error, invalid url string
if (!seenErrors) {
@@ -204,10 +204,11 @@ public class GenericUDTFParseUrlTuple ex
public String toString() {
return "parse_url_tuple";
}
-
+
private String evaluate(URL url, int index) {
- if (url == null || index < 0 || index >= partnames.length)
+ if (url == null || index < 0 || index >= partnames.length) {
return null;
+ }
switch (partnames[index]) {
case HOST : return url.getHost();
@@ -239,5 +240,5 @@ public class GenericUDTFParseUrlTuple ex
return m.group(2);
}
return null;
- }
+ }
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java Mon Sep 9 13:53:47 2013
@@ -48,7 +48,7 @@ public class GenericUDTFStack extends Ge
}
private transient List<ObjectInspector> argOIs = new ArrayList<ObjectInspector>();
- Object[] forwardObj = null;
+ private transient Object[] forwardObj = null;
private transient ArrayList<ReturnObjectInspectorResolver> returnOIResolvers =
new ArrayList<ReturnObjectInspectorResolver>();
IntWritable numRows = null;
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Mon Sep 9 13:53:47 2013
@@ -198,7 +198,7 @@ public class QTestUtil {
public String getLogDirectory() {
return logDir;
}
-
+
private String getHadoopMainVersion(String input) {
if (input == null) {
return null;
@@ -874,17 +874,21 @@ public class QTestUtil {
public int checkPlan(String tname, List<Task<? extends Serializable>> tasks) throws Exception {
- if (tasks != null) {
- File planDir = new File(outDir, "plan");
- String planFile = outPath(planDir.toString(), tname + ".xml");
+ if (tasks == null) {
+ throw new Exception("Plan is null");
+ }
+ File planDir = new File(outDir, "plan");
+ String planFile = outPath(planDir.toString(), tname + ".xml");
- File outf = null;
- outf = new File(logDir);
- outf = new File(outf, tname.concat(".xml"));
+ File outf = null;
+ outf = new File(logDir);
+ outf = new File(outf, tname.concat(".xml"));
- FileOutputStream ofs = new FileOutputStream(outf);
+ FileOutputStream ofs = new FileOutputStream(outf);
+ try {
+ conf.set(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "javaXML");
for (Task<? extends Serializable> plan : tasks) {
- Utilities.serializeObject(plan, ofs);
+ Utilities.serializePlan(plan, ofs, conf);
}
String[] patterns = new String[] {
@@ -895,7 +899,7 @@ public class QTestUtil {
"<string>[0-9]{10}</string>",
"<string>/.*/warehouse/.*</string>"
};
-
+
fixXml4JDK7(outf.getPath());
maskPatterns(patterns, outf.getPath());
@@ -904,12 +908,10 @@ public class QTestUtil {
if (exitVal != 0 && overWrite) {
exitVal = overwriteResults(outf.getPath(), planFile);
}
-
return exitVal;
- } else {
- throw new Exception("Plan is null");
+ } finally {
+ conf.set(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
}
-
}
/**
@@ -950,10 +952,10 @@ public class QTestUtil {
/**
* Fix the XML generated by JDK7 which is slightly different from what's generated by JDK6,
* causing 40+ test failures. There are mainly two problems:
- *
+ *
* 1. object element's properties, id and class, are in reverse order, i.e.
* <object class="org.apache.hadoop.hive.ql.exec.MapRedTask" id="MapRedTask0">
- * which needs to be fixed to
+ * which needs to be fixed to
* <object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask">
* 2. JDK introduces Enum as class, i.e.
* <object id="GenericUDAFEvaluator$Mode0" class="java.lang.Enum">
@@ -961,11 +963,11 @@ public class QTestUtil {
* which needs to be fixed to
* <object id="GenericUDAFEvaluator$Mode0" class="org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator$Mode"
* method="valueOf">
- *
+ *
* Though not elegant, this allows these test cases to pass until we have a better serialization mechanism.
- *
+ *
* Did I mention this is test code?
- *
+ *
* @param fname the name of the file to fix
* @throws Exception in case of IO error
*/
@@ -974,7 +976,7 @@ public class QTestUtil {
if (!version.startsWith("1.7")) {
return;
}
-
+
BufferedReader in = new BufferedReader(new FileReader(fname));
BufferedWriter out = new BufferedWriter(new FileWriter(fname + ".orig"));
String line = null;
@@ -987,7 +989,7 @@ public class QTestUtil {
in = new BufferedReader(new FileReader(fname + ".orig"));
out = new BufferedWriter(new FileWriter(fname));
-
+
while (null != (line = in.readLine())) {
if (line.indexOf("<object ") == -1 || line.indexOf("class=") == -1) {
out.write(line);
@@ -997,8 +999,9 @@ public class QTestUtil {
sb.append( prefix );
String postfix = line.substring(line.lastIndexOf('"') + 1);
String id = getPropertyValue(line, "id");
- if (id != null)
+ if (id != null) {
sb.append(" id=" + id);
+ }
String cls = getPropertyValue(line, "class");
assert(cls != null);
if (cls.equals("\"java.lang.Enum\"")) {
@@ -1011,27 +1014,28 @@ public class QTestUtil {
sb.append(postfix);
out.write(sb.toString());
- }
-
+ }
+
out.write('\n');
}
in.close();
out.close();
}
-
+
/**
* Get the value of a property in line. The returned value has original quotes
*/
private static String getPropertyValue(String line, String name) {
int start = line.indexOf( name + "=" );
- if (start == -1)
+ if (start == -1) {
return null;
+ }
start += name.length() + 1;
int end = line.indexOf("\"", start + 1);
return line.substring( start, end + 1 );
}
-
+
/**
* Get the value of the element in input. (Note: the returned value has no quotes.)
*/
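checkPlan now serializes through Utilities.serializePlan and pins the plan serialization format
to javaXML while the golden .q.xml files are written, restoring kryo in a finally block so the
rest of the run uses the new default. A condensed sketch of that toggle, assuming conf, tasks
and outf are supplied by the surrounding harness and imports are in place:

    // Force textual XML plans so the golden-file diff stays meaningful.
    FileOutputStream ofs = new FileOutputStream(outf);
    try {
      conf.set(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "javaXML");
      for (Task<? extends Serializable> plan : tasks) {
        Utilities.serializePlan(plan, ofs, conf); // format read from conf
      }
      // ... mask volatile patterns and diff against the checked-in plan ...
    } finally {
      // Restore the default so later tests in the same JVM use kryo.
      conf.set(HiveConf.ConfVars.PLAN_SERIALIZATION.varname, "kryo");
    }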
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Mon Sep 9 13:53:47 2013
@@ -79,14 +79,15 @@ public class TestPlan extends TestCase {
mrwork.getMapWork().setPathToPartitionInfo(pt);
mrwork.getMapWork().setAliasToWork(ao);
+ JobConf job = new JobConf(TestPlan.class);
// serialize the configuration once ..
ByteArrayOutputStream baos = new ByteArrayOutputStream();
- Utilities.serializeObject(mrwork, baos);
+ Utilities.serializePlan(mrwork, baos, job);
baos.close();
String v1 = baos.toString();
// store into configuration
- JobConf job = new JobConf(TestPlan.class);
+
job.set("fs.default.name", "file:///");
Utilities.setMapRedWork(job, mrwork, System.getProperty("java.io.tmpdir") + File.separator +
System.getProperty("user.name") + File.separator + "hive");
@@ -99,7 +100,7 @@ public class TestPlan extends TestCase {
// serialize again
baos.reset();
- Utilities.serializeObject(mrwork2, baos);
+ Utilities.serializePlan(mrwork2, baos, job);
baos.close();
// verify that the two are equal
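The reordering here is load-bearing: serializePlan takes the configuration (presumably to pick
the serialization format), so the JobConf is now built before the first serialization rather
than afterwards. The shape of the test is unchanged: serialize mrwork, store it via
setMapRedWork, read it back as mrwork2 (not shown in this hunk), serialize the copy, and compare
the two strings.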
Modified: hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml?rev=1521110&r1=1521109&r2=1521110&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml Mon Sep 9 13:53:47 2013
@@ -201,6 +201,9 @@
</void>
</object>
</void>
+ <void property="typeName">
+ <string>string</string>
+ </void>
</object>
</void>
<void method="add">
@@ -214,6 +217,9 @@
<void property="type">
<object idref="PrimitiveTypeInfo0"/>
</void>
+ <void property="typeName">
+ <string>string</string>
+ </void>
</object>
</void>
</object>
@@ -960,6 +966,9 @@
<void property="type">
<object idref="PrimitiveTypeInfo1"/>
</void>
+ <void property="typeName">
+ <string>int</string>
+ </void>
</object>
</void>
<void method="add">
@@ -970,6 +979,9 @@
<void property="type">
<object idref="PrimitiveTypeInfo0"/>
</void>
+ <void property="typeName">
+ <string>string</string>
+ </void>
</object>
</void>
</object>
@@ -1090,6 +1102,9 @@
<void property="type">
<object idref="ListTypeInfo1"/>
</void>
+ <void property="typeName">
+ <string>array&lt;int&gt;</string>
+ </void>
</object>
</void>
<void method="add">
@@ -1103,6 +1118,9 @@
<void property="type">
<object idref="ListTypeInfo0"/>
</void>
+ <void property="typeName">
+ <string>array&lt;struct&lt;myint:int,mystring:string,underscore_int:int&gt;&gt;</string>
+ </void>
</object>
</void>
</object>
@@ -1177,6 +1195,9 @@
<void property="type">
<object idref="PrimitiveTypeInfo1"/>
</void>
+ <void property="typeName">
+ <string>int</string>
+ </void>
</object>
</void>
<void method="add">
@@ -1190,6 +1211,9 @@
<void property="type">
<object idref="PrimitiveTypeInfo0"/>
</void>
+ <void property="typeName">
+ <string>string</string>
+ </void>
</object>
</void>
<void method="add">
@@ -1210,6 +1234,9 @@
</void>
</object>
</void>
+ <void property="typeName">
+ <string>array&lt;string&gt;</string>
+ </void>
</object>
</void>
<void method="add">
@@ -1233,6 +1260,9 @@
</void>
</object>
</void>
+ <void property="typeName">
+ <string>map&lt;string,string&gt;</string>
+ </void>
</object>
</void>
<void method="add">
@@ -1253,6 +1283,9 @@
</void>
</object>
</void>
+ <void property="typeName">
+ <string>bigint</string>
+ </void>
</object>
</void>
<void method="add">
@@ -1269,6 +1302,9 @@
<void property="type">
<object idref="PrimitiveTypeInfo0"/>
</void>
+ <void property="typeName">
+ <string>string</string>
+ </void>
</object>
</void>
</object>
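The golden-plan hunks above all have the same shape: each serialized column entry gains an
explicit typeName string next to its TypeInfo idref. A plausible reading, not confirmed by this
file alone, is that the column metadata bean now exposes a typeName property, which
java.beans.XMLEncoder then writes out as the <void property="typeName"> blocks seen here:

    // Hypothetical accessor pair; actual field and class names may differ.
    public String getTypeName() {
      return typeName;
    }

    public void setTypeName(String typeName) {
      this.typeName = typeName;
    }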