You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2013/10/21 06:43:06 UTC
svn commit: r1534023 [1/3] - in /hive/trunk:
hbase-handler/src/java/org/apache/hadoop/hive/hbase/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/ ql/s...
Author: hashutosh
Date: Mon Oct 21 04:43:04 2013
New Revision: 1534023
URL: http://svn.apache.org/r1534023
Log:
HIVE-5411 : Migrate expression serialization to Kryo (Ashutosh Chauhan via Thejas Nair)
Modified:
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf_case.q.xml
hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
hive/trunk/ql/src/test/results/compiler/plan/union.q.xml
Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Mon Oct 21 04:43:04 2013
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.index.I
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDe;
@@ -410,8 +411,8 @@ public class HBaseStorageHandler extends
hbaseSerde.getStorageFormatOfCol(keyColPos).get(0));
List<IndexSearchCondition> searchConditions =
new ArrayList<IndexSearchCondition>();
- ExprNodeDesc residualPredicate =
- analyzer.analyzePredicate(predicate, searchConditions);
+ ExprNodeGenericFuncDesc residualPredicate =
+ (ExprNodeGenericFuncDesc)analyzer.analyzePredicate(predicate, searchConditions);
int scSize = searchConditions.size();
if (scSize < 1 || 2 < scSize) {
// Either there was nothing which could be pushed down (size = 0),
Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java Mon Oct 21 04:43:04 2013
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.ql.index.I
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.ByteStream;
@@ -253,8 +254,8 @@ public class HiveHBaseTableInputFormat e
if (filterExprSerialized == null) {
return scan;
}
- ExprNodeDesc filterExpr =
- Utilities.deserializeExpression(filterExprSerialized, jobConf);
+ ExprNodeGenericFuncDesc filterExpr =
+ Utilities.deserializeExpression(filterExprSerialized);
String colName = jobConf.get(serdeConstants.LIST_COLUMNS).split(",")[iKey];
String colType = jobConf.get(serdeConstants.LIST_COLUMN_TYPES).split(",")[iKey];
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java Mon Oct 21 04:43:04 2013
@@ -83,10 +83,10 @@ public class ExprNodeGenericFuncEvaluato
public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) throws HiveException {
super(expr);
- children = new ExprNodeEvaluator[expr.getChildExprs().size()];
+ children = new ExprNodeEvaluator[expr.getChildren().size()];
isEager = false;
for (int i = 0; i < children.length; i++) {
- ExprNodeDesc child = expr.getChildExprs().get(i);
+ ExprNodeDesc child = expr.getChildren().get(i);
ExprNodeEvaluator nodeEvaluator = ExprNodeEvaluatorFactory.get(child);
children[i] = nodeEvaluator;
// If we have eager evaluators anywhere below us, then we are eager too.
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Mon Oct 21 04:43:04 2013
@@ -80,6 +80,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.antlr.runtime.CommonToken;
+import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.WordUtils;
import org.apache.commons.logging.Log;
@@ -99,7 +100,6 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
@@ -129,7 +129,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -594,10 +594,10 @@ public final class Utilities {
* @param expr Expression.
* @return Bytes.
*/
- public static byte[] serializeExpressionToKryo(ExprNodeDesc expr) {
+ public static byte[] serializeExpressionToKryo(ExprNodeGenericFuncDesc expr) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Output output = new Output(baos);
- runtimeSerializationKryo.get().writeClassAndObject(output, expr);
+ runtimeSerializationKryo.get().writeObject(output, expr);
output.close();
return baos.toByteArray();
}
@@ -607,47 +607,30 @@ public final class Utilities {
* @param bytes Bytes containing the expression.
* @return Expression; null if deserialization succeeded, but the result type is incorrect.
*/
- public static ExprNodeDesc deserializeExpressionFromKryo(byte[] bytes) {
+ public static ExprNodeGenericFuncDesc deserializeExpressionFromKryo(byte[] bytes) {
Input inp = new Input(new ByteArrayInputStream(bytes));
- Object o = runtimeSerializationKryo.get().readClassAndObject(inp);
+ ExprNodeGenericFuncDesc func = runtimeSerializationKryo.get().
+ readObject(inp,ExprNodeGenericFuncDesc.class);
inp.close();
- return (o instanceof ExprNodeDesc) ? (ExprNodeDesc)o : null;
+ return func;
}
- public static String serializeExpression(ExprNodeDesc expr) {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- XMLEncoder encoder = new XMLEncoder(baos);
- encoder.setPersistenceDelegate(java.sql.Date.class, new DatePersistenceDelegate());
- encoder.setPersistenceDelegate(Timestamp.class, new TimestampPersistenceDelegate());
+ public static String serializeExpression(ExprNodeGenericFuncDesc expr) {
try {
- encoder.writeObject(expr);
- } finally {
- encoder.close();
- }
- try {
- return baos.toString("UTF-8");
+ return new String(Base64.encodeBase64(serializeExpressionToKryo(expr)), "UTF-8");
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException("UTF-8 support required", ex);
}
}
- public static ExprNodeDesc deserializeExpression(String s, Configuration conf) {
+ public static ExprNodeGenericFuncDesc deserializeExpression(String s) {
byte[] bytes;
try {
- bytes = s.getBytes("UTF-8");
+ bytes = Base64.decodeBase64(s.getBytes("UTF-8"));
} catch (UnsupportedEncodingException ex) {
throw new RuntimeException("UTF-8 support required", ex);
}
-
- ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
-
- XMLDecoder decoder = new XMLDecoder(bais, null, null);
- try {
- ExprNodeDesc expr = (ExprNodeDesc) decoder.readObject();
- return expr;
- } finally {
- decoder.close();
- }
+ return deserializeExpressionFromKryo(bytes);
}
public static class CollectionPersistenceDelegate extends DefaultPersistenceDelegate {
@@ -665,6 +648,26 @@ public final class Utilities {
}
}
+ /**
+ * Kryo serializer for timestamp.
+ */
+ private static class TimestampSerializer extends
+ com.esotericsoftware.kryo.Serializer<Timestamp> {
+
+ @Override
+ public Timestamp read(Kryo kryo, Input input, Class<Timestamp> clazz) {
+ Timestamp ts = new Timestamp(input.readLong());
+ ts.setNanos(input.readInt());
+ return ts;
+ }
+
+ @Override
+ public void write(Kryo kryo, Output output, Timestamp ts) {
+ output.writeLong(ts.getTime());
+ output.writeInt(ts.getNanos());
+ }
+ }
+
/** Custom Kryo serializer for sql date, otherwise Kryo gets confused between
java.sql.Date and java.util.Date while deserializing
*/
@@ -835,6 +838,7 @@ public final class Utilities {
Kryo kryo = new Kryo();
kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
kryo.register(java.sql.Date.class, new SqlDateSerializer());
+ kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
removeField(kryo, Operator.class, "colExprMap");
removeField(kryo, ColumnInfo.class, "objectInspector");
removeField(kryo, MapWork.class, "opParseCtxMap");
@@ -855,6 +859,7 @@ public final class Utilities {
kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
kryo.register(CommonToken.class, new CommonTokenSerializer());
kryo.register(java.sql.Date.class, new SqlDateSerializer());
+ kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
return kryo;
};
};
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Mon Oct 21 04:43:04 2013
@@ -321,7 +321,7 @@ public class VectorizationContext {
ve = getCustomUDFExpression(expr);
} else {
ve = getVectorExpression(expr.getGenericUDF(),
- expr.getChildExprs());
+ expr.getChildren());
}
} else if (exprDesc instanceof ExprNodeConstantDesc) {
ve = getConstantVectorExpression((ExprNodeConstantDesc) exprDesc);
@@ -376,7 +376,7 @@ public class VectorizationContext {
// Return the type string of the first argument (argument 0).
public static String arg0Type(ExprNodeGenericFuncDesc expr) {
- String type = expr.getChildExprs().get(0).getTypeString();
+ String type = expr.getChildren().get(0).getTypeString();
return type;
}
@@ -1109,10 +1109,10 @@ public class VectorizationContext {
throws HiveException {
//GenericUDFBridge udfBridge = (GenericUDFBridge) expr.getGenericUDF();
- List<ExprNodeDesc> childExprList = expr.getChildExprs();
+ List<ExprNodeDesc> childExprList = expr.getChildren();
// argument descriptors
- VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[expr.getChildExprs().size()];
+ VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[expr.getChildren().size()];
for (int i = 0; i < argDescs.length; i++) {
argDescs[i] = new VectorUDFArgDesc();
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java Mon Oct 21 04:43:04 2013
@@ -92,9 +92,9 @@ public class VectorUDFAdaptor extends Ve
// Initialize transient fields. To be called after deserialization of other fields.
public void init() throws HiveException, UDFArgumentException {
genericUDF = expr.getGenericUDF();
- deferredChildren = new GenericUDF.DeferredObject[expr.getChildExprs().size()];
- childrenOIs = new ObjectInspector[expr.getChildExprs().size()];
- writers = VectorExpressionWriterFactory.getExpressionWriters(expr.getChildExprs());
+ deferredChildren = new GenericUDF.DeferredObject[expr.getChildren().size()];
+ childrenOIs = new ObjectInspector[expr.getChildren().size()];
+ writers = VectorExpressionWriterFactory.getExpressionWriters(expr.getChildren());
for (int i = 0; i < childrenOIs.length; i++) {
childrenOIs[i] = writers[i].getObjectInspector();
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java Mon Oct 21 04:43:04 2013
@@ -26,6 +26,8 @@ import java.util.Map;
import java.util.Set;
import java.util.Stack;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -44,12 +46,10 @@ import org.apache.hadoop.hive.ql.plan.Ex
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.ReflectionUtils;
/**
@@ -62,7 +62,7 @@ import org.apache.hadoop.util.Reflection
public class IndexPredicateAnalyzer
{
private static final Log LOG = LogFactory.getLog(IndexPredicateAnalyzer.class.getName());
- private Set<String> udfNames;
+ private final Set<String> udfNames;
private Set<String> allowedColumnNames;
@@ -135,7 +135,7 @@ public class IndexPredicateAnalyzer
}
}
- return analyzeExpr((ExprNodeDesc) nd, searchConditions, nodeOutputs);
+ return analyzeExpr((ExprNodeGenericFuncDesc) nd, searchConditions, nodeOutputs);
}
};
@@ -155,13 +155,11 @@ public class IndexPredicateAnalyzer
}
private ExprNodeDesc analyzeExpr(
- ExprNodeDesc expr,
+ ExprNodeGenericFuncDesc expr,
List<IndexSearchCondition> searchConditions,
Object... nodeOutputs) {
- if (!(expr instanceof ExprNodeGenericFuncDesc)) {
- return expr;
- }
+ expr = (ExprNodeGenericFuncDesc) expr;
if (FunctionRegistry.isOpAnd(expr)) {
assert(nodeOutputs.length == 2);
ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
@@ -182,12 +180,11 @@ public class IndexPredicateAnalyzer
}
String udfName;
- ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr;
- if (funcDesc.getGenericUDF() instanceof GenericUDFBridge) {
- GenericUDFBridge func = (GenericUDFBridge) funcDesc.getGenericUDF();
+ if (expr.getGenericUDF() instanceof GenericUDFBridge) {
+ GenericUDFBridge func = (GenericUDFBridge) expr.getGenericUDF();
udfName = func.getUdfName();
} else {
- udfName = funcDesc.getGenericUDF().getClass().getName();
+ udfName = expr.getGenericUDF().getClass().getName();
}
if (!udfNames.contains(udfName)) {
return expr;
@@ -255,7 +252,7 @@ public class IndexPredicateAnalyzer
}
}
- for (ExprNodeDesc child : func.getChildExprs()) {
+ for (ExprNodeDesc child : func.getChildren()) {
if (child instanceof ExprNodeConstantDesc) {
continue;
} else if (child instanceof ExprNodeGenericFuncDesc) {
@@ -283,12 +280,12 @@ public class IndexPredicateAnalyzer
*
* @param searchConditions (typically produced by analyzePredicate)
*
- * @return ExprNodeDesc form of search conditions
+ * @return ExprNodeGenericFuncDesc form of search conditions
*/
- public ExprNodeDesc translateSearchConditions(
+ public ExprNodeGenericFuncDesc translateSearchConditions(
List<IndexSearchCondition> searchConditions) {
- ExprNodeDesc expr = null;
+ ExprNodeGenericFuncDesc expr = null;
for (IndexSearchCondition searchCondition : searchConditions) {
if (expr == null) {
expr = searchCondition.getComparisonExpr();
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java Mon Oct 21 04:43:04 2013
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.ql.index;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
/**
* IndexSearchCondition represents an individual search condition
@@ -31,7 +31,7 @@ public class IndexSearchCondition
private ExprNodeColumnDesc columnDesc;
private String comparisonOp;
private ExprNodeConstantDesc constantDesc;
- private ExprNodeDesc comparisonExpr;
+ private ExprNodeGenericFuncDesc comparisonExpr;
/**
* Constructs a search condition, which takes the form
@@ -50,7 +50,7 @@ public class IndexSearchCondition
ExprNodeColumnDesc columnDesc,
String comparisonOp,
ExprNodeConstantDesc constantDesc,
- ExprNodeDesc comparisonExpr) {
+ ExprNodeGenericFuncDesc comparisonExpr) {
this.columnDesc = columnDesc;
this.comparisonOp = comparisonOp;
@@ -82,11 +82,11 @@ public class IndexSearchCondition
return constantDesc;
}
- public void setComparisonExpr(ExprNodeDesc comparisonExpr) {
+ public void setComparisonExpr(ExprNodeGenericFuncDesc comparisonExpr) {
this.comparisonExpr = comparisonExpr;
}
- public ExprNodeDesc getComparisonExpr() {
+ public ExprNodeGenericFuncDesc getComparisonExpr() {
return comparisonExpr;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java Mon Oct 21 04:43:04 2013
@@ -316,7 +316,8 @@ public class CompactIndexHandler extends
IndexPredicateAnalyzer analyzer = getIndexPredicateAnalyzer(index, queryPartitions);
List<IndexSearchCondition> searchConditions = new ArrayList<IndexSearchCondition>();
// split predicate into pushed (what we can handle), and residual (what we can't handle)
- ExprNodeDesc residualPredicate = analyzer.analyzePredicate(predicate, searchConditions);
+ ExprNodeGenericFuncDesc residualPredicate = (ExprNodeGenericFuncDesc)analyzer.
+ analyzePredicate(predicate, searchConditions);
if (searchConditions.size() == 0) {
return null;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java Mon Oct 21 04:43:04 2013
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.ql.exec.Op
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.log.PerfLogger;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
@@ -347,7 +347,7 @@ public class HiveInputFormat<K extends W
Utilities.setColumnNameList(jobConf, tableScan);
Utilities.setColumnTypeList(jobConf, tableScan);
// push down filters
- ExprNodeDesc filterExpr = scanDesc.getFilterExpr();
+ ExprNodeGenericFuncDesc filterExpr = (ExprNodeGenericFuncDesc)scanDesc.getFilterExpr();
if (filterExpr == null) {
return;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java Mon Oct 21 04:43:04 2013
@@ -170,7 +170,7 @@ public class OrcInputFormat implements
return null;
}
SearchArgument sarg = SearchArgument.FACTORY.create
- (Utilities.deserializeExpression(serializedPushdown, conf));
+ (Utilities.deserializeExpression(serializedPushdown));
LOG.info("ORC pushdown predicate: " + sarg);
return sarg;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgument.java Mon Oct 21 04:43:04 2013
@@ -18,10 +18,10 @@
package org.apache.hadoop.hive.ql.io.sarg;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-
import java.util.List;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+
/**
* Primary interface for <a href="http://en.wikipedia.org/wiki/Sargable">
* SearchArgument</a>, which are the subset of predicates
@@ -170,7 +170,7 @@ public interface SearchArgument {
* in interfaces. *DOH*
*/
public static class Factory {
- public SearchArgument create(ExprNodeDesc expression) {
+ public SearchArgument create(ExprNodeGenericFuncDesc expression) {
return new SearchArgumentImpl(expression);
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java Mon Oct 21 04:43:04 2013
@@ -18,6 +18,14 @@
package org.apache.hadoop.hive.ql.io.sarg;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -40,14 +48,6 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Deque;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
/**
* The implementation of SearchArguments.
*/
@@ -221,6 +221,7 @@ final class SearchArgumentImpl implement
}
}
+ @Override
public String toString() {
StringBuilder buffer = new StringBuilder();
switch (operator) {
@@ -471,49 +472,49 @@ final class SearchArgumentImpl implement
return new ExpressionTree(TruthValue.YES_NO_NULL);
}
// get the kind of expression
- ExprNodeGenericFuncDesc typed = (ExprNodeGenericFuncDesc) expression;
- Class<?> op = typed.getGenericUDF().getClass();
+ ExprNodeGenericFuncDesc expr = (ExprNodeGenericFuncDesc) expression;
+ Class<?> op = expr.getGenericUDF().getClass();
ExpressionTree result;
// handle the logical operators
if (op == GenericUDFOPOr.class) {
result = new ExpressionTree(ExpressionTree.Operator.OR);
- addChildren(result, typed, leafCache);
+ addChildren(result, expr, leafCache);
} else if (op == GenericUDFOPAnd.class) {
result = new ExpressionTree(ExpressionTree.Operator.AND);
- addChildren(result, typed, leafCache);
+ addChildren(result, expr, leafCache);
} else if (op == GenericUDFOPNot.class) {
result = new ExpressionTree(ExpressionTree.Operator.NOT);
- addChildren(result, typed, leafCache);
+ addChildren(result, expr, leafCache);
} else if (op == GenericUDFOPEqual.class) {
- result = createLeaf(PredicateLeaf.Operator.EQUALS, typed, leafCache);
+ result = createLeaf(PredicateLeaf.Operator.EQUALS, expr, leafCache);
} else if (op == GenericUDFOPNotEqual.class) {
- result = negate(createLeaf(PredicateLeaf.Operator.EQUALS, typed,
+ result = negate(createLeaf(PredicateLeaf.Operator.EQUALS, expr,
leafCache));
} else if (op == GenericUDFOPEqualNS.class) {
- result = createLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS, typed,
+ result = createLeaf(PredicateLeaf.Operator.NULL_SAFE_EQUALS, expr,
leafCache);
} else if (op == GenericUDFOPGreaterThan.class) {
result = negate(createLeaf(PredicateLeaf.Operator.LESS_THAN_EQUALS,
- typed, leafCache));
+ expr, leafCache));
} else if (op == GenericUDFOPEqualOrGreaterThan.class) {
- result = negate(createLeaf(PredicateLeaf.Operator.LESS_THAN, typed,
+ result = negate(createLeaf(PredicateLeaf.Operator.LESS_THAN, expr,
leafCache));
} else if (op == GenericUDFOPLessThan.class) {
- result = createLeaf(PredicateLeaf.Operator.LESS_THAN, typed, leafCache);
+ result = createLeaf(PredicateLeaf.Operator.LESS_THAN, expr, leafCache);
} else if (op == GenericUDFOPEqualOrLessThan.class) {
- result = createLeaf(PredicateLeaf.Operator.LESS_THAN_EQUALS, typed,
+ result = createLeaf(PredicateLeaf.Operator.LESS_THAN_EQUALS, expr,
leafCache);
} else if (op == GenericUDFIn.class) {
- result = createLeaf(PredicateLeaf.Operator.IN, typed, leafCache, 0);
+ result = createLeaf(PredicateLeaf.Operator.IN, expr, leafCache, 0);
} else if (op == GenericUDFBetween.class) {
- result = createLeaf(PredicateLeaf.Operator.BETWEEN, typed, leafCache,
+ result = createLeaf(PredicateLeaf.Operator.BETWEEN, expr, leafCache,
1);
} else if (op == GenericUDFOPNull.class) {
- result = createLeaf(PredicateLeaf.Operator.IS_NULL, typed, leafCache,
+ result = createLeaf(PredicateLeaf.Operator.IS_NULL, expr, leafCache,
0);
} else if (op == GenericUDFOPNotNull.class) {
- result = negate(createLeaf(PredicateLeaf.Operator.IS_NULL, typed,
+ result = negate(createLeaf(PredicateLeaf.Operator.IS_NULL, expr,
leafCache, 0));
// otherwise, we didn't understand it, so mark it maybe
@@ -753,7 +754,7 @@ final class SearchArgumentImpl implement
* @param expression the expression to translate
* @return The normalized expression.
*/
- ExpressionTree expression(ExprNodeDesc expression) {
+ ExpressionTree expression(ExprNodeGenericFuncDesc expression) {
List<PredicateLeaf> leafCache = new ArrayList<PredicateLeaf>();
ExpressionTree expr = parse(expression, leafCache);
return expression(expr, leafCache);
@@ -786,7 +787,7 @@ final class SearchArgumentImpl implement
private final List<PredicateLeaf> leaves;
private final ExpressionTree expression;
- SearchArgumentImpl(ExprNodeDesc expr) {
+ SearchArgumentImpl(ExprNodeGenericFuncDesc expr) {
if (expr == null) {
leaves = new ArrayList<PredicateLeaf>();
expression = null;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Mon Oct 21 04:43:04 2013
@@ -81,7 +81,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.session.CreateTableAutomaticGrant;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -1927,7 +1927,7 @@ private void constructOneLBLocationMap(F
* @param result the resulting list of partitions
* @return whether the resulting list contains partitions which may or may not match the expr
*/
- public boolean getPartitionsByExpr(Table tbl, ExprNodeDesc expr, HiveConf conf,
+ public boolean getPartitionsByExpr(Table tbl, ExprNodeGenericFuncDesc expr, HiveConf conf,
List<Partition> result) throws HiveException, TException {
assert result != null;
byte[] exprBytes = Utilities.serializeExpressionToKryo(expr);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java Mon Oct 21 04:43:04 2013
@@ -18,9 +18,10 @@
package org.apache.hadoop.hive.ql.metadata;
-import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.mapred.JobConf;
/**
* HiveStoragePredicateHandler is an optional companion to {@link
@@ -65,12 +66,12 @@ public interface HiveStoragePredicateHan
* Portion of predicate to be evaluated by storage handler. Hive
* will pass this into the storage handler's input format.
*/
- public ExprNodeDesc pushedPredicate;
+ public ExprNodeGenericFuncDesc pushedPredicate;
/**
* Portion of predicate to be post-evaluated by Hive for any rows
* which are returned by storage handler.
*/
- public ExprNodeDesc residualPredicate;
+ public ExprNodeGenericFuncDesc residualPredicate;
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java Mon Oct 21 04:43:04 2013
@@ -176,7 +176,7 @@ public final class RewriteCanApplyProcFa
canApplyCtx.getGbKeyNameList().addAll(expr.getCols());
}else if(expr instanceof ExprNodeGenericFuncDesc){
ExprNodeGenericFuncDesc funcExpr = (ExprNodeGenericFuncDesc)expr;
- List<ExprNodeDesc> childExprs = funcExpr.getChildExprs();
+ List<ExprNodeDesc> childExprs = funcExpr.getChildren();
for (ExprNodeDesc childExpr : childExprs) {
if(childExpr instanceof ExprNodeColumnDesc){
canApplyCtx.getGbKeyNameList().addAll(expr.getCols());
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java Mon Oct 21 04:43:04 2013
@@ -209,7 +209,7 @@ public final class PcrExprProcFactory {
children.add(wrapper.outExpr);
}
}
- funcExpr.setChildExprs(children);
+ funcExpr.setChildren(children);
return funcExpr;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartExprEvalUtils.java Mon Oct 21 04:43:04 2013
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.Ex
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -84,7 +85,7 @@ public class PartExprEvalUtils {
}
static synchronized public ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> prepareExpr(
- ExprNodeDesc expr, List<String> partNames) throws HiveException {
+ ExprNodeGenericFuncDesc expr, List<String> partNames) throws HiveException {
// Create the row object
List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
for (int i = 0; i < partNames.size(); i++) {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionExpressionForMetastore.java Mon Oct 21 04:43:04 2013
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
/**
* The basic implementation of PartitionExpressionProxy that uses ql package classes.
@@ -42,7 +42,7 @@ public class PartitionExpressionForMetas
@Override
public boolean filterPartitionsByExpr(List<String> columnNames, byte[] exprBytes,
String defaultPartitionName, List<String> partitionNames) throws MetaException {
- ExprNodeDesc expr = deserializeExpr(exprBytes);
+ ExprNodeGenericFuncDesc expr = deserializeExpr(exprBytes);
try {
long startTime = System.nanoTime(), len = partitionNames.size();
boolean result = PartitionPruner.prunePartitionNames(
@@ -56,8 +56,8 @@ public class PartitionExpressionForMetas
}
}
- private ExprNodeDesc deserializeExpr(byte[] exprBytes) throws MetaException {
- ExprNodeDesc expr = null;
+ private ExprNodeGenericFuncDesc deserializeExpr(byte[] exprBytes) throws MetaException {
+ ExprNodeGenericFuncDesc expr = null;
try {
expr = Utilities.deserializeExpressionFromKryo(exprBytes);
} catch (Exception ex) {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Mon Oct 21 04:43:04 2013
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.optimi
import java.util.AbstractSequentialList;
import java.util.ArrayList;
import java.util.Iterator;
-import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
@@ -40,7 +39,6 @@ import org.apache.hadoop.hive.ql.ErrorMs
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -192,7 +190,8 @@ public class PartitionPruner implements
if (((ExprNodeConstantDesc)expr).getValue() == null) {
return null;
} else {
- return expr;
+ throw new IllegalStateException("Unexpected non-null ExprNodeConstantDesc: "
+ + expr.getExprString());
}
} else if (expr instanceof ExprNodeGenericFuncDesc) {
GenericUDF udf = ((ExprNodeGenericFuncDesc)expr).getGenericUDF();
@@ -213,9 +212,10 @@ public class PartitionPruner implements
return isAnd ? children.get(0) : null;
}
}
- return expr;
+ return (ExprNodeGenericFuncDesc)expr;
+ } else {
+ throw new IllegalStateException("Unexpected type of ExprNodeDesc: " + expr.getExprString());
}
- return expr;
}
/**
@@ -247,10 +247,16 @@ public class PartitionPruner implements
* @return True iff expr contains any non-native user-defined functions.
*/
static private boolean hasUserFunctions(ExprNodeDesc expr) {
- if (!(expr instanceof ExprNodeGenericFuncDesc)) return false;
- if (!FunctionRegistry.isNativeFuncExpr((ExprNodeGenericFuncDesc)expr)) return true;
+ if (!(expr instanceof ExprNodeGenericFuncDesc)) {
+ return false;
+ }
+ if (!FunctionRegistry.isNativeFuncExpr((ExprNodeGenericFuncDesc)expr)) {
+ return true;
+ }
for (ExprNodeDesc child : expr.getChildren()) {
- if (hasUserFunctions(child)) return true;
+ if (hasUserFunctions(child)) {
+ return true;
+ }
}
return false;
}
@@ -279,7 +285,7 @@ public class PartitionPruner implements
// Replace virtual columns with nulls. See javadoc for details.
prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab));
// Remove all parts that are not partition columns. See javadoc for details.
- ExprNodeDesc compactExpr = compactExpr(prunerExpr.clone());
+ ExprNodeGenericFuncDesc compactExpr = (ExprNodeGenericFuncDesc)compactExpr(prunerExpr.clone());
String oldFilter = prunerExpr.getExprString();
if (compactExpr == null) {
// Non-strict mode, and all the predicates are on non-partition columns - get everything.
@@ -346,7 +352,7 @@ public class PartitionPruner implements
* @return true iff the partition pruning expression contains non-partition columns.
*/
static private boolean pruneBySequentialScan(Table tab, List<Partition> partitions,
- ExprNodeDesc prunerExpr, HiveConf conf) throws HiveException, MetaException {
+ ExprNodeGenericFuncDesc prunerExpr, HiveConf conf) throws HiveException, MetaException {
PerfLogger perfLogger = PerfLogger.getPerfLogger();
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PRUNE_LISTING);
@@ -385,7 +391,7 @@ public class PartitionPruner implements
* @param partNames Partition names to filter. The list is modified in place.
* @return Whether the list has any partitions for which the expression may or may not match.
*/
- public static boolean prunePartitionNames(List<String> columnNames, ExprNodeDesc prunerExpr,
+ public static boolean prunePartitionNames(List<String> columnNames, ExprNodeGenericFuncDesc prunerExpr,
String defaultPartitionName, List<String> partNames) throws HiveException, MetaException {
// Prepare the expression to filter on the columns.
ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> handle =
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Mon Oct 21 04:43:04 2013
@@ -4322,7 +4322,7 @@ public class SemanticAnalyzer extends Ba
if (exprNode instanceof ExprNodeGenericFuncDesc) {
ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) exprNode;
- for (ExprNodeDesc childExpr : funcDesc.getChildExprs()) {
+ for (ExprNodeDesc childExpr : funcDesc.getChildren()) {
extractColumns(colNamesExprs, childExpr);
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java Mon Oct 21 04:43:04 2013
@@ -68,7 +68,7 @@ public class ExprNodeDescUtils {
}
// duplicate function with possibily replaced children
ExprNodeGenericFuncDesc clone = (ExprNodeGenericFuncDesc) func.clone();
- clone.setChildExprs(children);
+ clone.setChildren(children);
return clone;
}
// constant or null, just return it
@@ -94,7 +94,7 @@ public class ExprNodeDescUtils {
/**
* bind two predicates by AND op
*/
- public static ExprNodeDesc mergePredicates(ExprNodeDesc prev, ExprNodeDesc next) {
+ public static ExprNodeGenericFuncDesc mergePredicates(ExprNodeDesc prev, ExprNodeDesc next) {
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
children.add(prev);
children.add(next);
@@ -196,7 +196,7 @@ public class ExprNodeDescUtils {
if (source instanceof ExprNodeGenericFuncDesc) {
// all children expression should be resolved
ExprNodeGenericFuncDesc function = (ExprNodeGenericFuncDesc) source.clone();
- function.setChildExprs(backtrack(function.getChildren(), current, terminal));
+ function.setChildren(backtrack(function.getChildren(), current, terminal));
return function;
}
if (source instanceof ExprNodeColumnDesc) {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java Mon Oct 21 04:43:04 2013
@@ -61,7 +61,7 @@ public class ExprNodeGenericFuncDesc ext
* exactly what we want.
*/
private GenericUDF genericUDF;
- private List<ExprNodeDesc> childExprs;
+ private List<ExprNodeDesc> children;
private transient String funcText;
/**
* This class uses a writableObjectInspector rather than a TypeInfo to store
@@ -93,7 +93,7 @@ public class ExprNodeGenericFuncDesc ext
ObjectInspectorUtils.getWritableObjectInspector(oi);
assert (genericUDF != null);
this.genericUDF = genericUDF;
- this.childExprs = children;
+ this.children = children;
this.funcText = funcText;
}
@@ -121,17 +121,13 @@ public class ExprNodeGenericFuncDesc ext
this.genericUDF = genericUDF;
}
- public List<ExprNodeDesc> getChildExprs() {
- return childExprs;
- }
-
- public void setChildExprs(List<ExprNodeDesc> children) {
- childExprs = children;
+ public void setChildren(List<ExprNodeDesc> children) {
+ this.children = children;
}
@Override
public List<ExprNodeDesc> getChildren() {
- return childExprs;
+ return children;
}
@Override
@@ -139,11 +135,11 @@ public class ExprNodeGenericFuncDesc ext
StringBuilder sb = new StringBuilder();
sb.append(genericUDF.getClass().toString());
sb.append("(");
- for (int i = 0; i < childExprs.size(); i++) {
+ for (int i = 0; i < children.size(); i++) {
if (i > 0) {
sb.append(", ");
}
- sb.append(childExprs.get(i).toString());
+ sb.append(children.get(i).toString());
}
sb.append("(");
sb.append(")");
@@ -154,9 +150,9 @@ public class ExprNodeGenericFuncDesc ext
@Override
public String getExprString() {
// Get the children expr strings
- String[] childrenExprStrings = new String[childExprs.size()];
+ String[] childrenExprStrings = new String[children.size()];
for (int i = 0; i < childrenExprStrings.length; i++) {
- childrenExprStrings[i] = childExprs.get(i).getExprString();
+ childrenExprStrings[i] = children.get(i).getExprString();
}
return genericUDF.getDisplayString(childrenExprStrings);
@@ -165,10 +161,10 @@ public class ExprNodeGenericFuncDesc ext
@Override
public List<String> getCols() {
List<String> colList = new ArrayList<String>();
- if (childExprs != null) {
+ if (children != null) {
int pos = 0;
- while (pos < childExprs.size()) {
- List<String> colCh = childExprs.get(pos).getCols();
+ while (pos < children.size()) {
+ List<String> colCh = children.get(pos).getCols();
colList = Utilities.mergeUniqElems(colList, colCh);
pos++;
}
@@ -179,8 +175,8 @@ public class ExprNodeGenericFuncDesc ext
@Override
public ExprNodeDesc clone() {
- List<ExprNodeDesc> cloneCh = new ArrayList<ExprNodeDesc>(childExprs.size());
- for (ExprNodeDesc ch : childExprs) {
+ List<ExprNodeDesc> cloneCh = new ArrayList<ExprNodeDesc>(children.size());
+ for (ExprNodeDesc ch : children) {
cloneCh.add(ch.clone());
}
ExprNodeGenericFuncDesc clone = new ExprNodeGenericFuncDesc(typeInfo,
@@ -286,12 +282,12 @@ public class ExprNodeGenericFuncDesc ext
}
}
- if (childExprs.size() != dest.getChildExprs().size()) {
+ if (children.size() != dest.getChildren().size()) {
return false;
}
- for (int pos = 0; pos < childExprs.size(); pos++) {
- if (!childExprs.get(pos).isSame(dest.getChildExprs().get(pos))) {
+ for (int pos = 0; pos < children.size(); pos++) {
+ if (!children.get(pos).isSame(dest.getChildren().get(pos))) {
return false;
}
}
@@ -304,7 +300,7 @@ public class ExprNodeGenericFuncDesc ext
int superHashCode = super.hashCode();
HashCodeBuilder builder = new HashCodeBuilder();
builder.appendSuper(superHashCode);
- builder.append(childExprs);
+ builder.append(children);
return builder.toHashCode();
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Mon Oct 21 04:43:04 2013
@@ -60,7 +60,7 @@ public class TableScanDesc extends Abstr
private boolean statsReliable;
private int maxStatsKeyPrefixLength = -1;
- private ExprNodeDesc filterExpr;
+ private ExprNodeGenericFuncDesc filterExpr;
public static final String FILTER_EXPR_CONF_STR =
"hive.io.filter.expr.serialized";
@@ -96,11 +96,11 @@ public class TableScanDesc extends Abstr
}
@Explain(displayName = "filterExpr")
- public ExprNodeDesc getFilterExpr() {
+ public ExprNodeGenericFuncDesc getFilterExpr() {
return filterExpr;
}
- public void setFilterExpr(ExprNodeDesc filterExpr) {
+ public void setFilterExpr(ExprNodeGenericFuncDesc filterExpr) {
this.filterExpr = filterExpr;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Mon Oct 21 04:43:04 2013
@@ -171,7 +171,7 @@ public final class ExprWalkerProcFactory
ExprNodeDesc ch = (ExprNodeDesc) nd.getChildren().get(i);
ExprNodeDesc newCh = ctx.getConvertedNode(ch);
if (newCh != null) {
- expr.getChildExprs().set(i, newCh);
+ expr.getChildren().set(i, newCh);
ch = newCh;
}
String chAlias = ctx.getAlias(ch);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Mon Oct 21 04:43:04 2013
@@ -55,6 +55,7 @@ import org.apache.hadoop.hive.ql.parse.S
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
@@ -695,6 +696,9 @@ public final class OpProcFactory {
}
ExprNodeDesc condn = ExprNodeDescUtils.mergePredicates(preds);
+ if(!(condn instanceof ExprNodeGenericFuncDesc)) {
+ return null;
+ }
if (op instanceof TableScanOperator) {
boolean pushFilterToStorage;
@@ -704,7 +708,7 @@ public final class OpProcFactory {
if (pushFilterToStorage) {
condn = pushFilterToStorageHandler(
(TableScanOperator) op,
- condn,
+ (ExprNodeGenericFuncDesc)condn,
owi,
hiveConf);
if (condn == null) {
@@ -769,9 +773,9 @@ public final class OpProcFactory {
* by Hive as a post-filter, or null if it was possible
* to push down the entire predicate
*/
- private static ExprNodeDesc pushFilterToStorageHandler(
+ private static ExprNodeGenericFuncDesc pushFilterToStorageHandler(
TableScanOperator tableScanOp,
- ExprNodeDesc originalPredicate,
+ ExprNodeGenericFuncDesc originalPredicate,
OpWalkerInfo owi,
HiveConf hiveConf) {
@@ -832,7 +836,7 @@ public final class OpProcFactory {
}
}
tableScanDesc.setFilterExpr(decomposed.pushedPredicate);
- return decomposed.residualPredicate;
+ return (ExprNodeGenericFuncDesc)decomposed.residualPredicate;
}
public static NodeProcessor getFilterProc() {
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java Mon Oct 21 04:43:04 2013
@@ -25,7 +25,6 @@ import java.util.Stack;
import junit.framework.TestCase;
-
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
@@ -40,11 +39,11 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -53,7 +52,7 @@ import org.apache.thrift.TException;
import com.google.common.collect.Lists;
-/**
+/**
* Tests hive metastore expression support. This should be moved in metastore module
* as soon as we are able to use ql from metastore server (requires splitting metastore
* server and client).
@@ -165,7 +164,7 @@ public class TestMetastoreExpr extends T
}
public void checkExpr(int numParts,
- String dbName, String tblName, ExprNodeDesc expr) throws Exception {
+ String dbName, String tblName, ExprNodeGenericFuncDesc expr) throws Exception {
List<Partition> parts = new ArrayList<Partition>();
client.listPartitionsByExpr(
dbName, tblName, Utilities.serializeExpressionToKryo(expr), null, (short)-1, parts);
@@ -180,9 +179,11 @@ public class TestMetastoreExpr extends T
this.tblName = tblName;
}
- public ExprNodeDesc build() throws Exception {
- if (stack.size() != 1) throw new Exception("Bad test: " + stack.size());
- return stack.pop();
+ public ExprNodeGenericFuncDesc build() throws Exception {
+ if (stack.size() != 1) {
+ throw new Exception("Bad test: " + stack.size());
+ }
+ return (ExprNodeGenericFuncDesc)stack.pop();
}
public ExprBuilder pred(String name, int args) {
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java Mon Oct 21 04:43:04 2013
@@ -18,17 +18,19 @@
package org.apache.hadoop.hive.ql.exec;
-import java.sql.Date;
-import java.sql.Timestamp;
-
import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.List;
+
import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
@@ -64,10 +66,12 @@ public class TestUtilities extends TestC
public void testSerializeTimestamp() {
Timestamp ts = new Timestamp(1374554702000L);
ts.setNanos(123456);
- ExprNodeConstantDesc constant = new ExprNodeConstantDesc(
- TypeInfoFactory.timestampTypeInfo, ts);
- String serialized = Utilities.serializeExpression(constant);
- ExprNodeDesc deserialized = Utilities.deserializeExpression(serialized, new Configuration());
- assertEquals(constant.getExprString(), deserialized.getExprString());
+ ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
+ List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
+ children.add(constant);
+ ExprNodeGenericFuncDesc desc = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo,
+ new GenericUDFFromUtcTimestamp(), children);
+ assertEquals(desc.getExprString(), Utilities.deserializeExpression(
+ Utilities.serializeExpression(desc)).getExprString());
}
}
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java Mon Oct 21 04:43:04 2013
@@ -100,7 +100,7 @@ public class TestVectorSelectOperator {
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
children.add(colDesc1);
children.add(colDesc2);
- plusDesc.setChildExprs(children);
+ plusDesc.setChildren(children);
plusDesc.setTypeInfo(TypeInfoFactory.longTypeInfo);
colList.add(plusDesc);
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1534023&r1=1534022&r2=1534023&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Mon Oct 21 04:43:04 2013
@@ -103,23 +103,23 @@ public class TestVectorizationContext {
children1.add(minusExpr);
children1.add(multiplyExpr);
- sumExpr.setChildExprs(children1);
+ sumExpr.setChildren(children1);
children2.add(sum2Expr);
children2.add(col3Expr);
- minusExpr.setChildExprs(children2);
+ minusExpr.setChildren(children2);
children3.add(col1Expr);
children3.add(col2Expr);
- sum2Expr.setChildExprs(children3);
+ sum2Expr.setChildren(children3);
children4.add(col4Expr);
children4.add(modExpr);
- multiplyExpr.setChildExprs(children4);
+ multiplyExpr.setChildren(children4);
children5.add(col5Expr);
children5.add(col6Expr);
- modExpr.setChildExprs(children5);
+ modExpr.setChildren(children5);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 1);
@@ -165,7 +165,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
children1.add(col1Expr);
children1.add(constDesc);
- exprDesc.setChildExprs(children1);
+ exprDesc.setChildren(children1);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 1);
@@ -190,7 +190,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
children1.add(col1Expr);
children1.add(col2Expr);
- exprDesc.setChildExprs(children1);
+ exprDesc.setChildren(children1);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 1);
@@ -216,7 +216,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
children1.add(col1Expr);
children1.add(constDesc);
- exprDesc.setChildExprs(children1);
+ exprDesc.setChildren(children1);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 0);
@@ -240,7 +240,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
children1.add(col1Expr);
children1.add(constDesc);
- greaterExprDesc.setChildExprs(children1);
+ greaterExprDesc.setChildren(children1);
ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Float.class, "col2", "table", false);
ExprNodeConstantDesc const2Desc = new ExprNodeConstantDesc(new Float(1.0));
@@ -251,7 +251,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>(2);
children2.add(col2Expr);
children2.add(const2Desc);
- lessExprDesc.setChildExprs(children2);
+ lessExprDesc.setChildren(children2);
GenericUDFOPAnd andUdf = new GenericUDFOPAnd();
ExprNodeGenericFuncDesc andExprDesc = new ExprNodeGenericFuncDesc();
@@ -259,7 +259,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>(2);
children3.add(greaterExprDesc);
children3.add(lessExprDesc);
- andExprDesc.setChildExprs(children3);
+ andExprDesc.setChildren(children3);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 0);
@@ -280,7 +280,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children4 = new ArrayList<ExprNodeDesc>(2);
children4.add(greaterExprDesc);
children4.add(lessExprDesc);
- orExprDesc.setChildExprs(children4);
+ orExprDesc.setChildren(children4);
VectorExpression veOr = vc.getVectorExpression(orExprDesc);
@@ -302,7 +302,7 @@ public class TestVectorizationContext {
children.add(constDesc);
children.add(colDesc);
- scalarMinusConstant.setChildExprs(children);
+ scalarMinusConstant.setChildren(children);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("a", 0);
@@ -324,7 +324,7 @@ public class TestVectorizationContext {
List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
children1.add(col1Expr);
children1.add(constDesc);
- exprDesc.setChildExprs(children1);
+ exprDesc.setChildren(children1);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 1);
@@ -346,7 +346,7 @@ public class TestVectorizationContext {
negExprDesc.setGenericUDF(gudf);
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
children.add(col1Expr);
- negExprDesc.setChildExprs(children);
+ negExprDesc.setChildren(children);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 1);
VectorizationContext vc = new VectorizationContext(columnMap, 1);
@@ -365,7 +365,7 @@ public class TestVectorizationContext {
negExprDesc.setGenericUDF(gudf);
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
children.add(col1Expr);
- negExprDesc.setChildExprs(children);
+ negExprDesc.setChildren(children);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("col1", 1);
VectorizationContext vc = new VectorizationContext(columnMap, 1);
@@ -390,7 +390,7 @@ public class TestVectorizationContext {
children.add(constDesc);
children.add(colDesc);
- scalarGreaterColExpr.setChildExprs(children);
+ scalarGreaterColExpr.setChildren(children);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("a", 0);
@@ -415,7 +415,7 @@ public class TestVectorizationContext {
children.add(colDesc);
children.add(constDesc);
- colEqualScalar.setChildExprs(children);
+ colEqualScalar.setChildren(children);
Map<String, Integer> columnMap = new HashMap<String, Integer>();
columnMap.put("a", 0);