You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jv...@apache.org on 2011/09/01 01:41:12 UTC
svn commit: r1163869 - in /hive/trunk/ql/src:
java/org/apache/hadoop/hive/ql/ppd/ test/queries/clientpositive/
test/results/clientpositive/ test/results/compiler/plan/
Author: jvs
Date: Wed Aug 31 23:41:11 2011
New Revision: 1163869
URL: http://svn.apache.org/viewvc?rev=1163869&view=rev
Log:
HIVE-2383. Incorrect alias filtering for predicate pushdown
(Charles Chen via jvs)
Added:
hive/trunk/ql/src/test/queries/clientpositive/ppd_repeated_alias.q
hive/trunk/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
hive/trunk/ql/src/test/results/clientpositive/auto_join8.q.out
hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables.q.out
hive/trunk/ql/src/test/results/clientpositive/join8.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out
hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Wed Aug 31 23:41:11 2011
@@ -85,12 +85,24 @@ public class ExprWalkerInfo implements N
* the tree by its parent.
*/
private final Map<ExprNodeDesc, ExprInfo> exprInfoMap;
+
+ /**
+ * This is a map from a new pushdown expression generated by the ExprWalker
+ * to the old pushdown expression that it originated from. For example, if
+ * an output column of the current operator is _col0, which comes from an
+ * input column _col1, this would map the filter "Column[_col1]=2" to
+ * "Column[_col0]=2" ("Column[_col1]=2" is new because we move from children
+ * operators to parents in PPD)
+ */
+ private final Map<ExprNodeDesc, ExprNodeDesc> newToOldExprMap;
+
private boolean isDeterministic = true;
public ExprWalkerInfo() {
pushdownPreds = new HashMap<String, List<ExprNodeDesc>>();
nonFinalPreds = new HashMap<String, List<ExprNodeDesc>>();
exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>();
+ newToOldExprMap = new HashMap<ExprNodeDesc, ExprNodeDesc>();
}
public ExprWalkerInfo(Operator<? extends Serializable> op,
@@ -101,6 +113,7 @@ public class ExprWalkerInfo implements N
pushdownPreds = new HashMap<String, List<ExprNodeDesc>>();
exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>();
nonFinalPreds = new HashMap<String, List<ExprNodeDesc>>();
+ newToOldExprMap = new HashMap<ExprNodeDesc, ExprNodeDesc>();
}
/**
@@ -118,6 +131,13 @@ public class ExprWalkerInfo implements N
}
/**
+ * @return the new expression to old expression map
+ */
+ public Map<ExprNodeDesc, ExprNodeDesc> getNewToOldExprMap() {
+ return newToOldExprMap;
+ }
+
+ /**
* @return converted expression for give node. If there is none then returns
* null.
*/
@@ -131,7 +151,7 @@ public class ExprWalkerInfo implements N
/**
* adds a replacement node for this expression.
- *
+ *
* @param oldNode
* original node
* @param newNode
@@ -149,7 +169,7 @@ public class ExprWalkerInfo implements N
/**
* Returns true if the specified expression is pushdown candidate else false.
- *
+ *
* @param expr
* @return true or false
*/
@@ -163,7 +183,7 @@ public class ExprWalkerInfo implements N
/**
* Marks the specified expr to the specified value.
- *
+ *
* @param expr
* @param b
* can
@@ -179,7 +199,7 @@ public class ExprWalkerInfo implements N
/**
* Returns the alias of the specified expr.
- *
+ *
* @param expr
* @return The alias of the expression
*/
@@ -193,7 +213,7 @@ public class ExprWalkerInfo implements N
/**
* Adds the specified alias to the specified expr.
- *
+ *
* @param expr
* @param alias
*/
@@ -212,7 +232,7 @@ public class ExprWalkerInfo implements N
/**
* Adds the specified expr as the top-most pushdown expr (ie all its children
* can be pushed).
- *
+ *
* @param expr
*/
public void addFinalCandidate(ExprNodeDesc expr) {
@@ -220,7 +240,7 @@ public class ExprWalkerInfo implements N
if (pushdownPreds.get(alias) == null) {
pushdownPreds.put(alias, new ArrayList<ExprNodeDesc>());
}
- pushdownPreds.get(alias).add(expr.clone());
+ pushdownPreds.get(alias).add(expr);
}
/**
@@ -240,7 +260,7 @@ public class ExprWalkerInfo implements N
* Returns the list of pushdown expressions for each alias that appear in the
* current operator's RowResolver. The exprs in each list can be combined
* using conjunction (AND).
- *
+ *
* @return the map of alias to a list of pushdown predicates
*/
public Map<String, List<ExprNodeDesc>> getFinalCandidates() {
@@ -257,7 +277,7 @@ public class ExprWalkerInfo implements N
if (nonFinalPreds.get(alias) == null) {
nonFinalPreds.put(alias, new ArrayList<ExprNodeDesc>());
}
- nonFinalPreds.get(alias).add(expr.clone());
+ nonFinalPreds.get(alias).add(expr);
}
/**
@@ -271,7 +291,7 @@ public class ExprWalkerInfo implements N
/**
* Merges the specified pushdown predicates with the current class.
- *
+ *
* @param ewi
* ExpressionWalkerInfo
*/
@@ -288,11 +308,21 @@ public class ExprWalkerInfo implements N
pushdownPreds.put(e.getKey(), e.getValue());
}
}
+ for (Entry<String, List<ExprNodeDesc>> e : ewi.getNonFinalCandidates()
+ .entrySet()) {
+ List<ExprNodeDesc> predList = nonFinalPreds.get(e.getKey());
+ if (predList != null) {
+ predList.addAll(e.getValue());
+ } else {
+ nonFinalPreds.put(e.getKey(), e.getValue());
+ }
+ }
+ newToOldExprMap.putAll(ewi.getNewToOldExprMap());
}
/**
* sets the deterministic flag for this expression.
- *
+ *
* @param b
* deterministic or not
*/
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Wed Aug 31 23:41:11 2011
@@ -51,7 +51,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
* pushdown optimization for the given operator
*/
public final class ExprWalkerProcFactory {
-
+
private static final Log LOG = LogFactory
.getLog(ExprWalkerProcFactory.class.getName());
@@ -239,7 +239,7 @@ public final class ExprWalkerProcFactory
/**
* Extracts pushdown predicates from the given list of predicate expression.
- *
+ *
* @param opContext
* operator context used for resolving column references
* @param op
@@ -277,7 +277,9 @@ public final class ExprWalkerProcFactory
List<Node> startNodes = new ArrayList<Node>();
List<ExprNodeDesc> clonedPreds = new ArrayList<ExprNodeDesc>();
for (ExprNodeDesc node : preds) {
- clonedPreds.add(node.clone());
+ ExprNodeDesc clone = node.clone();
+ clonedPreds.add(clone);
+ exprContext.getNewToOldExprMap().put(clone, node);
}
startNodes.addAll(clonedPreds);
@@ -308,8 +310,16 @@ public final class ExprWalkerProcFactory
if (FunctionRegistry.isOpAnd(expr)) {
// If the operator is AND, we need to determine if any of the children are
// final candidates.
- for (Node ch : expr.getChildren()) {
- extractFinalCandidates((ExprNodeDesc) ch, ctx, conf);
+
+ // For the children, we populate the NewToOldExprMap to keep track of
+ // the original condition before rewriting it for this operator
+ assert ctx.getNewToOldExprMap().containsKey(expr);
+ for (int i = 0; i < expr.getChildren().size(); i++) {
+ ctx.getNewToOldExprMap().put(
+ (ExprNodeDesc) expr.getChildren().get(i),
+ ctx.getNewToOldExprMap().get(expr).getChildren().get(i));
+ extractFinalCandidates((ExprNodeDesc) expr.getChildren().get(i),
+ ctx, conf);
}
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Wed Aug 31 23:41:11 2011
@@ -23,9 +23,9 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
-import java.util.Map.Entry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -43,7 +43,6 @@ import org.apache.hadoop.hive.ql.lib.Nod
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
-import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.RowResolver;
@@ -222,14 +221,49 @@ public final class OpProcFactory {
OpWalkerInfo owi = (OpWalkerInfo) procCtx;
Set<String> aliases = getQualifiedAliases((JoinOperator) nd, owi
.getRowResolver(nd));
- boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, aliases, false);
- if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
- HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
- if (hasUnpushedPredicates) {
- aliases = null;
+ // we pass null for aliases here because mergeWithChildrenPred filters
+ // aliases in the children node context and we need to filter them in
+ // the current JoinOperator's context
+ boolean hasUnpushedPredicates =
+ mergeWithChildrenPred(nd, owi, null, null, false);
+ ExprWalkerInfo prunePreds =
+ owi.getPrunedPreds((Operator<? extends Serializable>) nd);
+ if (prunePreds != null) {
+ Set<String> toRemove = new HashSet<String>();
+ // we don't push down any expressions that refer to aliases that can't
+ // be pushed down per getQualifiedAliases
+ for (String key : prunePreds.getFinalCandidates().keySet()) {
+ if (!aliases.contains(key)) {
+ toRemove.add(key);
+ }
+ }
+ for (String alias : toRemove) {
+ for (ExprNodeDesc expr :
+ prunePreds.getFinalCandidates().get(alias)) {
+ // add expr to the list of predicates rejected from further pushing
+ // so that we know to add it in createFilter()
+ prunePreds.addAlias(expr, alias);
+ prunePreds.addNonFinalCandidate(expr);
+ }
+ prunePreds.getFinalCandidates().remove(alias);
+ }
+ if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+ HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+ // Here, we add all the "non-final candidates", i.e. the predicates
+ // rejected from pushdown through this operator to unpushedPreds
+ // and pass it to createFilter
+ ExprWalkerInfo unpushedPreds = new ExprWalkerInfo();
+ for (Entry<String, List<ExprNodeDesc>> entry :
+ prunePreds.getNonFinalCandidates().entrySet()) {
+ for (ExprNodeDesc expr : entry.getValue()) {
+ assert prunePreds.getNewToOldExprMap().containsKey(expr);
+ ExprNodeDesc oldExpr = prunePreds.getNewToOldExprMap().get(expr);
+ unpushedPreds.addAlias(oldExpr, entry.getKey());
+ unpushedPreds.addFinalCandidate(oldExpr);
+ }
+ }
+ return createFilter((Operator)nd, unpushedPreds, owi);
}
- ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, aliases, false);
- return createFilter((Operator)nd, unpushedPreds, owi);
}
return null;
}
Added: hive/trunk/ql/src/test/queries/clientpositive/ppd_repeated_alias.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ppd_repeated_alias.q?rev=1163869&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ppd_repeated_alias.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/ppd_repeated_alias.q Wed Aug 31 23:41:11 2011
@@ -0,0 +1,33 @@
+drop table pokes;
+drop table pokes2;
+create table pokes (foo int, bar int, blah int);
+create table pokes2 (foo int, bar int, blah int);
+
+-- Q1: predicate should not be pushed on the right side of a left outer join
+explain
+SELECT a.foo as foo1, b.foo as foo2, b.bar
+FROM pokes a LEFT OUTER JOIN pokes2 b
+ON a.foo=b.foo
+WHERE b.bar=3;
+
+-- Q2: predicate should not be pushed on the right side of a left outer join
+explain
+SELECT * FROM
+ (SELECT a.foo as foo1, b.foo as foo2, b.bar
+ FROM pokes a LEFT OUTER JOIN pokes2 b
+ ON a.foo=b.foo) a
+WHERE a.bar=3;
+
+-- Q3: predicate should be pushed
+explain
+SELECT * FROM
+ (SELECT a.foo as foo1, b.foo as foo2, a.bar
+ FROM pokes a JOIN pokes2 b
+ ON a.foo=b.foo) a
+WHERE a.bar=3;
+
+-- Q4: here, the filter c.bar should be created under the first join but above the second
+explain select c.foo, d.bar from (select c.foo, b.bar, c.blah from pokes c left outer join pokes b on c.foo=b.foo) c left outer join pokes d where d.foo=1 and c.bar=2;
+
+drop table pokes;
+drop table pokes2;
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join8.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join8.q.out?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_join8.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_join8.q.out Wed Aug 31 23:41:11 2011
@@ -87,7 +87,7 @@ STAGE PLANS:
alias: src1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: (((key > 10) and (key < 20)) and key is not null)
type: boolean
Select Operator
expressions:
@@ -110,7 +110,7 @@ STAGE PLANS:
Position of Big Table: 0
Filter Operator
predicate:
- expr: (_col2 is null and _col0 is not null)
+ expr: _col2 is null
type: boolean
Select Operator
expressions:
@@ -177,7 +177,7 @@ STAGE PLANS:
alias: src1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: (((key > 10) and (key < 20)) and key is not null)
type: boolean
Select Operator
expressions:
@@ -239,7 +239,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2, _col3
Filter Operator
predicate:
- expr: (_col2 is null and _col0 is not null)
+ expr: _col2 is null
type: boolean
Select Operator
expressions:
@@ -323,11 +323,11 @@ POSTHOOK: Lineage: dest1.c4 SIMPLE [(src
PREHOOK: query: SELECT sum(hash(dest1.c1,dest1.c2,dest1.c3,dest1.c4)) FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-18-54_235_7212718859095519671/-mr-10000
+PREHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-20-19_577_8461311373630191052/-mr-10000
POSTHOOK: query: SELECT sum(hash(dest1.c1,dest1.c2,dest1.c3,dest1.c4)) FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-18-54_235_7212718859095519671/-mr-10000
+POSTHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-20-19_577_8461311373630191052/-mr-10000
POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.c2 SIMPLE [(src)src1.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.c3 EXPRESSION [(src)src2.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables.q.out?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables.q.out Wed Aug 31 23:41:11 2011
@@ -86,7 +86,7 @@ STAGE PLANS:
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/salbiz/hive_2011-08-03_11-40-52_874_7254064139212481338/-mr-10002
+ file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-11-37_886_962059869422887819/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -119,7 +119,7 @@ PREHOOK: Input: default@srcpart@ds=2008-
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-40-53_275_1751206294928260840/-mr-10000
+PREHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-11-38_313_2643938706286429021/-mr-10000
POSTHOOK: query: SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
@@ -127,7 +127,7 @@ POSTHOOK: Input: default@srcpart@ds=2008
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-40-53_275_1751206294928260840/-mr-10000
+POSTHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-11-38_313_2643938706286429021/-mr-10000
82 val_82
82 val_82
82 val_82
@@ -338,7 +338,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: file:/tmp/salbiz/hive_2011-08-03_11-41-25_407_3322108409707049422/-mr-10003
+ destination: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-12-29_480_7231326688955426901/-mr-10003
Stage: Stage-1
Map Reduce
@@ -415,7 +415,7 @@ STAGE PLANS:
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/salbiz/hive_2011-08-03_11-41-25_407_3322108409707049422/-mr-10002
+ file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-12-29_480_7231326688955426901/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -521,7 +521,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: file:/tmp/salbiz/hive_2011-08-03_11-41-25_407_3322108409707049422/-mr-10004
+ destination: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-12-29_480_7231326688955426901/-mr-10004
Stage: Stage-0
Fetch Operator
@@ -540,7 +540,7 @@ PREHOOK: Input: default@srcpart@ds=2008-
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-41-26_398_2468484527777301186/-mr-10000
+PREHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-12-30_228_1386208254160104156/-mr-10000
POSTHOOK: query: SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@default__src_src_index__
@@ -553,7 +553,7 @@ POSTHOOK: Input: default@srcpart@ds=2008
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/salbiz/hive_2011-08-03_11-41-26_398_2468484527777301186/-mr-10000
+POSTHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-12-30_228_1386208254160104156/-mr-10000
POSTHOOK: Lineage: default__src_src_index__._bitmaps EXPRESSION [(src)src.FieldSchema(name:ROW__OFFSET__INSIDE__BLOCK, type:bigint, comment:), ]
POSTHOOK: Lineage: default__src_src_index__._bucketname SIMPLE [(src)src.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
POSTHOOK: Lineage: default__src_src_index__._offset SIMPLE [(src)src.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/join8.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join8.q.out?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/join8.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/join8.q.out Wed Aug 31 23:41:11 2011
@@ -50,7 +50,7 @@ STAGE PLANS:
alias: src1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: (((key > 10) and (key < 20)) and key is not null)
type: boolean
Select Operator
expressions:
@@ -112,7 +112,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2, _col3
Filter Operator
predicate:
- expr: (_col2 is null and _col0 is not null)
+ expr: _col2 is null
type: boolean
Select Operator
expressions:
@@ -209,11 +209,11 @@ POSTHOOK: Lineage: dest1.c4 SIMPLE [(src
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-08-43_966_1049149156870911441/-mr-10000
+PREHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-14-28_754_928376312270623780/-mr-10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-08-43_966_1049149156870911441/-mr-10000
+POSTHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-14-28_754_928376312270623780/-mr-10000
POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.c2 SIMPLE [(src)src1.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.c3 EXPRESSION [(src)src2.FieldSchema(name:key, type:string, comment:default), ]
Added: hive/trunk/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out?rev=1163869&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/ppd_repeated_alias.q.out Wed Aug 31 23:41:11 2011
@@ -0,0 +1,459 @@
+PREHOOK: query: drop table pokes
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table pokes
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table pokes2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table pokes2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table pokes (foo int, bar int, blah int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table pokes (foo int, bar int, blah int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@pokes
+PREHOOK: query: create table pokes2 (foo int, bar int, blah int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table pokes2 (foo int, bar int, blah int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@pokes2
+PREHOOK: query: -- Q1: predicate should not be pushed on the right side of a left outer join
+explain
+SELECT a.foo as foo1, b.foo as foo2, b.bar
+FROM pokes a LEFT OUTER JOIN pokes2 b
+ON a.foo=b.foo
+WHERE b.bar=3
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Q1: predicate should not be pushed on the right side of a left outer join
+explain
+SELECT a.foo as foo1, b.foo as foo2, b.bar
+FROM pokes a LEFT OUTER JOIN pokes2 b
+ON a.foo=b.foo
+WHERE b.bar=3
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME pokes) a) (TOK_TABREF (TOK_TABNAME pokes2) b) (= (. (TOK_TABLE_OR_COL a) foo) (. (TOK_TABLE_OR_COL b) foo)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) foo) foo1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) foo) foo2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) bar))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL b) bar) 3))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a
+ TableScan
+ alias: a
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 0
+ value expressions:
+ expr: foo
+ type: int
+ b
+ TableScan
+ alias: b
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 1
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: int
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {VALUE._col0}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col5, _col6
+ Filter Operator
+ predicate:
+ expr: (_col6 = 3)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col5
+ type: int
+ expr: _col6
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: -- Q2: predicate should not be pushed on the right side of a left outer join
+explain
+SELECT * FROM
+ (SELECT a.foo as foo1, b.foo as foo2, b.bar
+ FROM pokes a LEFT OUTER JOIN pokes2 b
+ ON a.foo=b.foo) a
+WHERE a.bar=3
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Q2: predicate should not be pushed on the right side of a left outer join
+explain
+SELECT * FROM
+ (SELECT a.foo as foo1, b.foo as foo2, b.bar
+ FROM pokes a LEFT OUTER JOIN pokes2 b
+ ON a.foo=b.foo) a
+WHERE a.bar=3
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME pokes) a) (TOK_TABREF (TOK_TABNAME pokes2) b) (= (. (TOK_TABLE_OR_COL a) foo) (. (TOK_TABLE_OR_COL b) foo)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) foo) foo1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) foo) foo2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) bar))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL a) bar) 3))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a:a
+ TableScan
+ alias: a
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 0
+ value expressions:
+ expr: foo
+ type: int
+ a:b
+ TableScan
+ alias: b
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 1
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: int
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {VALUE._col0}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col5, _col6
+ Filter Operator
+ predicate:
+ expr: (_col6 = 3)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col5
+ type: int
+ expr: _col6
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: int
+ expr: _col2
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: -- Q3: predicate should be pushed
+explain
+SELECT * FROM
+ (SELECT a.foo as foo1, b.foo as foo2, a.bar
+ FROM pokes a JOIN pokes2 b
+ ON a.foo=b.foo) a
+WHERE a.bar=3
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Q3: predicate should be pushed
+explain
+SELECT * FROM
+ (SELECT a.foo as foo1, b.foo as foo2, a.bar
+ FROM pokes a JOIN pokes2 b
+ ON a.foo=b.foo) a
+WHERE a.bar=3
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME pokes) a) (TOK_TABREF (TOK_TABNAME pokes2) b) (= (. (TOK_TABLE_OR_COL a) foo) (. (TOK_TABLE_OR_COL b) foo)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) foo) foo1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) foo) foo2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) bar))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL a) bar) 3))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a:a
+ TableScan
+ alias: a
+ Filter Operator
+ predicate:
+ expr: (bar = 3)
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 0
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: int
+ a:b
+ TableScan
+ alias: b
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 1
+ value expressions:
+ expr: foo
+ type: int
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col5
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col5
+ type: int
+ expr: _col1
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: int
+ expr: _col2
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: -- Q4: here, the filter c.bar should be created under the first join but above the second
+explain select c.foo, d.bar from (select c.foo, b.bar, c.blah from pokes c left outer join pokes b on c.foo=b.foo) c left outer join pokes d where d.foo=1 and c.bar=2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Q4: here, the filter c.bar should be created under the first join but above the second
+explain select c.foo, d.bar from (select c.foo, b.bar, c.blah from pokes c left outer join pokes b on c.foo=b.foo) c left outer join pokes d where d.foo=1 and c.bar=2
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME pokes) c) (TOK_TABREF (TOK_TABNAME pokes) b) (= (. (TOK_TABLE_OR_COL c) foo) (. (TOK_TABLE_OR_COL b) foo)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) foo)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) bar)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) blah))))) c) (TOK_TABREF (TOK_TABNAME pokes) d))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) foo)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL d) bar))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL d) foo) 1) (= (. (TOK_TABLE_OR_COL c) bar) 2)))))
+
+STAGE DEPENDENCIES:
+ Stage-2 is a root stage
+ Stage-1 depends on stages: Stage-2
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ c:b
+ TableScan
+ alias: b
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 1
+ value expressions:
+ expr: bar
+ type: int
+ c:c
+ TableScan
+ alias: c
+ Reduce Output Operator
+ key expressions:
+ expr: foo
+ type: int
+ sort order: +
+ Map-reduce partition columns:
+ expr: foo
+ type: int
+ tag: 0
+ value expressions:
+ expr: foo
+ type: int
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {VALUE._col0}
+ 1 {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col6
+ Filter Operator
+ predicate:
+ expr: (_col6 = 2)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col6
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ $INTNAME
+ Reduce Output Operator
+ sort order:
+ tag: 0
+ value expressions:
+ expr: _col0
+ type: int
+ expr: _col1
+ type: int
+ d
+ TableScan
+ alias: d
+ Reduce Output Operator
+ sort order:
+ tag: 1
+ value expressions:
+ expr: foo
+ type: int
+ expr: bar
+ type: int
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col3, _col4
+ Filter Operator
+ predicate:
+ expr: (_col3 = 1)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ expr: _col4
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: drop table pokes
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@pokes
+PREHOOK: Output: default@pokes
+POSTHOOK: query: drop table pokes
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@pokes
+POSTHOOK: Output: default@pokes
+PREHOOK: query: drop table pokes2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@pokes2
+PREHOOK: Output: default@pokes2
+POSTHOOK: query: drop table pokes2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@pokes2
+POSTHOOK: Output: default@pokes2
Modified: hive/trunk/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_bitmap_empty.q.out Wed Aug 31 23:41:11 2011
@@ -1,18 +1,18 @@
PREHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src limit 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/mwang/hive_2011-03-31_14-47-15_927_3803114174833312754/-mr-10000
+PREHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_21-53-10_432_2740171013501038928/-mr-10000
POSTHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,0,0)) from src limit 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/mwang/hive_2011-03-31_14-47-15_927_3803114174833312754/-mr-10000
+POSTHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_21-53-10_432_2740171013501038928/-mr-10000
true
PREHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src limit 1
PREHOOK: type: QUERY
PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/mwang/hive_2011-03-31_14-47-19_658_5617273293846354183/-mr-10000
+PREHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_21-53-20_267_7328220720920988696/-mr-10000
POSTHOOK: query: select ewah_bitmap_empty(array(13,2,4,8589934592,4096,0)) from src limit 1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/mwang/hive_2011-03-31_14-47-19_658_5617273293846354183/-mr-10000
+POSTHOOK: Output: file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_21-53-20_267_7328220720920988696/-mr-10000
false
Modified: hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml?rev=1163869&r1=1163868&r2=1163869&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml Wed Aug 31 23:41:11 2011
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_07" class="java.beans.XMLDecoder">
+<java version="1.6.0_26" class="java.beans.XMLDecoder">
<object class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="id">
<string>Stage-3</string>
@@ -62,11 +62,11 @@
</void>
<void method="put">
<string>location</string>
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1304060229</string>
+ <string>1314756952</string>
</void>
</object>
</void>
@@ -124,11 +124,11 @@
</void>
<void method="put">
<string>location</string>
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1304060229</string>
+ <string>1314756952</string>
</void>
</object>
</void>
@@ -194,11 +194,11 @@
</void>
<void method="put">
<string>location</string>
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1304060229</string>
+ <string>1314756952</string>
</void>
</object>
</void>
@@ -256,11 +256,11 @@
</void>
<void method="put">
<string>location</string>
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1304060229</string>
+ <string>1314756952</string>
</void>
</object>
</void>
@@ -437,21 +437,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_RS_493_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_RS_6_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_493_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_RS_6_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_493_TIME_TAKEN</string>
+ <string>CNTR_NAME_RS_6_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_493_FATAL_ERROR</string>
+ <string>CNTR_NAME_RS_6_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>RS_493</string>
+ <string>RS_6</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -557,21 +557,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_492_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_5_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_492_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_492_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_5_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_492_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_5_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_492</string>
+ <string>SEL_5</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -623,43 +623,94 @@
<void property="childExprs">
<object class="java.util.ArrayList">
<void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
- <void property="column">
- <string>key</string>
+ <object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
+ <void property="childExprs">
+ <object class="java.util.ArrayList">
+ <void method="add">
+ <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
+ <void property="column">
+ <string>key</string>
+ </void>
+ <void property="tabAlias">
+ <string>src1</string>
+ </void>
+ <void property="typeInfo">
+ <object idref="PrimitiveTypeInfo0"/>
+ </void>
+ </object>
+ </void>
+ <void method="add">
+ <object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
+ <void property="typeInfo">
+ <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
+ <void property="typeName">
+ <string>int</string>
+ </void>
+ </object>
+ </void>
+ <void property="value">
+ <int>10</int>
+ </void>
+ </object>
+ </void>
+ </object>
</void>
- <void property="tabAlias">
- <string>src1</string>
+ <void property="genericUDF">
+ <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan"/>
</void>
<void property="typeInfo">
- <object idref="PrimitiveTypeInfo0"/>
+ <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
+ <void property="typeName">
+ <string>boolean</string>
+ </void>
+ </object>
</void>
</object>
</void>
<void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
- <void property="typeInfo">
- <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="typeName">
- <string>int</string>
+ <object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
+ <void property="childExprs">
+ <object class="java.util.ArrayList">
+ <void method="add">
+ <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
+ <void property="column">
+ <string>key</string>
+ </void>
+ <void property="tabAlias">
+ <string>src1</string>
+ </void>
+ <void property="typeInfo">
+ <object idref="PrimitiveTypeInfo0"/>
+ </void>
+ </object>
+ </void>
+ <void method="add">
+ <object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
+ <void property="typeInfo">
+ <object idref="PrimitiveTypeInfo1"/>
+ </void>
+ <void property="value">
+ <int>20</int>
+ </void>
+ </object>
</void>
</object>
</void>
- <void property="value">
- <int>10</int>
+ <void property="genericUDF">
+ <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan"/>
+ </void>
+ <void property="typeInfo">
+ <object idref="PrimitiveTypeInfo2"/>
</void>
</object>
</void>
</object>
</void>
<void property="genericUDF">
- <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan"/>
+ <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd"/>
</void>
<void property="typeInfo">
- <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
- <void property="typeName">
- <string>boolean</string>
- </void>
- </object>
+ <object idref="PrimitiveTypeInfo2"/>
</void>
</object>
</void>
@@ -680,20 +731,10 @@
</void>
</object>
</void>
- <void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
- <void property="typeInfo">
- <object idref="PrimitiveTypeInfo1"/>
- </void>
- <void property="value">
- <int>20</int>
- </void>
- </object>
- </void>
</object>
</void>
<void property="genericUDF">
- <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan"/>
+ <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull"/>
</void>
<void property="typeInfo">
<object idref="PrimitiveTypeInfo2"/>
@@ -715,21 +756,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FIL_501_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_14_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_501_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_14_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_501_TIME_TAKEN</string>
+ <string>CNTR_NAME_FIL_14_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_501_FATAL_ERROR</string>
+ <string>CNTR_NAME_FIL_14_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FIL_501</string>
+ <string>FIL_14</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -825,16 +866,16 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_TS_490_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_TS_3_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_490_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_TS_3_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_490_TIME_TAKEN</string>
+ <string>CNTR_NAME_TS_3_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_490_FATAL_ERROR</string>
+ <string>CNTR_NAME_TS_3_FATAL_ERROR</string>
</void>
</object>
</void>
@@ -849,7 +890,7 @@
</object>
</void>
<void property="operatorId">
- <string>TS_490</string>
+ <string>TS_3</string>
</void>
<void property="schema">
<object class="org.apache.hadoop.hive.ql.exec.RowSchema">
@@ -1022,21 +1063,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_RS_494_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_RS_7_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_494_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_RS_7_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_494_TIME_TAKEN</string>
+ <string>CNTR_NAME_RS_7_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_RS_494_FATAL_ERROR</string>
+ <string>CNTR_NAME_RS_7_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>RS_494</string>
+ <string>RS_7</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1142,21 +1183,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_489_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_2_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_489_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_489_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_2_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_489_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_2_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_489</string>
+ <string>SEL_2</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1292,21 +1333,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FIL_502_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_15_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_502_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_15_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_502_TIME_TAKEN</string>
+ <string>CNTR_NAME_FIL_15_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_502_FATAL_ERROR</string>
+ <string>CNTR_NAME_FIL_15_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FIL_502</string>
+ <string>FIL_15</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1398,16 +1439,16 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_TS_487_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_TS_0_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_487_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_TS_0_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_487_TIME_TAKEN</string>
+ <string>CNTR_NAME_TS_0_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_TS_487_FATAL_ERROR</string>
+ <string>CNTR_NAME_TS_0_FATAL_ERROR</string>
</void>
</object>
</void>
@@ -1422,7 +1463,7 @@
</object>
</void>
<void property="operatorId">
- <string>TS_487</string>
+ <string>TS_0</string>
</void>
<void property="schema">
<object class="org.apache.hadoop.hive.ql.exec.RowSchema">
@@ -1450,7 +1491,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
<object class="java.util.ArrayList">
<void method="add">
<string>c:a:src1</string>
@@ -1465,7 +1506,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
<object class="org.apache.hadoop.hive.ql.plan.PartitionDesc">
<void property="baseFileName">
<string>src</string>
@@ -1522,11 +1563,11 @@
</void>
<void method="put">
<string>location</string>
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1304060229</string>
+ <string>1314756952</string>
</void>
</object>
</void>
@@ -1584,11 +1625,11 @@
</void>
<void method="put">
<string>location</string>
- <string>pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src</string>
+ <string>pfile:/Users/charleschen/hive-trunk-f/build/ql/test/data/warehouse/src</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1304060229</string>
+ <string>1314756952</string>
</void>
</object>
</void>
@@ -1622,13 +1663,13 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.FileSinkDesc">
<void property="dirName">
- <string>file:/tmp/sdong/hive_2011-04-28_23-57-11_965_3434682925105898020/-ext-10001</string>
+ <string>file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-15-53_467_7462858773918170539/-ext-10001</string>
</void>
<void property="numFiles">
<int>1</int>
</void>
<void property="statsAggPrefix">
- <string>file:/tmp/sdong/hive_2011-04-28_23-57-11_965_3434682925105898020/-ext-10001/</string>
+ <string>file:/var/folders/nt/ng21tg0n1jl4547lw0k8lg6hq_nw87/T/charleschen/hive_2011-08-30_19-15-53_467_7462858773918170539/-ext-10001/</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.TableDesc">
@@ -1667,21 +1708,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FS_499_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FS_12_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FS_499_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FS_12_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FS_499_TIME_TAKEN</string>
+ <string>CNTR_NAME_FS_12_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FS_499_FATAL_ERROR</string>
+ <string>CNTR_NAME_FS_12_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FS_499</string>
+ <string>FS_12</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -1841,21 +1882,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_498_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_11_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_498_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_11_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_498_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_11_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_498_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_11_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_498</string>
+ <string>SEL_11</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -2039,21 +2080,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_SEL_496_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_9_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_496_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_SEL_9_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_496_TIME_TAKEN</string>
+ <string>CNTR_NAME_SEL_9_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_SEL_496_FATAL_ERROR</string>
+ <string>CNTR_NAME_SEL_9_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>SEL_496</string>
+ <string>SEL_9</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -2121,63 +2162,12 @@
<void property="childExprs">
<object class="java.util.ArrayList">
<void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
- <void property="childExprs">
- <object class="java.util.ArrayList">
- <void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
- <void property="column">
- <string>_col2</string>
- </void>
- <void property="tabAlias">
- <string>b</string>
- </void>
- <void property="typeInfo">
- <object idref="PrimitiveTypeInfo0"/>
- </void>
- </object>
- </void>
- </object>
- </void>
- <void property="genericUDF">
- <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull"/>
- </void>
- <void property="typeInfo">
- <object idref="PrimitiveTypeInfo2"/>
- </void>
- </object>
- </void>
- <void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
- <void property="childExprs">
- <object class="java.util.ArrayList">
- <void method="add">
- <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
- <void property="column">
- <string>_col0</string>
- </void>
- <void property="tabAlias">
- <string>a</string>
- </void>
- <void property="typeInfo">
- <object idref="PrimitiveTypeInfo0"/>
- </void>
- </object>
- </void>
- </object>
- </void>
- <void property="genericUDF">
- <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull"/>
- </void>
- <void property="typeInfo">
- <object idref="PrimitiveTypeInfo2"/>
- </void>
- </object>
+ <object idref="ExprNodeColumnDesc13"/>
</void>
</object>
</void>
<void property="genericUDF">
- <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd"/>
+ <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull"/>
</void>
<void property="typeInfo">
<object idref="PrimitiveTypeInfo2"/>
@@ -2189,21 +2179,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_FIL_500_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_13_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_500_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_FIL_13_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_500_TIME_TAKEN</string>
+ <string>CNTR_NAME_FIL_13_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_FIL_500_FATAL_ERROR</string>
+ <string>CNTR_NAME_FIL_13_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>FIL_500</string>
+ <string>FIL_13</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">
@@ -2441,21 +2431,21 @@
<void property="counterNames">
<object class="java.util.ArrayList">
<void method="add">
- <string>CNTR_NAME_JOIN_495_NUM_INPUT_ROWS</string>
+ <string>CNTR_NAME_JOIN_8_NUM_INPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_JOIN_495_NUM_OUTPUT_ROWS</string>
+ <string>CNTR_NAME_JOIN_8_NUM_OUTPUT_ROWS</string>
</void>
<void method="add">
- <string>CNTR_NAME_JOIN_495_TIME_TAKEN</string>
+ <string>CNTR_NAME_JOIN_8_TIME_TAKEN</string>
</void>
<void method="add">
- <string>CNTR_NAME_JOIN_495_FATAL_ERROR</string>
+ <string>CNTR_NAME_JOIN_8_FATAL_ERROR</string>
</void>
</object>
</void>
<void property="operatorId">
- <string>JOIN_495</string>
+ <string>JOIN_8</string>
</void>
<void property="parentOperators">
<object class="java.util.ArrayList">