Posted to commits@hive.apache.org by ha...@apache.org on 2015/01/28 07:25:50 UTC
svn commit: r1655226 [3/30] - in /hive/trunk:
contrib/src/test/results/clientpositive/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/optimizer/
ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/ ql/src/java...
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Wed Jan 28 06:25:44 2015
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.parse;
import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -70,7 +69,6 @@ public class ParseContext {
private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
private Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
private HashMap<String, Operator<? extends OperatorDesc>> topOps;
- private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx;
private Set<JoinOperator> joinOps;
private Set<MapJoinOperator> mapJoinOps;
private Set<SMBMapJoinOperator> smbMapJoinOps;
@@ -152,7 +150,6 @@ public class ParseContext {
HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner,
HashMap<TableScanOperator, PrunedPartitionList> opToPartList,
HashMap<String, Operator<? extends OperatorDesc>> topOps,
- LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx,
Set<JoinOperator> joinOps,
Set<SMBMapJoinOperator> smbMapJoinOps,
List<LoadTableDesc> loadTableWork, List<LoadFileDesc> loadFileWork,
@@ -177,7 +174,6 @@ public class ParseContext {
this.smbMapJoinOps = smbMapJoinOps;
this.loadFileWork = loadFileWork;
this.loadTableWork = loadTableWork;
- this.opParseCtx = opParseCtx;
this.topOps = topOps;
this.ctx = ctx;
this.idToTableNameMap = idToTableNameMap;
@@ -303,43 +299,6 @@ public class ParseContext {
this.topOps = topOps;
}
- /**
- * @return the opParseCtx
- */
- public LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> getOpParseCtx() {
- return opParseCtx;
- }
-
- /**
- * Remove the OpParseContext of a specific operator op
- * @param op
- * @return
- */
- public OpParseContext removeOpParseCtx(Operator<? extends OperatorDesc> op) {
- return opParseCtx.remove(op);
- }
-
- /**
- * Update the OpParseContext of operator op to newOpParseContext.
- * If op is not in opParseCtx, a new entry will be added into opParseCtx.
- * The key is op, and the value is newOpParseContext.
- * @param op
- * @param newOpParseContext
- */
- public void updateOpParseCtx(Operator<? extends OperatorDesc> op,
- OpParseContext newOpParseContext) {
- opParseCtx.put(op, newOpParseContext);
- }
-
- /**
- * @param opParseCtx
- * the opParseCtx to set
- */
- public void setOpParseCtx(
- LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx) {
- this.opParseCtx = opParseCtx;
- }
-
public HashMap<String, SplitSample> getNameToSplitSample() {
return nameToSplitSample;
}
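
The hunks above drop the operator-to-OpParseContext map from ParseContext entirely. Callers that previously went through pctx.getOpParseCtx().get(op).getRowResolver() (see the OpProcFactory and OpWalkerInfo hunks further down) now read the same metadata from the operator's own RowSchema. A minimal sketch of the migrated lookup, using only APIs visible in this diff; the wrapper class and method names in the sketch are illustrative:

    import java.util.Set;

    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.exec.RowSchema;
    import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    final class SchemaLookupSketch {
      // Before r1655226 (no longer compiles):
      //   RowResolver rr = pctx.getOpParseCtx().get(op).getRowResolver();
      //   Set<String> aliases = rr.getTableNames();

      // After r1655226: the operator already carries the schema it produces.
      static Set<String> tableAliases(Operator<? extends OperatorDesc> op) {
        RowSchema schema = op.getSchema();
        return schema.getTableNames();
      }
    }
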
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Wed Jan 28 06:25:44 2015
@@ -54,7 +54,6 @@ public class RowResolver implements Seri
// TODO: Refactor this and do in a more object oriented manner
private boolean isExprResolver;
- @SuppressWarnings("unused")
private static final Log LOG = LogFactory.getLog(RowResolver.class.getName());
public RowResolver() {
@@ -97,6 +96,14 @@ public class RowResolver implements Seri
public void put(String tab_alias, String col_alias, ColumnInfo colInfo) {
if (!addMappingOnly(tab_alias, col_alias, colInfo)) {
+ //Make sure that the table alias and column alias are stored
+ //in the column info
+ if (tab_alias != null) {
+ colInfo.setTabAlias(tab_alias.toLowerCase());
+ }
+ if (col_alias != null) {
+ colInfo.setAlias(col_alias.toLowerCase());
+ }
rowSchema.getSignature().add(colInfo);
}
}
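
With the RowResolver.put() change above, every ColumnInfo appended to the row schema now also carries its lowercased table alias and column alias, so code that only sees the RowSchema can still recover the aliases later. A minimal usage sketch, assuming a RowResolver named rr is in scope; the table name, column name and type are made up:

    import org.apache.hadoop.hive.ql.exec.ColumnInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    ColumnInfo ci = new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, null, false);
    rr.put("SRC", "key", ci);
    // The aliases are now stamped onto the ColumnInfo itself:
    //   ci.getTabAlias() -> "src"
    //   ci.getAlias()    -> "key"
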
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Jan 28 06:25:44 2015
@@ -381,7 +381,6 @@ public class SemanticAnalyzer extends Ba
opToPartList = pctx.getOpToPartList();
opToSamplePruner = pctx.getOpToSamplePruner();
topOps = pctx.getTopOps();
- opParseCtx = pctx.getOpParseCtx();
loadTableWork = pctx.getLoadTableWork();
loadFileWork = pctx.getLoadFileWork();
ctx = pctx.getContext();
@@ -397,8 +396,7 @@ public class SemanticAnalyzer extends Ba
}
public ParseContext getParseContext() {
- return new ParseContext(conf, qb, ast, opToPartPruner, opToPartList,
- topOps, opParseCtx,
+ return new ParseContext(conf, qb, ast, opToPartPruner, opToPartList, topOps,
new HashSet<JoinOperator>(joinContext.keySet()),
new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
@@ -7138,7 +7136,7 @@ public class SemanticAnalyzer extends Ba
ColumnInfo newColInfo = new ColumnInfo(colInfo);
newColInfo.setInternalName(Utilities.ReduceField.KEY + ".reducesinkkey" + kindex);
newColInfo.setTabAlias(nm[0]);
- rsRR.addMappingOnly(nm[0], nm[1], newColInfo);
+ rsRR.put(nm[0], nm[1], newColInfo);
if (nm2 != null) {
rsRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
}
@@ -7389,7 +7387,7 @@ public class SemanticAnalyzer extends Ba
ColumnInfo newColInfo = new ColumnInfo(colInfo);
newColInfo.setInternalName(Utilities.ReduceField.KEY + ".reducesinkkey" + kindex);
newColInfo.setTabAlias(nm[0]);
- outputRR.addMappingOnly(nm[0], nm[1], newColInfo);
+ outputRR.put(nm[0], nm[1], newColInfo);
if (nm2 != null) {
outputRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
}
@@ -10161,7 +10159,7 @@ public class SemanticAnalyzer extends Ba
// 4. Generate Parse Context for Optimizer & Physical compiler
ParseContext pCtx = new ParseContext(conf, qb, plannerCtx.child,
- opToPartPruner, opToPartList, topOps, opParseCtx,
+ opToPartPruner, opToPartList, topOps,
new HashSet<JoinOperator>(joinContext.keySet()),
new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java Wed Jan 28 06:25:44 2015
@@ -387,7 +387,7 @@ public abstract class TaskCompiler {
ParseContext clone = new ParseContext(conf,
pCtx.getQB(), pCtx.getParseTree(),
pCtx.getOpToPartPruner(), pCtx.getOpToPartList(), pCtx.getTopOps(),
- pCtx.getOpParseCtx(), pCtx.getJoinOps(), pCtx.getSmbMapJoinOps(),
+ pCtx.getJoinOps(), pCtx.getSmbMapJoinOps(),
pCtx.getLoadTableWork(), pCtx.getLoadFileWork(), pCtx.getContext(),
pCtx.getIdToTableNameMap(), pCtx.getDestTableId(), pCtx.getUCtx(),
pCtx.getListMapJoinOpsNoReducer(), pCtx.getGroupOpToInputTables(),
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java Wed Jan 28 06:25:44 2015
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.ql.exec.Op
import org.apache.hadoop.hive.ql.exec.OperatorUtils;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.BucketCol;
import org.apache.hadoop.hive.ql.optimizer.physical.BucketingSortingCtx.SortCol;
-import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.SplitSample;
import org.apache.hadoop.mapred.JobConf;
@@ -96,7 +95,6 @@ public class MapWork extends BaseWork {
private Long minSplitSize;
private Long minSplitSizePerNode;
private Long minSplitSizePerRack;
- private final int tag = 0;
//use sampled partitioning
private int samplingType;
@@ -105,7 +103,6 @@ public class MapWork extends BaseWork {
public static final int SAMPLING_ON_START = 2; // sampling on task running
// the following two are used for join processing
- private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap;
private boolean leftInputJoin;
private String[] baseSrc;
private List<String> mapAliases;
@@ -457,16 +454,6 @@ public class MapWork extends BaseWork {
return new ArrayList<PartitionDesc>(aliasToPartnInfo.values());
}
- public
- LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> getOpParseCtxMap() {
- return opParseCtxMap;
- }
-
- public void setOpParseCtxMap(
- LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap) {
- this.opParseCtxMap = opParseCtxMap;
- }
-
public Path getTmpHDFSPath() {
return tmpHDFSPath;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Wed Jan 28 06:25:44 2015
@@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -62,7 +61,6 @@ public class ExprWalkerInfo implements N
protected static final Log LOG = LogFactory.getLog(OpProcFactory.class
.getName());;
private Operator<? extends OperatorDesc> op = null;
- private RowResolver toRR = null;
/**
* Values the expression sub-trees (predicates) that can be pushed down for
@@ -104,10 +102,8 @@ public class ExprWalkerInfo implements N
newToOldExprMap = new HashMap<ExprNodeDesc, ExprNodeDesc>();
}
- public ExprWalkerInfo(Operator<? extends OperatorDesc> op,
- final RowResolver toRR) {
+ public ExprWalkerInfo(Operator<? extends OperatorDesc> op) {
this.op = op;
- this.toRR = toRR;
pushdownPreds = new HashMap<String, List<ExprNodeDesc>>();
exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>();
@@ -123,13 +119,6 @@ public class ExprWalkerInfo implements N
}
/**
- * @return the row resolver of the operator of this expression.
- */
- public RowResolver getToRR() {
- return toRR;
- }
-
- /**
* @return the new expression to old expression map
*/
public Map<ExprNodeDesc, ExprNodeDesc> getNewToOldExprMap() {
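
ExprWalkerInfo no longer caches a destination RowResolver; the operator it already holds is enough, because the operator's RowSchema answers the same questions. A minimal construction sketch, assuming some operator reduceSink is in scope:

    ExprWalkerInfo walkerInfo = new ExprWalkerInfo(reduceSink);   // one-arg constructor after this change
    RowSchema outputSchema = walkerInfo.getOp().getSchema();      // replaces the removed getToRR()
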
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Wed Jan 28 06:25:44 2015
@@ -26,8 +26,10 @@ import java.util.Stack;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -37,7 +39,6 @@ import org.apache.hadoop.hive.ql.lib.Nod
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -69,9 +70,13 @@ public final class ExprWalkerProcFactory
Object... nodeOutputs) throws SemanticException {
ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx;
ExprNodeColumnDesc colref = (ExprNodeColumnDesc) nd;
- RowResolver toRR = ctx.getToRR();
+ RowSchema toRS = ctx.getOp().getSchema();
Operator<? extends OperatorDesc> op = ctx.getOp();
- String[] colAlias = toRR.reverseLookup(colref.getColumn());
+ ColumnInfo ci = toRS.getColumnInfo(colref.getColumn());
+ String tabAlias = null;
+ if (ci != null) {
+ tabAlias = ci.getTabAlias();
+ }
boolean isCandidate = true;
if (op.getColumnExprMap() != null) {
@@ -87,19 +92,19 @@ public final class ExprWalkerProcFactory
if (exp instanceof ExprNodeGenericFuncDesc) {
isCandidate = false;
}
- if (exp instanceof ExprNodeColumnDesc && colAlias == null) {
+ if (exp instanceof ExprNodeColumnDesc && ci == null) {
ExprNodeColumnDesc column = (ExprNodeColumnDesc)exp;
- colAlias = new String[]{column.getTabAlias(), column.getColumn()};
+ tabAlias = column.getTabAlias();
}
}
ctx.addConvertedNode(colref, exp);
ctx.setIsCandidate(exp, isCandidate);
- ctx.addAlias(exp, colAlias[0]);
+ ctx.addAlias(exp, tabAlias);
} else {
- if (colAlias == null) {
+ if (ci == null) {
return false;
}
- ctx.addAlias(colref, colAlias[0]);
+ ctx.addAlias(colref, tabAlias);
}
ctx.setIsCandidate(colref, isCandidate);
return isCandidate;
@@ -256,8 +261,7 @@ public final class ExprWalkerProcFactory
Operator<? extends OperatorDesc> op, List<ExprNodeDesc> preds)
throws SemanticException {
// Create the walker, the rules dispatcher and the context.
- ExprWalkerInfo exprContext = new ExprWalkerInfo(op, opContext
- .getRowResolver(op));
+ ExprWalkerInfo exprContext = new ExprWalkerInfo(op);
// create a walker which walks the tree in a DFS manner while maintaining
// the operator stack. The dispatcher
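
The column processor above replaces RowResolver.reverseLookup() with a lookup against the operator's RowSchema: the table alias of a column reference comes from the matching ColumnInfo, and a null ColumnInfo means the column is not produced by this operator. A compact restatement of that lookup as a helper; the wrapper class and method names are illustrative:

    import org.apache.hadoop.hive.ql.exec.ColumnInfo;
    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.exec.RowSchema;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

    final class AliasLookupSketch {
      static String tabAliasOf(Operator<?> op, ExprNodeColumnDesc colref) {
        RowSchema toRS = op.getSchema();
        ColumnInfo ci = toRS.getColumnInfo(colref.getColumn());
        return ci == null ? null : ci.getTabAlias();   // null: column not visible in this schema
      }
    }
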
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Wed Jan 28 06:25:44 2015
@@ -47,8 +47,6 @@ import org.apache.hadoop.hive.ql.lib.Nod
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.OpParseContext;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -459,7 +457,7 @@ public final class OpProcFactory {
LOG.info("Processing for " + nd.getName() + "("
+ ((Operator) nd).getIdentifier() + ")");
OpWalkerInfo owi = (OpWalkerInfo) procCtx;
- Set<String> aliases = getAliases(nd, owi);
+ Set<String> aliases = getAliases(nd);
// we pass null for aliases here because mergeWithChildrenPred filters
// aliases in the children node context and we need to filter them in
// the current JoinOperator's context
@@ -495,8 +493,8 @@ public final class OpProcFactory {
return null;
}
- protected Set<String> getAliases(Node nd, OpWalkerInfo owi) throws SemanticException {
- return owi.getRowResolver(nd).getTableNames();
+ protected Set<String> getAliases(Node nd) throws SemanticException {
+ return ((Operator)nd).getSchema().getTableNames();
}
protected Object handlePredicates(Node nd, ExprWalkerInfo prunePreds, OpWalkerInfo owi)
@@ -512,8 +510,8 @@ public final class OpProcFactory {
public static class JoinPPD extends JoinerPPD {
@Override
- protected Set<String> getAliases(Node nd, OpWalkerInfo owi) {
- return getQualifiedAliases((JoinOperator) nd, owi.getRowResolver(nd));
+ protected Set<String> getAliases(Node nd) {
+ return getQualifiedAliases((JoinOperator) nd, ((JoinOperator)nd).getSchema());
}
/**
@@ -540,7 +538,7 @@ public final class OpProcFactory {
* Row resolver
* @return set of qualified aliases
*/
- private Set<String> getQualifiedAliases(JoinOperator op, RowResolver rr) {
+ private Set<String> getQualifiedAliases(JoinOperator op, RowSchema rs) {
Set<String> aliases = new HashSet<String>();
JoinCondDesc[] conds = op.getConf().getConds();
Map<Integer, Set<String>> posToAliasMap = op.getPosToAliasMap();
@@ -560,7 +558,7 @@ public final class OpProcFactory {
if(i == -1){
aliases.addAll(posToAliasMap.get(0));
}
- Set<String> aliases2 = rr.getTableNames();
+ Set<String> aliases2 = rs.getTableNames();
aliases.retainAll(aliases2);
return aliases;
}
@@ -691,7 +689,7 @@ public final class OpProcFactory {
String[] aliases = ((ReduceSinkOperator)operator).getInputAliases();
return new HashSet<String>(Arrays.asList(aliases));
}
- Set<String> includes = owi.getRowResolver(operator).getTableNames();
+ Set<String> includes = operator.getSchema().getTableNames();
if (includes.size() == 1 && includes.contains("")) {
// Reduce sink of group by operator
return null;
@@ -794,7 +792,7 @@ public final class OpProcFactory {
protected static Object createFilter(Operator op,
Map<String, List<ExprNodeDesc>> predicates, OpWalkerInfo owi) {
- RowResolver inputRR = owi.getRowResolver(op);
+ RowSchema inputRS = op.getSchema();
// combine all predicates into a single expression
List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>();
@@ -834,7 +832,7 @@ public final class OpProcFactory {
.getChildOperators();
op.setChildOperators(null);
Operator<FilterDesc> output = OperatorFactory.getAndMakeChild(
- new FilterDesc(condn, false), new RowSchema(inputRR.getColumnInfos()),
+ new FilterDesc(condn, false), new RowSchema(inputRS.getSignature()),
op);
output.setChildOperators(originalChilren);
for (Operator<? extends OperatorDesc> ch : originalChilren) {
@@ -845,8 +843,6 @@ public final class OpProcFactory {
parentOperators.remove(pos);
parentOperators.add(pos, output); // add the new op as the old
}
- OpParseContext ctx = new OpParseContext(inputRR);
- owi.put(output, ctx);
if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
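
createFilter() in OpProcFactory now builds the pushed-down FilterOperator directly from the input operator's RowSchema and no longer registers an OpParseContext for it. A minimal sketch of the creation call, assuming condn (the combined predicate) and op (the operator being wrapped) are in scope:

    Operator<FilterDesc> output = OperatorFactory.getAndMakeChild(
        new FilterDesc(condn, false),
        new RowSchema(op.getSchema().getSignature()),   // copy of the input operator's column signature
        op);
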
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java Wed Jan 28 06:25:44 2015
@@ -20,15 +20,11 @@ package org.apache.hadoop.hive.ql.ppd;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
/**
@@ -41,13 +37,11 @@ public class OpWalkerInfo implements Nod
*/
private final HashMap<Operator<? extends OperatorDesc>, ExprWalkerInfo>
opToPushdownPredMap;
- private final Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
private final ParseContext pGraphContext;
private final List<FilterOperator> candidateFilterOps;
public OpWalkerInfo(ParseContext pGraphContext) {
this.pGraphContext = pGraphContext;
- opToParseCtxMap = pGraphContext.getOpParseCtx();
opToPushdownPredMap = new HashMap<Operator<? extends OperatorDesc>, ExprWalkerInfo>();
candidateFilterOps = new ArrayList<FilterOperator>();
}
@@ -61,15 +55,6 @@ public class OpWalkerInfo implements Nod
return opToPushdownPredMap.put(op, value);
}
- public RowResolver getRowResolver(Node op) {
- return opToParseCtxMap.get(op).getRowResolver();
- }
-
- public OpParseContext put(Operator<? extends OperatorDesc> key,
- OpParseContext value) {
- return opToParseCtxMap.put(key, value);
- }
-
public ParseContext getParseContext() {
return pGraphContext;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java Wed Jan 28 06:25:44 2015
@@ -44,9 +44,7 @@ import org.apache.hadoop.hive.ql.lib.Pre
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.optimizer.Transform;
-import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
@@ -97,20 +95,19 @@ public class PredicateTransitivePropagat
((FilterOperator)parent).getConf().setPredicate(merged);
} else {
ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
- RowResolver parentRR = pGraphContext.getOpParseCtx().get(parent).getRowResolver();
- Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRR, merged);
- pGraphContext.getOpParseCtx().put(newFilter, new OpParseContext(parentRR));
+ RowSchema parentRS = parent.getSchema();
+ Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRS, merged);
}
}
return pGraphContext;
}
- // insert filter operator between target(chilld) and input(parent)
+ // insert filter operator between target(child) and input(parent)
private Operator<FilterDesc> createFilter(Operator<?> target, Operator<?> parent,
- RowResolver parentRR, ExprNodeDesc filterExpr) {
+ RowSchema parentRS, ExprNodeDesc filterExpr) {
Operator<FilterDesc> filter = OperatorFactory.get(new FilterDesc(filterExpr, false),
- new RowSchema(parentRR.getColumnInfos()));
+ new RowSchema(parentRS.getSignature()));
filter.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
filter.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
filter.getParentOperators().add(parent);
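
Both this transform and SyntheticJoinPredicate below now pass the parent's RowSchema into createFilter(), and the newly created filter no longer needs to be registered in the removed opParseCtx map. A minimal before/after sketch, assuming reducer, parent and merged are in scope:

    // Before: create the filter, then register its RowResolver with the parse context.
    //   Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRR, merged);
    //   pGraphContext.getOpParseCtx().put(newFilter, new OpParseContext(parentRR));

    // After: one call, driven by the parent's schema alone.
    Operator<FilterDesc> newFilter = createFilter(reducer, parent, parent.getSchema(), merged);
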
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java Wed Jan 28 06:25:44 2015
@@ -47,9 +47,7 @@ import org.apache.hadoop.hive.ql.lib.Pre
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.optimizer.Transform;
-import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicListDesc;
@@ -96,9 +94,9 @@ public class SyntheticJoinPredicate impl
// insert filter operator between target(child) and input(parent)
private static Operator<FilterDesc> createFilter(Operator<?> target, Operator<?> parent,
- RowResolver parentRR, ExprNodeDesc filterExpr) {
+ RowSchema parentRS, ExprNodeDesc filterExpr) {
Operator<FilterDesc> filter = OperatorFactory.get(new FilterDesc(filterExpr, false),
- new RowSchema(parentRR.getColumnInfos()));
+ new RowSchema(parentRS.getSignature()));
filter.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
filter.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
filter.getParentOperators().add(parent);
@@ -139,7 +137,7 @@ public class SyntheticJoinPredicate impl
int[][] targets = getTargets(join);
Operator<? extends OperatorDesc> parent = source.getParentOperators().get(0);
- RowResolver parentRR = pCtx.getOpParseCtx().get(parent).getRowResolver();
+ RowSchema parentRS = parent.getSchema();
// don't generate for null-safes.
if (join.getConf().getNullSafes() != null) {
@@ -194,8 +192,7 @@ public class SyntheticJoinPredicate impl
}
}
- Operator<FilterDesc> newFilter = createFilter(source, parent, parentRR, syntheticExpr);
- pCtx.getOpParseCtx().put(newFilter, new OpParseContext(parentRR));
+ Operator<FilterDesc> newFilter = createFilter(source, parent, parentRS, syntheticExpr);
parent = newFilter;
}
Modified: hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out Wed Jan 28 06:25:44 2015
@@ -241,31 +241,27 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-0
Fetch Operator
@@ -423,31 +419,27 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-0
Fetch Operator
@@ -584,31 +576,27 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+#### A masked pattern was here ####
+ NumFilesPerFileSink: 1
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0
- columns.types bigint
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
+#### A masked pattern was here ####
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0
+ columns.types bigint
+ escape.delim \
+ hive.serialization.extend.nesting.levels true
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
Stage: Stage-0
Fetch Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out Wed Jan 28 06:25:44 2015
@@ -212,17 +212,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 7 Data size: 686 Basic stats: COMPLETE Column stats: PARTIAL
- Select Operator
- expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: int)
- outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
Statistics: Num rows: 7 Data size: 686 Basic stats: COMPLETE Column stats: PARTIAL
- File Output Operator
- compressed: false
- Statistics: Num rows: 7 Data size: 686 Basic stats: COMPLETE Column stats: PARTIAL
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -277,17 +273,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -334,17 +326,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 720 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 8 Data size: 720 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 8 Data size: 720 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -392,17 +380,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 32 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 32 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 32 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -450,17 +434,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 24 Data size: 2160 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 24 Data size: 2160 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 24 Data size: 2160 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -508,17 +488,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 720 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 8 Data size: 720 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 8 Data size: 720 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -566,17 +542,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 16 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 16 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -624,17 +596,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 24 Data size: 2160 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 24 Data size: 2160 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 24 Data size: 2160 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -682,17 +650,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 32 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 32 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 32 Data size: 2880 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -743,17 +707,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -801,17 +761,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 16 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 16 Data size: 1440 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -860,17 +816,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 4 Data size: 344 Basic stats: COMPLETE Column stats: PARTIAL
- Select Operator
- expressions: _col0 (type: string), _col1 (type: bigint)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 4 Data size: 344 Basic stats: COMPLETE Column stats: PARTIAL
- File Output Operator
- compressed: false
- Statistics: Num rows: 4 Data size: 344 Basic stats: COMPLETE Column stats: PARTIAL
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -918,17 +870,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -976,17 +924,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 12 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 12 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 12 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1034,17 +978,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1092,17 +1032,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1150,17 +1086,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 12 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 12 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 12 Data size: 1194 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1208,17 +1140,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1265,17 +1193,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1323,17 +1247,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 16 Data size: 1592 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out Wed Jan 28 06:25:44 2015
@@ -109,17 +109,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -167,17 +163,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 40 Data size: 400 Basic stats: COMPLETE Column stats: NONE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 40 Data size: 400 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- Statistics: Num rows: 40 Data size: 400 Basic stats: COMPLETE Column stats: NONE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -228,17 +220,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 2 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 2 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 2 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -287,17 +275,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
- Select Operator
- expressions: _col0 (type: string), _col1 (type: bigint)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
- File Output Operator
- compressed: false
- Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -345,17 +329,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1384 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 8 Data size: 1384 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 8 Data size: 1384 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -402,17 +382,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0, _col1
Statistics: Num rows: 2 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 2 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 2 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -460,17 +436,13 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1384 Basic stats: COMPLETE Column stats: COMPLETE
pruneGroupingSetId: true
- Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
- outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
Statistics: Num rows: 8 Data size: 1384 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 8 Data size: 1384 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join.q.out?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join.q.out Wed Jan 28 06:25:44 2015
@@ -202,18 +202,14 @@ STAGE PLANS:
0 _col1 (type: int)
1 _col0 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 41 Data size: 4059 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 41 Data size: 4059 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 41 Data size: 4059 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 41 Data size: 7954 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 41 Data size: 7954 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -276,18 +272,14 @@ STAGE PLANS:
0 _col1 (type: int), _col0 (type: string)
1 _col0 (type: int), _col1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 6 Data size: 594 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 6 Data size: 594 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 6 Data size: 594 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 6 Data size: 1164 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 6 Data size: 1164 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -346,18 +338,14 @@ STAGE PLANS:
0 _col1 (type: int), _col0 (type: string)
1 _col0 (type: int), _col1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 6 Data size: 594 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 6 Data size: 594 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 6 Data size: 594 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 6 Data size: 1164 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 6 Data size: 1164 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -420,18 +408,14 @@ STAGE PLANS:
0 _col1 (type: int), _col0 (type: string), _col0 (type: string)
1 _col0 (type: int), _col1 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 11 Data size: 1089 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 11 Data size: 1089 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 11 Data size: 1089 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 11 Data size: 2134 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 11 Data size: 2134 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -513,18 +497,14 @@ STAGE PLANS:
1 _col0 (type: int)
2 _col1 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
- Statistics: Num rows: 1536 Data size: 152064 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string), _col5 (type: string), _col6 (type: int), _col7 (type: int)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
- Statistics: Num rows: 1536 Data size: 152064 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1536 Data size: 152064 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 658 Data size: 192794 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 658 Data size: 192794 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -604,18 +584,14 @@ STAGE PLANS:
1 _col0 (type: int)
2 _col1 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
- Statistics: Num rows: 47 Data size: 4794 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string), _col5 (type: string), _col6 (type: int), _col7 (type: bigint), _col8 (type: int)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
- Statistics: Num rows: 47 Data size: 4794 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 47 Data size: 4794 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 47 Data size: 13912 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 47 Data size: 13912 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -696,18 +672,14 @@ STAGE PLANS:
1 _col0 (type: int), _col1 (type: string)
2 _col1 (type: int), _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
- Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: string), _col5 (type: string), _col6 (type: int), _col7 (type: bigint), _col8 (type: int)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
- Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out Wed Jan 28 06:25:44 2015
@@ -1001,7 +1001,7 @@ STAGE PLANS:
0 _col0 (type: int)
1 _col0 (type: int)
outputColumnNames: _col1
- Statistics: Num rows: 1017 Data size: 4068 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
table:
@@ -1017,7 +1017,7 @@ STAGE PLANS:
key expressions: _col1 (type: int)
sort order: +
Map-reduce partition columns: _col1 (type: int)
- Statistics: Num rows: 1017 Data size: 4068 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
TableScan
alias: s
Statistics: Num rows: 12 Data size: 3143 Basic stats: COMPLETE Column stats: COMPLETE
@@ -1041,14 +1041,14 @@ STAGE PLANS:
0 _col1 (type: int)
1 _col0 (type: int)
outputColumnNames: _col3
- Statistics: Num rows: 1017 Data size: 4068 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: _col3 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 1017 Data size: 4068 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1017 Data size: 4068 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out?rev=1655226&r1=1655225&r2=1655226&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out Wed Jan 28 06:25:44 2015
@@ -712,17 +712,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -766,17 +762,13 @@ STAGE PLANS:
mode: mergepartial
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: bigint)
- outputColumnNames: _col0
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Stage: Stage-0
Fetch Operator
@@ -1070,17 +1062,17 @@ STAGE PLANS:
Select Operator
expressions: VALUE._col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
Limit
Number of rows: 10
- Statistics: Num rows: 2 Data size: 178 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
expressions: _col0 (type: string), 11.0 (type: double)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 2 Data size: 194 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 194 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat