Posted to commits@hive.apache.org by na...@apache.org on 2009/08/11 04:06:32 UTC
svn commit: r802976 [2/17] - in /hadoop/hive/trunk: ./
common/src/java/org/apache/hadoop/hive/conf/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/lib/
ql/src/java/org/apache/hadoop/hive/ql/optimizer/ ql/src/java/org/...
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java Tue Aug 11 02:06:23 2009
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.metadata.Partition;
+
+/**
+ * The list of pruned partitions.
+ */
+public class PrunedPartitionList {
+ // confirmed partitions - satisfy the partition criteria
+ private Set<Partition> confirmedPartns;
+
+ // unknown partitions - may or may not satisfy the partition criteria
+ private Set<Partition> unknownPartns;
+
+ // denied partitions - do not satisfy the partition criteria
+ private Set<Partition> deniedPartns;
+
+ /**
+ * @param confirmedPartns confirmed partitions
+ * @param unknownPartns unknown partitions
+ * @param deniedPartns denied partitions
+ */
+ public PrunedPartitionList(Set<Partition> confirmedPartns, Set<Partition> unknownPartns, Set<Partition> deniedPartns) {
+ this.confirmedPartns = confirmedPartns;
+ this.unknownPartns = unknownPartns;
+ this.deniedPartns = deniedPartns;
+ }
+
+ /**
+ * get confirmed partitions
+ * @return confirmedPartns confirmed partitions
+ */
+ public Set<Partition> getConfirmedPartns() {
+ return confirmedPartns;
+ }
+
+ /**
+ * get unknown partitions
+ * @return unknownPartns unknown partitions
+ */
+ public Set<Partition> getUnknownPartns() {
+ return unknownPartns;
+ }
+
+ /**
+ * get denied partitions
+ * @return deniedPartns denied partitions
+ */
+ public Set<Partition> getDeniedPartns() {
+ return deniedPartns;
+ }
+
+ /**
+ * set confirmed partitions
+ * @param confirmedPartns confirmed partitions
+ */
+ public void setConfirmedPartns(Set<Partition> confirmedPartns) {
+ this.confirmedPartns = confirmedPartns;
+ }
+
+ /**
+ * set unknown partitions
+ * @param unknownPartns unknown partitions
+ */
+ public void setUnknownPartns(Set<Partition> unknownPartns) {
+ this.unknownPartns = unknownPartns;
+ }
+}
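For readers skimming the patch, here is a minimal sketch of how a caller might consume the three sets. The wrapper class PrunedPartitionListSketch and the empty placeholder sets are invented for illustration; only the PrunedPartitionList API comes from the file above.

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;

    public class PrunedPartitionListSketch {
      public static void main(String[] args) {
        // Empty sets stand in for a pruner's real output.
        Set<Partition> confirmed = new HashSet<Partition>();
        Set<Partition> unknown = new HashSet<Partition>();
        Set<Partition> denied = new HashSet<Partition>();

        PrunedPartitionList partsList =
            new PrunedPartitionList(confirmed, unknown, denied);

        // Confirmed partitions can be scanned unconditionally; unknown
        // partitions still need the partition predicate evaluated.
        Set<Partition> toScan =
            new HashSet<Partition>(partsList.getConfirmedPartns());
        toScan.addAll(partsList.getUnknownPartns());

        // Denied partitions are skipped entirely.
        System.out.println("scanning " + toScan.size()
            + " partitions, skipping " + partsList.getDeniedPartns().size());
      }
    }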
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Aug 11 02:06:23 2009
@@ -51,8 +51,6 @@
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.exec.UDAF;
-import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
@@ -111,7 +109,6 @@
import org.apache.hadoop.hive.ql.plan.unionDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -136,7 +133,8 @@
*/
public class SemanticAnalyzer extends BaseSemanticAnalyzer {
- private HashMap<String, PartitionPruner> aliasToPruner;
+ private HashMap<String, org.apache.hadoop.hive.ql.parse.ASTPartitionPruner> aliasToPruner;
+ private HashMap<TableScanOperator, exprNodeDesc> opToPartPruner;
private HashMap<String, SamplePruner> aliasToSamplePruner;
private HashMap<String, Operator<? extends Serializable>> topOps;
private HashMap<String, Operator<? extends Serializable>> topSelOps;
@@ -144,6 +142,7 @@
private List<loadTableDesc> loadTableWork;
private List<loadFileDesc> loadFileWork;
private Map<JoinOperator, QBJoinTree> joinContext;
+ private HashMap<TableScanOperator, Table> topToTable;
private QB qb;
private ASTNode ast;
private int destTableId;
@@ -168,7 +167,8 @@
super(conf);
- this.aliasToPruner = new HashMap<String, PartitionPruner>();
+ this.aliasToPruner = new HashMap<String, org.apache.hadoop.hive.ql.parse.ASTPartitionPruner>();
+ this.opToPartPruner = new HashMap<TableScanOperator, exprNodeDesc>();
this.aliasToSamplePruner = new HashMap<String, SamplePruner>();
this.topOps = new HashMap<String, Operator<? extends Serializable>>();
this.topSelOps = new HashMap<String, Operator<? extends Serializable>>();
@@ -176,6 +176,7 @@
this.loadFileWork = new ArrayList<loadFileDesc>();
opParseCtx = new LinkedHashMap<Operator<? extends Serializable>, OpParseContext>();
joinContext = new HashMap<JoinOperator, QBJoinTree>();
+ topToTable = new HashMap<TableScanOperator, Table>();
this.destTableId = 1;
this.uCtx = null;
this.listMapJoinOpsNoReducer = new ArrayList<MapJoinOperator>();
@@ -205,6 +206,7 @@
public void init(ParseContext pctx) {
aliasToPruner = pctx.getAliasToPruner();
+ opToPartPruner = pctx.getOpToPartPruner();
aliasToSamplePruner = pctx.getAliasToSamplePruner();
topOps = pctx.getTopOps();
topSelOps = pctx.getTopSelOps();
@@ -221,8 +223,8 @@
}
public ParseContext getParseContext() {
- return new ParseContext(conf, qb, ast, aliasToPruner, aliasToSamplePruner, topOps,
- topSelOps, opParseCtx, joinContext, loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
+ return new ParseContext(conf, qb, ast, aliasToPruner, opToPartPruner, aliasToSamplePruner, topOps,
+ topSelOps, opParseCtx, joinContext, topToTable, loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
listMapJoinOpsNoReducer);
}
@@ -232,7 +234,7 @@
assert (ast.getToken() != null);
switch (ast.getToken().getType()) {
- case HiveParser.TOK_QUERY: {
+ case HiveParser.TOK_QUERY: {
QB qb = new QB(id, alias, true);
doPhase1(ast, qb, initPhase1Ctx());
qbexpr.setOpcode(QBExpr.Opcode.NULLOP);
@@ -603,7 +605,9 @@
for (String alias : qb.getTabAliases()) {
String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
- PartitionPruner pruner = new PartitionPruner(alias, qb.getMetaData());
+ org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pruner =
+ new org.apache.hadoop.hive.ql.parse.ASTPartitionPruner(alias, qb.getMetaData());
+
// Pass each where clause to the pruner
for(String clause: qbp.getClauseNames()) {
@@ -622,7 +626,8 @@
for (String alias : qb.getQbJoinTree().getBaseSrc()) {
if (alias != null) {
String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
- PartitionPruner pruner = this.aliasToPruner.get(alias_id);
+ org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pruner =
+ this.aliasToPruner.get(alias_id);
if(pruner == null) {
// this means that the alias is a subquery
pos++;
@@ -649,7 +654,7 @@
for (String alias : qb.getTabAliases()) {
String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
- PartitionPruner pruner = this.aliasToPruner.get(alias_id);
+ org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pruner = this.aliasToPruner.get(alias_id);
if (joinPartnPruner.get(alias_id) == null) {
// Pass each where clause to the pruner
for(String clause: qbp.getClauseNames()) {
@@ -1003,7 +1008,7 @@
RowResolver inputRR = inputCtx.getRR();
Operator output = putOpInsertMap(
OperatorFactory.getAndMakeChild(
- new filterDesc(genExprNodeDesc(condn, inputRR)),
+ new filterDesc(genExprNodeDesc(condn, inputRR), false),
new RowSchema(inputRR.getColumnInfos()), input), inputRR);
LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: " + inputRR.toString());
@@ -1014,7 +1019,7 @@
private Integer genColListRegex(String colRegex, String tabAlias, String alias, ASTNode sel,
ArrayList<exprNodeDesc> col_list, RowResolver input, Integer pos,
RowResolver output) throws SemanticException {
-
+
// The table alias should exist
if (tabAlias != null && !input.hasTableAlias(tabAlias))
throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel));
@@ -1045,9 +1050,13 @@
continue;
}
- exprNodeColumnDesc expr = new exprNodeColumnDesc(colInfo.getType(), name);
+ exprNodeColumnDesc expr = new exprNodeColumnDesc(colInfo.getType(), name,
+ colInfo.getTabAlias(),
+ colInfo.getIsPartitionCol());
col_list.add(expr);
- output.put(tmp[0], tmp[1], new ColumnInfo(getColumnInternalName(pos), colInfo.getType()));
+ output.put(tmp[0], tmp[1],
+ new ColumnInfo(getColumnInternalName(pos), colInfo.getType(),
+ colInfo.getTabAlias(), colInfo.getIsPartitionCol()));
pos = Integer.valueOf(pos.intValue() + 1);
matched ++;
}
@@ -1120,7 +1129,7 @@
out_rwsch.put(
qb.getParseInfo().getAlias(),
outputColList.get(i),
- new ColumnInfo(outputColList.get(i), TypeInfoFactory.stringTypeInfo) // Script output is always a string
+ new ColumnInfo(outputColList.get(i), TypeInfoFactory.stringTypeInfo, null, false) // Script output is always a string
);
}
@@ -1317,7 +1326,7 @@
}
out_rwsch.put(tabAlias, colAlias,
new ColumnInfo(getColumnInternalName(pos),
- exp.getTypeInfo()));
+ exp.getTypeInfo(), tabAlias, false));
pos = Integer.valueOf(pos.intValue() + 1);
}
}
@@ -1479,11 +1488,12 @@
throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
}
- groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName()));
+ groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(),
+ exprInfo.getInternalName(), "", false));
String field = getColumnInternalName(i);
outputColumnNames.add(field);
groupByOutputRowResolver.put("",grpbyExpr.toStringTree(),
- new ColumnInfo(field, exprInfo.getType()));
+ new ColumnInfo(field, exprInfo.getType(), null, false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
// For each aggregation
@@ -1509,7 +1519,10 @@
String paraExpression = paraExprInfo.getInternalName();
assert(paraExpression != null);
- aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName()));
+ aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(),
+ paraExprInfo.getInternalName(),
+ paraExprInfo.getTabAlias(),
+ paraExprInfo.getIsPartitionCol()));
}
boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
@@ -1523,7 +1536,7 @@
outputColumnNames.add(field);
groupByOutputRowResolver.put("",value.toStringTree(),
new ColumnInfo(field,
- udaf.returnType));
+ udaf.returnType, "", false));
// Save the evaluator so that it can be used by the next-stage GroupByOperators
if (genericUDAFEvaluators != null) {
genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
@@ -1572,11 +1585,14 @@
throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
}
- groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), exprInfo.getInternalName()));
+ groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(),
+ exprInfo.getInternalName(),
+ exprInfo.getTabAlias(),
+ exprInfo.getIsPartitionCol()));
String field = getColumnInternalName(i);
outputColumnNames.add(field);
groupByOutputRowResolver.put("",grpbyExpr.toStringTree(),
- new ColumnInfo(field, exprInfo.getType()));
+ new ColumnInfo(field, exprInfo.getType(), "", false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
@@ -1606,7 +1622,10 @@
String paraExpression = paraExprInfo.getInternalName();
assert(paraExpression != null);
- aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExprInfo.getInternalName()));
+ aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(),
+ paraExprInfo.getInternalName(),
+ paraExprInfo.getTabAlias(),
+ paraExprInfo.getIsPartitionCol()));
}
}
else {
@@ -1617,7 +1636,9 @@
}
String paraExpression = paraExprInfo.getInternalName();
assert(paraExpression != null);
- aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression));
+ aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression,
+ paraExprInfo.getTabAlias(),
+ paraExprInfo.getIsPartitionCol()));
}
boolean isDistinct = (value.getType() == HiveParser.TOK_FUNCTIONDI);
Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
@@ -1638,7 +1659,9 @@
(mode != groupByDesc.Mode.FINAL && isDistinct), amode));
String field = getColumnInternalName(groupByKeys.size() + aggregations.size() - 1);
outputColumnNames.add(field);
- groupByOutputRowResolver.put("", value.toStringTree(), new ColumnInfo(field, udaf.returnType));
+ groupByOutputRowResolver.put("", value.toStringTree(),
+ new ColumnInfo(field,
+ udaf.returnType, "", false));
}
Operator op = putOpInsertMap(
@@ -1681,7 +1704,7 @@
String field = getColumnInternalName(i);
outputColumnNames.add(field);
groupByOutputRowResolver.put("",grpbyExpr.toStringTree(),
- new ColumnInfo(field, grpByExprNode.getTypeInfo()));
+ new ColumnInfo(field, grpByExprNode.getTypeInfo(), "", false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
@@ -1699,7 +1722,7 @@
numDistn++;
String field = getColumnInternalName(grpByExprs.size() + numDistn -1);
outputColumnNames.add(field);
- groupByOutputRowResolver.put("", text, new ColumnInfo(field, distExprNode.getTypeInfo()));
+ groupByOutputRowResolver.put("", text, new ColumnInfo(field, distExprNode.getTypeInfo(), "", false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
}
@@ -1735,7 +1758,7 @@
outputColumnNames.add(field);
groupByOutputRowResolver.put("",value.toStringTree(),
new ColumnInfo(field,
- udaf.returnType));
+ udaf.returnType, "", false));
// Save the evaluator so that it can be used by the next-stage GroupByOperators
if (genericUDAFEvaluators != null) {
genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
@@ -1787,7 +1810,7 @@
outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
String field = Utilities.ReduceField.KEY.toString() + "." + getColumnInternalName(reduceKeys.size() - 1);
ColumnInfo colInfo = new ColumnInfo(field,
- reduceKeys.get(reduceKeys.size()-1).getTypeInfo());
+ reduceKeys.get(reduceKeys.size()-1).getTypeInfo(), null, false);
reduceSinkOutputRowResolver.put("", text, colInfo);
colExprMap.put(colInfo.getInternalName(), inputExpr);
} else {
@@ -1807,7 +1830,7 @@
outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
String field = Utilities.ReduceField.KEY.toString() + "." + getColumnInternalName(reduceKeys.size() - 1);
ColumnInfo colInfo = new ColumnInfo(field,
- reduceKeys.get(reduceKeys.size()-1).getTypeInfo());
+ reduceKeys.get(reduceKeys.size()-1).getTypeInfo(), null, false);
reduceSinkOutputRowResolver.put("", text, colInfo);
colExprMap.put(colInfo.getInternalName(), reduceKeys.get(reduceKeys.size()-1));
}
@@ -1831,7 +1854,8 @@
String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1);
reduceSinkOutputRowResolver.put("", text,
new ColumnInfo(field,
- reduceValues.get(reduceValues.size()-1).getTypeInfo()));
+ reduceValues.get(reduceValues.size()-1).getTypeInfo(),
+ null, false));
}
}
}
@@ -1844,13 +1868,14 @@
for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(inputField).getType();
- reduceValues.add(new exprNodeColumnDesc(type, getColumnInternalName(inputField)));
+ reduceValues.add(new exprNodeColumnDesc(type, getColumnInternalName(inputField),
+ "", false));
inputField++;
outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1);
reduceSinkOutputRowResolver.put("", ((ASTNode)entry.getValue()).toStringTree(),
new ColumnInfo(field,
- type));
+ type, null, false));
}
}
@@ -1895,10 +1920,10 @@
String field = getColumnInternalName(i);
outputColumnNames.add(field);
TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", grpbyExpr.toStringTree()).getType();
- exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(typeInfo, field);
+ exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(typeInfo, field, "", false);
reduceKeys.add(inputExpr);
ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString() + "." + field,
- typeInfo);
+ typeInfo, "", false);
reduceSinkOutputRowResolver2.put("", grpbyExpr.toStringTree(),
colInfo);
colExprMap.put(colInfo.getInternalName(), inputExpr);
@@ -1912,13 +1937,13 @@
String field = getColumnInternalName(inputField);
ASTNode t = entry.getValue();
TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", t.toStringTree()).getType();
- reduceValues.add(new exprNodeColumnDesc(typeInfo, field));
+ reduceValues.add(new exprNodeColumnDesc(typeInfo, field, "", false));
inputField++;
String col = getColumnInternalName(reduceValues.size()-1);
outputColumnNames.add(col);
reduceSinkOutputRowResolver2.put("", t.toStringTree(),
new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + col,
- typeInfo));
+ typeInfo, "", false));
}
ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
@@ -1966,11 +1991,13 @@
}
String expression = exprInfo.getInternalName();
- groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), expression));
+ groupByKeys.add(new exprNodeColumnDesc(exprInfo.getType(), expression,
+ exprInfo.getTabAlias(),
+ exprInfo.getIsPartitionCol()));
String field = getColumnInternalName(i);
outputColumnNames.add(field);
groupByOutputRowResolver2.put("",grpbyExpr.toStringTree(),
- new ColumnInfo(field, exprInfo.getType()));
+ new ColumnInfo(field, exprInfo.getType(), "", false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
HashMap<String, ASTNode> aggregationTrees = parseInfo
@@ -1985,7 +2012,9 @@
}
String paraExpression = paraExprInfo.getInternalName();
assert(paraExpression != null);
- aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression));
+ aggParameters.add(new exprNodeColumnDesc(paraExprInfo.getType(), paraExpression,
+ paraExprInfo.getTabAlias(),
+ paraExprInfo.getIsPartitionCol()));
String aggName = value.getChild(0).getText();
@@ -2001,7 +2030,7 @@
outputColumnNames.add(field);
groupByOutputRowResolver2.put("", value.toStringTree(),
new ColumnInfo(field,
- udaf.returnType));
+ udaf.returnType, "", false));
}
Operator op = putOpInsertMap(
@@ -2528,7 +2557,8 @@
List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
for (int i=0; i<fields.size(); i++)
vecCol.add(new ColumnInfo(fields.get(i).getFieldName(),
- TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector())));
+ TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector()),
+ "", false));
} catch (Exception e)
{
throw new SemanticException(e.getMessage());
@@ -2587,7 +2617,8 @@
ObjectInspector tableFieldOI = tableFields.get(i).getFieldObjectInspector();
TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(tableFieldOI);
TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
- exprNodeDesc column = new exprNodeColumnDesc(rowFieldTypeInfo, rowFields.get(i).getInternalName());
+ exprNodeDesc column = new exprNodeColumnDesc(rowFieldTypeInfo,
+ rowFields.get(i).getInternalName(), "", false);
// LazySimpleSerDe can convert any types to String type using JSON-format.
if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)
&& !(isLazySimpleSerDe && tableFieldTypeInfo.getCategory().equals(Category.PRIMITIVE)
@@ -2617,7 +2648,7 @@
ArrayList<String> colName = new ArrayList<String>();
for (int i=0; i<expressions.size(); i++) {
String name = getColumnInternalName(i);
- rowResolver.put("", name, new ColumnInfo(name, expressions.get(i).getTypeInfo()));
+ rowResolver.put("", name, new ColumnInfo(name, expressions.get(i).getTypeInfo(), "", false));
colName.add(name);
}
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
@@ -2734,7 +2765,8 @@
Map<String, exprNodeDesc> colExprMap = new HashMap<String, exprNodeDesc>();
ArrayList<exprNodeDesc> valueCols = new ArrayList<exprNodeDesc>();
for(ColumnInfo colInfo: inputRR.getColumnInfos()) {
- valueCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()));
+ valueCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
+ colInfo.getTabAlias(), colInfo.getIsPartitionCol()));
colExprMap.put(colInfo.getInternalName(), valueCols.get(valueCols.size() - 1));
}
@@ -2756,13 +2788,15 @@
for(ColumnInfo colInfo: interim_rwsch.getColumnInfos()) {
String [] info = interim_rwsch.reverseLookup(colInfo.getInternalName());
out_rwsch.put(info[0], info[1],
- new ColumnInfo(getColumnInternalName(pos), colInfo.getType()));
+ new ColumnInfo(getColumnInternalName(pos), colInfo.getType(), info[0], false));
pos = Integer.valueOf(pos.intValue() + 1);
}
Operator output = putOpInsertMap(
OperatorFactory.getAndMakeChild(
- new extractDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, Utilities.ReduceField.VALUE.toString())),
+ new extractDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
+ Utilities.ReduceField.VALUE.toString(),
+ "", false)),
new RowSchema(out_rwsch.getColumnInfos()),
interim), out_rwsch);
@@ -2808,14 +2842,17 @@
{
String field = fNamesIter.next();
ColumnInfo valueInfo = inputRS.get(key, field);
- keyDesc.add(new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName()));
+ keyDesc.add(new exprNodeColumnDesc(valueInfo.getType(),
+ valueInfo.getInternalName(),
+ valueInfo.getTabAlias(),
+ valueInfo.getIsPartitionCol()));
if (outputRS.get(key, field) == null) {
String colName = getColumnInternalName(outputPos);
outputPos++;
outputColumnNames.add(colName);
colExprMap.put(colName, keyDesc.get(keyDesc.size() - 1));
outputRS.put(key, field, new ColumnInfo(colName,
- valueInfo.getType()));
+ valueInfo.getType(), key, false));
reversedExprs.put(colName, tag);
}
}
@@ -2866,14 +2903,17 @@
for (Map.Entry<String, ColumnInfo> entry : fMap.entrySet()) {
String field = entry.getKey();
ColumnInfo valueInfo = entry.getValue();
- exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName());
+ exprNodeColumnDesc inputExpr = new exprNodeColumnDesc(valueInfo.getType(),
+ valueInfo.getInternalName(),
+ valueInfo.getTabAlias(),
+ valueInfo.getIsPartitionCol());
reduceValues.add(inputExpr);
if (outputRS.get(src, field) == null) {
String col = getColumnInternalName(reduceValues.size() - 1);
outputColumns.add(col);
ColumnInfo newColInfo = new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." +
col,
- valueInfo.getType());
+ valueInfo.getType(), src, false);
colExprMap.put(newColInfo.getInternalName(), inputExpr);
outputRS.put(src, field, newColInfo);
}
@@ -3306,7 +3346,8 @@
ArrayList<String> columnNames = new ArrayList<String>();
for (int i = 0; i < columns.size(); i++) {
ColumnInfo col = columns.get(i);
- colList.add(new exprNodeColumnDesc(col.getType(), col.getInternalName()));
+ colList.add(new exprNodeColumnDesc(col.getType(), col.getInternalName(),
+ col.getTabAlias(), col.getIsPartitionCol()));
columnNames.add(col.getInternalName());
}
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
@@ -3405,7 +3446,7 @@
outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
String field = Utilities.ReduceField.KEY.toString() + "." + getColumnInternalName(reduceKeys.size() - 1);
ColumnInfo colInfo = new ColumnInfo(field,
- reduceKeys.get(reduceKeys.size()-1).getTypeInfo());
+ reduceKeys.get(reduceKeys.size()-1).getTypeInfo(), "", false);
reduceSinkOutputRowResolver.put("", text, colInfo);
colExprMap.put(colInfo.getInternalName(), distExpr);
}
@@ -3423,7 +3464,7 @@
exprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr, inputRR);
reduceValues.add(grpByExprNode);
String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1);
- ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo());
+ ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo(), "", false);
reduceSinkOutputRowResolver.put("", text, colInfo);
outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
}
@@ -3446,7 +3487,7 @@
exprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRR);
reduceValues.add(paraExprNode);
String field = Utilities.ReduceField.VALUE.toString() + "." + getColumnInternalName(reduceValues.size() - 1);
- ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo());
+ ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(reduceValues.size()-1).getTypeInfo(), "", false);
reduceSinkOutputRowResolver.put("", text, colInfo);
outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
}
@@ -3727,7 +3768,8 @@
for (String col : bucketCols) {
ColumnInfo ci = rwsch.get(alias, col);
// TODO: change type to the one in the table schema
- args.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName()));
+ args.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName(),
+ ci.getTabAlias(), ci.getIsPartitionCol()));
}
}
else {
@@ -3777,7 +3819,8 @@
for (int i=0; i<fields.size(); i++) {
rwsch.put(alias, fields.get(i).getFieldName(),
new ColumnInfo(fields.get(i).getFieldName(),
- TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector())));
+ TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector()),
+ alias, false));
}
} catch (SerDeException e) {
throw new RuntimeException(e);
@@ -3787,14 +3830,18 @@
for(FieldSchema part_col: tab.getPartCols()) {
LOG.trace("Adding partition col: " + part_col);
// TODO: use the right type by calling part_col.getType() instead of String.class
- rwsch.put(alias, part_col.getName(), new ColumnInfo(part_col.getName(), TypeInfoFactory.stringTypeInfo));
+ rwsch.put(alias, part_col.getName(),
+ new ColumnInfo(part_col.getName(), TypeInfoFactory.stringTypeInfo, alias, true));
}
// Create the root of the operator tree
- top = putOpInsertMap(OperatorFactory.get(tableScanDesc.class, new RowSchema(rwsch.getColumnInfos())), rwsch);
+ top = putOpInsertMap(OperatorFactory.get(new tableScanDesc(alias), new RowSchema(rwsch.getColumnInfos())), rwsch);
// Add this to the list of top operators - we always start from a table scan
this.topOps.put(alias_id, top);
+
+ // Add a mapping from the table scan operator to Table
+ this.topToTable.put((TableScanOperator)top, tab);
}
else {
rwsch = opParseCtx.get(top).getRR();
@@ -3855,7 +3902,7 @@
// compilation of sampling to use the operator tree
exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
tableOp = OperatorFactory.getAndMakeChild(
- new filterDesc(samplePredicate),
+ new filterDesc(samplePredicate, true),
top);
}
else {
@@ -3864,7 +3911,7 @@
LOG.info("Need sample filter");
exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
tableOp = OperatorFactory.getAndMakeChild(
- new filterDesc(samplePredicate),
+ new filterDesc(samplePredicate, true),
top);
}
}
@@ -3900,7 +3947,7 @@
LOG.info("Need sample filter");
exprNodeDesc randFunc = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("rand", new exprNodeConstantDesc(Integer.valueOf(460476415)));
exprNodeDesc samplePred = genSamplePredicate(tsSample, null, false, alias, rwsch, qb.getMetaData(), randFunc);
- tableOp = OperatorFactory.getAndMakeChild(new filterDesc(samplePred), top);
+ tableOp = OperatorFactory.getAndMakeChild(new filterDesc(samplePred, true), top);
}
}
}
@@ -4013,12 +4060,14 @@
}
else {
if (aliasToPruner.size() == 1) {
- Iterator<Map.Entry<String, PartitionPruner>> iterP = aliasToPruner.entrySet().iterator();
- PartitionPruner pr = ((Map.Entry<String, PartitionPruner>)iterP.next()).getValue();
+ Iterator<Map.Entry<String, org.apache.hadoop.hive.ql.parse.ASTPartitionPruner>> iterP =
+ aliasToPruner.entrySet().iterator();
+ org.apache.hadoop.hive.ql.parse.ASTPartitionPruner pr =
+ ((Map.Entry<String, org.apache.hadoop.hive.ql.parse.ASTPartitionPruner>)iterP.next()).getValue();
if (pr.onlyContainsPartitionCols()) {
List<String> listP = new ArrayList<String>();
List<partitionDesc> partP = new ArrayList<partitionDesc>();
- PartitionPruner.PrunedPartitionList partsList = null;
+ PrunedPartitionList partsList = null;
Set<Partition> parts = null;
try {
partsList = pr.prune();
@@ -4204,9 +4253,9 @@
genPlan(qb);
- ParseContext pCtx = new ParseContext(conf, qb, ast, aliasToPruner, aliasToSamplePruner, topOps,
- topSelOps, opParseCtx, joinContext, loadTableWork, loadFileWork,
- ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer);
+ ParseContext pCtx = new ParseContext(conf, qb, ast, aliasToPruner, opToPartPruner, aliasToSamplePruner, topOps,
+ topSelOps, opParseCtx, joinContext, topToTable, loadTableWork, loadFileWork,
+ ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer);
Optimizer optm = new Optimizer();
optm.setPctx(pCtx);
@@ -4218,7 +4267,7 @@
// Do any partition pruning
genPartitionPruners(qb);
LOG.info("Completed partition pruning");
-
+
// Do any sample pruning
genSamplePruners(qb);
LOG.info("Completed sample pruning");
@@ -4252,7 +4301,8 @@
// If the current subExpression is pre-calculated, as in Group-By etc.
ColumnInfo colInfo = input.get("", expr.toStringTree());
if (colInfo != null) {
- return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
+ return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
+ colInfo.getTabAlias(), colInfo.getIsPartitionCol());
}
// Create the walker, the rules dispatcher and the context.
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Tue Aug 11 02:06:23 2009
@@ -93,7 +93,8 @@
// If the current subExpression is pre-calculated, as in Group-By etc.
ColumnInfo colInfo = input.get("", expr.toStringTree());
if (colInfo != null) {
- desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
+ desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
+ colInfo.getTabAlias(), colInfo.getIsPartitionCol());
return desc;
}
return desc;
@@ -326,7 +327,8 @@
}
} else {
// It's a column.
- return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
+ return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
+ colInfo.getTabAlias(), colInfo.getIsPartitionCol());
}
}
@@ -627,7 +629,8 @@
ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)));
return null;
}
- return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
+ return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
+ colInfo.getTabAlias(), colInfo.getIsPartitionCol());
}
// Return nulls for conversion operators
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java Tue Aug 11 02:06:23 2009
@@ -27,16 +27,36 @@
public class exprNodeColumnDesc extends exprNodeDesc implements Serializable {
private static final long serialVersionUID = 1L;
+
+ /**
+ * The column name.
+ */
private String column;
+
+ /**
+ * The alias of the table.
+ */
+ private String tabAlias;
+
+ /**
+ * Is the column a partitioned column.
+ */
+ private boolean isPartitionCol;
public exprNodeColumnDesc() {}
- public exprNodeColumnDesc(TypeInfo typeInfo, String column) {
+ public exprNodeColumnDesc(TypeInfo typeInfo, String column,
+ String tabAlias, boolean isPartitionCol) {
super(typeInfo);
this.column = column;
+ this.tabAlias = tabAlias;
+ this.isPartitionCol = isPartitionCol;
}
- public exprNodeColumnDesc(Class<?> c, String column) {
+ public exprNodeColumnDesc(Class<?> c, String column, String tabAlias,
+ boolean isPartitionCol) {
super(TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive(c));
this.column = column;
+ this.tabAlias = tabAlias;
+ this.isPartitionCol = isPartitionCol;
}
public String getColumn() {
return this.column;
@@ -45,6 +65,20 @@
this.column = column;
}
+ public String getTabAlias() {
+ return this.tabAlias;
+ }
+ public void setTabAlias(String tabAlias) {
+ this.tabAlias = tabAlias;
+ }
+
+ public boolean getIsPartitionCol() {
+ return this.isPartitionCol;
+ }
+ public void setIsPartitionCol(boolean isPartitionCol) {
+ this.isPartitionCol = isPartitionCol;
+ }
+
public String toString() {
return "Column[" + column + "]";
}
@@ -62,7 +96,8 @@
}
@Override
public exprNodeDesc clone() {
- return new exprNodeColumnDesc(this.typeInfo, this.column);
+ return new exprNodeColumnDesc(this.typeInfo, this.column,
+ this.tabAlias, this.isPartitionCol);
}
@Override
public boolean isSame(Object o) {
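The call-site changes above all follow the same pattern: every exprNodeColumnDesc now records which table alias it came from and whether it refers to a partition column, which is what lets later passes pull partition predicates out of the plan. A hedged sketch of the widened constructor follows; the column names, the alias "src", and the wrapper class are illustrative.

    import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class ExprNodeColumnDescSketch {
      public static void main(String[] args) {
        // "ds" on table alias "src" is a partition column, so the flag is true.
        exprNodeColumnDesc dsCol = new exprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, "ds", "src", true);
        // An ordinary data column passes false.
        exprNodeColumnDesc keyCol = new exprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, "key", "src", false);

        System.out.println(dsCol + " isPartitionCol=" + dsCol.getIsPartitionCol());
        System.out.println(keyCol + " isPartitionCol=" + keyCol.getIsPartitionCol());
      }
    }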
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java Tue Aug 11 02:06:23 2009
@@ -24,10 +24,12 @@
public class filterDesc implements Serializable {
private static final long serialVersionUID = 1L;
private org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate;
+ private boolean isSamplingPred;
public filterDesc() { }
public filterDesc(
- final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
+ final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate, boolean isSamplingPred) {
this.predicate = predicate;
+ this.isSamplingPred = isSamplingPred;
}
@explain(displayName="predicate")
public org.apache.hadoop.hive.ql.plan.exprNodeDesc getPredicate() {
@@ -36,4 +38,11 @@
public void setPredicate(final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
this.predicate = predicate;
}
+ @explain(displayName="isSamplingPred", normalExplain=false)
+ public boolean getIsSamplingPred() {
+ return this.isSamplingPred;
+ }
+ public void setIsSamplingPred(final boolean isSamplingPred) {
+ this.isSamplingPred = isSamplingPred;
+ }
}
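A short sketch of the extended filterDesc constructor; the stand-in predicate below is illustrative, while in the patch itself only the sampling filters built in SemanticAnalyzer pass true.

    import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.filterDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class FilterDescSketch {
      public static void main(String[] args) {
        // Stand-in boolean predicate; real call sites pass a full expression tree.
        exprNodeDesc pred = new exprNodeColumnDesc(
            TypeInfoFactory.booleanTypeInfo, "is_valid", "t", false);

        filterDesc whereFilter = new filterDesc(pred, false); // ordinary WHERE/ON filter
        filterDesc sampleFilter = new filterDesc(pred, true); // injected for TABLESAMPLE

        System.out.println("sampling pred? " + sampleFilter.getIsSamplingPred());
      }
    }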
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java Tue Aug 11 02:06:23 2009
@@ -28,8 +28,22 @@
@explain(displayName="TableScan")
public class tableScanDesc implements Serializable {
private static final long serialVersionUID = 1L;
+
+ private String alias;
+
@SuppressWarnings("nls")
- public tableScanDesc() {
- throw new RuntimeException("This class does not need to be instantiated");
+ public tableScanDesc() { }
+
+ public tableScanDesc(final String alias) {
+ this.alias = alias;
+ }
+
+ @explain(displayName="alias")
+ public String getAlias() {
+ return alias;
+ }
+
+ public void setAlias(String alias) {
+ this.alias = alias;
}
}
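tableScanDesc is now instantiable so a scan can carry its alias into EXPLAIN output (the "alias: src" lines visible in the updated .q.out files below); a trivial sketch, with the alias value invented:

    import org.apache.hadoop.hive.ql.plan.tableScanDesc;

    public class TableScanDescSketch {
      public static void main(String[] args) {
        tableScanDesc ts = new tableScanDesc("src");
        System.out.println("alias: " + ts.getAlias());
      }
    }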
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Tue Aug 11 02:06:23 2009
@@ -135,7 +135,7 @@
List<Operator<? extends Serializable>> originalChilren = tsOp.getChildOperators();
tsOp.setChildOperators(null);
Operator<filterDesc> output =
- OperatorFactory.getAndMakeChild(new filterDesc(condn),
+ OperatorFactory.getAndMakeChild(new filterDesc(condn, false),
new RowSchema(inputRR.getColumnInfos()),
tsOp);
output.setChildOperators(originalChilren);
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Aug 11 02:06:23 2009
@@ -155,7 +155,7 @@
private filterDesc getTestFilterDesc(String column) {
ArrayList<exprNodeDesc> children1 = new ArrayList<exprNodeDesc>();
- children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column));
+ children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "", false));
exprNodeDesc lhs = new exprNodeFuncDesc(
Constants.DOUBLE_TYPE_NAME,
TypeInfoFactory.doubleTypeInfo,
@@ -183,7 +183,7 @@
FunctionRegistry.getUDFMethod("<", TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.doubleTypeInfo),
children3);
- return new filterDesc(desc);
+ return new filterDesc(desc, false);
}
@SuppressWarnings("unchecked")
@@ -325,7 +325,7 @@
new exprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(
TypeInfoFactory.stringTypeInfo),
- Utilities.ReduceField.VALUE.toString()),
+ Utilities.ReduceField.VALUE.toString(), "", false),
"0",
false)), outputColumns), op4);
@@ -374,7 +374,7 @@
}
public static exprNodeColumnDesc getStringColumn(String columnName) {
- return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName);
+ return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, "", false);
}
@SuppressWarnings("unchecked")
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Tue Aug 11 02:06:23 2009
@@ -98,7 +98,7 @@
public void testExprNodeColumnEvaluator() throws Throwable {
try {
// get a evaluator for a simple field expression
- exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola");
+ exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola", "", false);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
// evaluate on row
@@ -128,8 +128,8 @@
public void testExprNodeFuncEvaluator() throws Throwable {
try {
// get a evaluator for a string concatenation expression
- exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
- exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola");
+ exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "", false);
+ exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola", "", false);
exprNodeDesc col11desc = getListIndexNode(col1desc, 1);
exprNodeDesc cola0desc = getListIndexNode(coladesc, 0);
exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col11desc, cola0desc);
@@ -150,7 +150,7 @@
public void testExprNodeConversionEvaluator() throws Throwable {
try {
// get a evaluator for a string concatenation expression
- exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
+ exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "", false);
exprNodeDesc col11desc = getListIndexNode(col1desc, 1);
exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
@@ -253,8 +253,8 @@
basetimes * 10,
ExprNodeEvaluatorFactory.get(
TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
- getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant1),
- getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant1))),
+ getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1),
+ getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1))),
r,
"1b");
measureSpeed("concat(concat(col1[1], cola[1]), col1[2])",
@@ -262,9 +262,9 @@
ExprNodeEvaluatorFactory.get(
TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
- getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant1),
- getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant1)),
- getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant2))),
+ getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1),
+ getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1)),
+ getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant2))),
r,
"1b2");
measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
@@ -273,10 +273,10 @@
TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
- getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant1),
- getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant1)),
- getListIndexNode(new exprNodeColumnDesc(col1Type, "col1"), constant2)),
- getListIndexNode(new exprNodeColumnDesc(colaType, "cola"), constant2))),
+ getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant1),
+ getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant1)),
+ getListIndexNode(new exprNodeColumnDesc(col1Type, "col1", "", false), constant2)),
+ getListIndexNode(new exprNodeColumnDesc(colaType, "cola", "", false), constant2))),
r,
"1b2c");
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Tue Aug 11 02:06:23 2009
@@ -90,7 +90,7 @@
exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2);
assert(func3 != null);
- filterDesc filterCtx = new filterDesc(func3);
+ filterDesc filterCtx = new filterDesc(func3, false);
// Configuration
Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Tue Aug 11 02:06:23 2009
@@ -43,11 +43,11 @@
try {
// initialize a complete map reduce configuration
- exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F1);
- exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F2);
+ exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F1, "", false);
+ exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, F2, "", false);
exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", expr1, expr2);
- filterDesc filterCtx = new filterDesc(filterExpr);
+ filterDesc filterCtx = new filterDesc(filterExpr, false);
Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
op.setConf(filterCtx);
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_map_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_map_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_map_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_map_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,20 @@
+set hive.map.aggr=true;
+set hive.groupby.skewindata=false;
+set mapred.reduce.tasks=31;
+
+CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE;
+
+EXPLAIN EXTENDED
+FROM srcpart src
+INSERT OVERWRITE TABLE dest1
+SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
+WHERE src.ds = '2008-04-08'
+GROUP BY substr(src.key,1,1);
+
+FROM srcpart src
+INSERT OVERWRITE TABLE dest1
+SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
+WHERE src.ds = '2008-04-08'
+GROUP BY substr(src.key,1,1);
+
+SELECT dest1.* FROM dest1;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,19 @@
+set hive.map.aggr=false;
+set hive.groupby.skewindata=false;
+
+CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE;
+
+EXPLAIN EXTENDED
+FROM srcpart src
+INSERT OVERWRITE TABLE dest1
+SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
+WHERE src.ds = '2008-04-08'
+GROUP BY substr(src.key,1,1);
+
+FROM srcpart src
+INSERT OVERWRITE TABLE dest1
+SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
+WHERE src.ds = '2008-04-08'
+GROUP BY substr(src.key,1,1);
+
+SELECT dest1.* FROM dest1;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,19 @@
+CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE;
+
+EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest_j1
+SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
+FROM src1 x JOIN src y ON (x.key = y.key)
+JOIN srcpart z ON (x.key = z.key)
+WHERE z.ds='2008-04-08' and z.hr=11;
+
+INSERT OVERWRITE TABLE dest_j1
+SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
+FROM src1 x JOIN src y ON (x.key = y.key)
+JOIN srcpart z ON (x.key = z.key)
+WHERE z.ds='2008-04-08' and z.hr=11;
+
+select * from dest_j1 x order by x.key;
+
+drop table dest_j1;
+
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/louter_join_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/louter_join_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/louter_join_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/louter_join_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,71 @@
+set hive.optimize.ppd=true;
+
+EXPLAIN EXTENDED
+ FROM
+ src a
+ LEFT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+ FROM
+ src a
+ LEFT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+EXPLAIN EXTENDED
+ FROM
+ srcpart a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key AND a.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+ FROM
+ srcpart a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key AND a.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+
+EXPLAIN EXTENDED
+ FROM
+ src a
+ LEFT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';
+
+ FROM
+ src a
+ LEFT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';
+
+EXPLAIN EXTENDED
+ FROM
+ srcpart a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08';
+
+ FROM
+ srcpart a
+ LEFT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08';
+
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/outer_join_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/outer_join_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/outer_join_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/outer_join_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,35 @@
+set hive.optimize.ppd=true;
+
+EXPLAIN EXTENDED
+ FROM
+ src a
+ FULL OUTER JOIN
+ srcpart b
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+ FROM
+ src a
+ FULL OUTER JOIN
+ srcpart b
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+EXPLAIN EXTENDED
+ FROM
+ src a
+ FULL OUTER JOIN
+ srcpart b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';
+
+ FROM
+ src a
+ FULL OUTER JOIN
+ srcpart b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/router_join_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/router_join_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/router_join_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/router_join_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,71 @@
+set hive.optimize.ppd=true;
+
+EXPLAIN EXTENDED
+ FROM
+ src a
+ RIGHT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+ FROM
+ src a
+ RIGHT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+EXPLAIN EXTENDED
+ FROM
+ srcpart a
+ RIGHT OUTER JOIN
+ src b
+ ON (a.key = b.key AND a.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+ FROM
+ srcpart a
+ RIGHT OUTER JOIN
+ src b
+ ON (a.key = b.key AND a.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
+
+
+EXPLAIN EXTENDED
+ FROM
+ src a
+ RIGHT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';
+
+ FROM
+ src a
+ RIGHT OUTER JOIN
+ srcpart b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';
+
+EXPLAIN EXTENDED
+ FROM
+ srcpart a
+ RIGHT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08';
+
+ FROM
+ srcpart a
+ RIGHT OUTER JOIN
+ src b
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08';
+
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr1.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr1.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,19 @@
+set hive.optimize.ppd=true;
+
+EXPLAIN EXTENDED
+FROM (
+ FROM srcpart src
+ SELECT TRANSFORM(src.ds, src.key, src.value)
+ USING '/bin/cat' AS (ds, tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 AND tmap.ds = '2008-04-08';
+
+FROM (
+ FROM srcpart src
+ SELECT TRANSFORM(src.ds, src.key, src.value)
+ USING '/bin/cat' AS (ds, tkey, tvalue)
+ CLUSTER BY tkey
+) tmap
+SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 AND tmap.ds = '2008-04-08';
+
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr2.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform_ppr2.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,21 @@
+set hive.optimize.ppd=true;
+
+EXPLAIN EXTENDED
+FROM (
+ FROM srcpart src
+ SELECT TRANSFORM(src.ds, src.key, src.value)
+ USING '/bin/cat' AS (ds, tkey, tvalue)
+ WHERE src.ds = '2008-04-08'
+ CLUSTER BY tkey
+) tmap
+SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
+
+FROM (
+ FROM srcpart src
+ SELECT TRANSFORM(src.ds, src.key, src.value)
+ USING '/bin/cat' AS (ds, tkey, tvalue)
+ WHERE src.ds = '2008-04-08'
+ CLUSTER BY tkey
+) tmap
+SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
+
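Taken together, the two transform tests bracket the script operator:
transform_ppr1 places the partition predicate above the TRANSFORM subquery,
asking whether a filter on the passed-through ds output column may travel
past an opaque user command back to the srcpart scan, while transform_ppr2
filters inside the subquery, before the script runs, where pruning is
straightforward. Only the ds predicate is ever a pruning candidate; a filter
on a genuine script output, as in this hedged sketch (not part of the
commit), can never prune:

  FROM (
    FROM srcpart src
    SELECT TRANSFORM(src.ds, src.key, src.value)
    USING '/bin/cat' AS (ds, tkey, tvalue)
  ) tmap
  SELECT tmap.tkey
  WHERE tmap.tkey < 100;  -- tkey exists only after the script has run, so
                          -- every partition of srcpart must still be read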
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/union_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/union_ppr.q?rev=802976&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/union_ppr.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/union_ppr.q Tue Aug 11 02:06:23 2009
@@ -0,0 +1,16 @@
+EXPLAIN EXTENDED
+SELECT * FROM (
+ SELECT X.* FROM SRCPART X WHERE X.key < 100
+ UNION ALL
+ SELECT Y.* FROM SRCPART Y WHERE Y.key < 100
+) A
+WHERE A.ds = '2008-04-08'
+SORT BY A.key;
+
+SELECT * FROM (
+ SELECT X.* FROM SRCPART X WHERE X.key < 100
+ UNION ALL
+ SELECT Y.* FROM SRCPART Y WHERE Y.key < 100
+) A
+WHERE A.ds = '2008-04-08'
+SORT BY A.key;
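union_ppr checks that a partition predicate written once, above a UNION ALL,
prunes the scan in both branches; each branch reads SRCPART independently, so
a regression here doubles the unpruned partitions read. The rewrite the
optimizer is expected to produce, written out by hand as a hedged sketch:

  SELECT * FROM (
    SELECT X.* FROM SRCPART X WHERE X.key < 100 AND X.ds = '2008-04-08'
    UNION ALL
    SELECT Y.* FROM SRCPART Y WHERE Y.key < 100 AND Y.ds = '2008-04-08'
  ) A
  SORT BY A.key;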
Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out Tue Aug 11 02:06:23 2009
@@ -13,6 +13,8 @@
Map Reduce
Alias -> Map Operator Tree:
src
+ TableScan
+ alias: src
Select Operator
expressions:
expr: key
@@ -40,5 +42,5 @@
query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
FROM src
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/1098616602/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2091756058/10000
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
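The golden-file (.q.out) updates from here on follow two mechanical patterns:
every Map Operator Tree in EXPLAIN output now opens with an explicit
TableScan operator carrying its alias, and the scratch-directory paths differ
only because the files were regenerated in another user's workspace. The new
plan shape should be reproducible against any table, e.g.:

  EXPLAIN SELECT src.key FROM src;
  -- the map-side tree for alias src is now expected to begin with:
  --   TableScan
  --     alias: src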
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
Files hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out (original) and hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out Tue Aug 11 02:06:23 2009 differ
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Tue Aug 11 02:06:23 2009
@@ -15,6 +15,8 @@
Map Reduce
Alias -> Map Operator Tree:
src_thrift
+ TableScan
+ alias: src_thrift
Filter Operator
predicate:
expr: (lint[0] > 0)
@@ -45,10 +47,10 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2103484307/10000
+ destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/150742864/10000
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2009660724/10002
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2122837325/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -88,7 +90,7 @@
Output: default/dest1
query: SELECT DEST1.* FROM Dest1
Input: default/dest1
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1564334315/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1814455060/10000
2 1
4 8
6 27
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Tue Aug 11 02:06:23 2009
@@ -14,6 +14,8 @@
Map Reduce
Alias -> Map Operator Tree:
src
+ TableScan
+ alias: src
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(86))
@@ -54,10 +56,10 @@
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/811676056/10000
+ destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/519069919/10000
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/2035347058/10002
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/164262834/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -106,5 +108,5 @@
Output: default/dest1
query: select dest1.* FROM dest1
Input: default/dest1
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/855406378/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1857264488/10000
5 5.0 5.0 5.0 5 true 1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Tue Aug 11 02:06:23 2009
@@ -12,6 +12,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(10))
@@ -57,7 +59,7 @@
query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1924758530/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1851665085/10000
10 val_10
query: EXPLAIN
SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
@@ -73,6 +75,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -118,7 +122,7 @@
query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1123767134/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/577104330/10000
20 val_20
query: EXPLAIN
SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
@@ -134,6 +138,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -179,7 +185,7 @@
query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/861786563/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/716161226/10000
20 val_20
query: EXPLAIN
SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -195,6 +201,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -240,7 +248,7 @@
query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY x.key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1270193772/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1938918851/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
@@ -256,6 +264,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -301,7 +311,7 @@
query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1442901621/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/111063159/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -317,6 +327,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -362,7 +374,7 @@
query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/153851399/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2088645304/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
@@ -378,6 +390,8 @@
Map Reduce
Alias -> Map Operator Tree:
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -423,7 +437,7 @@
query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY v1
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/558964860/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/43650880/10000
20 val_20
query: EXPLAIN
SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
@@ -439,6 +453,8 @@
Map Reduce
Alias -> Map Operator Tree:
y:x
+ TableScan
+ alias: x
Select Operator
expressions:
expr: key
@@ -487,7 +503,7 @@
query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1403526844/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2050273150/10000
20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -504,6 +520,8 @@
Map Reduce
Alias -> Map Operator Tree:
y
+ TableScan
+ alias: y
Reduce Output Operator
key expressions:
expr: key
@@ -517,6 +535,8 @@
expr: key
type: string
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -566,7 +586,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/708887411/10002
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/568111874/10002
Reduce Output Operator
key expressions:
expr: _col1
@@ -599,7 +619,7 @@
query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/102207247/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1907382390/10000
20 val_20 20
query: EXPLAIN
SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -616,6 +636,8 @@
Map Reduce
Alias -> Map Operator Tree:
y
+ TableScan
+ alias: y
Reduce Output Operator
key expressions:
expr: key
@@ -631,6 +653,8 @@
expr: value
type: string
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -682,7 +706,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1638014428/10002
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/36559589/10002
Reduce Output Operator
key expressions:
expr: _col1
@@ -717,7 +741,7 @@
query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1268473862/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/917788585/10000
20 val_20 20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
@@ -734,6 +758,8 @@
Map Reduce
Alias -> Map Operator Tree:
y
+ TableScan
+ alias: y
Reduce Output Operator
key expressions:
expr: key
@@ -749,6 +775,8 @@
expr: value
type: string
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -800,7 +828,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/2041580763/10002
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1989928864/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -835,7 +863,7 @@
query: SELECT x.key, x.value as v1, y.* FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/781263409/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1491329699/10000
20 val_20 20 val_20
query: EXPLAIN
SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
@@ -852,6 +880,8 @@
Map Reduce
Alias -> Map Operator Tree:
y
+ TableScan
+ alias: y
Reduce Output Operator
key expressions:
expr: key
@@ -865,6 +895,8 @@
expr: key
type: string
x
+ TableScan
+ alias: x
Filter Operator
predicate:
expr: (UDFToDouble(key) = UDFToDouble(20))
@@ -914,7 +946,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/919629862/10002
+ file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1994351843/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -947,7 +979,7 @@
query: SELECT x.key, x.value as v1, y.key as yk FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1571336063/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1415898959/10000
20 val_20 20
query: EXPLAIN
SELECT unioninput.*
@@ -969,6 +1001,8 @@
Map Reduce
Alias -> Map Operator Tree:
null-subquery1:unioninput-subquery1:src
+ TableScan
+ alias: src
Filter Operator
predicate:
expr: (UDFToDouble(key) < UDFToDouble(100))
@@ -1007,6 +1041,8 @@
expr: _col1
type: string
null-subquery2:unioninput-subquery2:src
+ TableScan
+ alias: src
Filter Operator
predicate:
expr: (UDFToDouble(key) > UDFToDouble(100))
@@ -1057,6 +1093,7 @@
Fetch Operator
limit: -1
+
query: SELECT unioninput.*
FROM (
FROM src select src.key, src.value WHERE src.key < 100
@@ -1065,7 +1102,7 @@
) unioninput
CLUSTER BY unioninput.key
Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1313522351/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1319156082/10000
0 val_0
0 val_0
0 val_0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out Tue Aug 11 02:06:23 2009
@@ -15,6 +15,8 @@
Map Reduce
Alias -> Map Operator Tree:
src_thrift
+ TableScan
+ alias: src_thrift
Select Operator
expressions:
expr: lint
@@ -73,7 +75,7 @@
Output: default/columnarserde_create_shortcut
query: SELECT columnarserde_create_shortcut.* FROM columnarserde_create_shortcut DISTRIBUTE BY 1
Input: default/columnarserde_create_shortcut
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/2113912969/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1249909626/10000
[0,0,0] ["0","0","0"] {"key_0":"value_0"} 1712634731 record_0
[1,2,3] ["10","100","1000"] {"key_1":"value_1"} 465985200 record_1
[2,4,6] ["20","200","2000"] {"key_2":"value_2"} -751827638 record_2
@@ -87,7 +89,7 @@
null null {} 0 NULL
query: SELECT columnarserde_create_shortcut.a[0], columnarserde_create_shortcut.b[0], columnarserde_create_shortcut.c['key2'], columnarserde_create_shortcut.d, columnarserde_create_shortcut.e FROM columnarserde_create_shortcut DISTRIBUTE BY 1
Input: default/columnarserde_create_shortcut
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1302632919/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/467882709/10000
0 0 NULL 1712634731 record_0
1 10 NULL 465985200 record_1
2 20 NULL -751827638 record_2
@@ -111,7 +113,7 @@
value string from deserializer
query: SELECT columnShortcutTable.* FROM columnShortcutTable
Input: default/columnshortcuttable
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/756410828/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2017736818/10000
238 val_238
86 val_86
311 val_311
@@ -125,7 +127,7 @@
query: ALTER TABLE columnShortcutTable ADD COLUMNS (c string)
query: SELECT columnShortcutTable.* FROM columnShortcutTable
Input: default/columnshortcuttable
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/656879171/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/984890513/10000
238 val_238 NULL
86 val_86 NULL
311 val_311 NULL
@@ -139,7 +141,7 @@
query: ALTER TABLE columnShortcutTable REPLACE COLUMNS (key int)
query: SELECT columnShortcutTable.* FROM columnShortcutTable
Input: default/columnshortcuttable
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/90567567/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1805361378/10000
238
86
311
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out Tue Aug 11 02:06:23 2009
@@ -28,6 +28,8 @@
Map Reduce
Alias -> Map Operator Tree:
src
+ TableScan
+ alias: src
Select Operator
expressions:
expr: value
@@ -78,6 +80,6 @@
test_avg(substr(value,5))
FROM src
Input: default/src
-Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1744650408/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1700649603/10000
1.0 260.182
query: DROP TEMPORARY FUNCTION test_avg
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out?rev=802976&r1=802975&r2=802976&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out Tue Aug 11 02:06:23 2009
@@ -14,6 +14,8 @@
Map Reduce
Alias -> Map Operator Tree:
src
+ TableScan
+ alias: src
Select Operator
expressions:
expr: key
@@ -52,7 +54,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- invalidscheme:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/2040523337/10002
+ invalidscheme:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1292130777/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -113,7 +115,7 @@
Output: default/dest_g1
query: SELECT dest_g1.* FROM dest_g1
Input: default/dest_g1
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_3/build/ql/tmp/2011356665/10000
+Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/216605822/10000
0 0.0
10 10.0
100 200.0