Posted to commits@hive.apache.org by he...@apache.org on 2009/12/05 07:57:36 UTC
svn commit: r887508 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/results/clientpositive/ ql/src/test/results/compiler/p...
Author: heyongqiang
Date: Sat Dec 5 06:57:35 2009
New Revision: 887508
URL: http://svn.apache.org/viewvc?rev=887508&view=rev
Log:
fix sampling in subquery for real
Added:
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
hadoop/hive/trunk/ql/src/test/results/clientpositive/input31.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sat Dec 5 06:57:35 2009
@@ -305,6 +305,9 @@
HIVE-962 "show functions" should work with unquoted string.
(Paul Yang via zshao)
+ HIVE-638 fix sampling in subquery for real
+ (namit via He Yongqiang)
+
Release 0.4.0 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Sat Dec 5 06:57:35 2009
@@ -48,6 +48,7 @@
import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.tableScanDesc;
+import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.metadata.*;
import org.apache.hadoop.hive.ql.parse.*;
import org.apache.hadoop.hive.ql.Context;
@@ -430,7 +431,6 @@
}
plan.getAliasToPartnInfo().put(alias_id, aliasPartnDesc);
- SamplePruner samplePruner = parseCtx.getAliasToSamplePruner().get(alias_id);
for (Partition part : parts) {
if (part.getTable().isPartitioned())
@@ -441,8 +441,10 @@
// Later the properties have to come from the partition as opposed
// to from the table in order to support versioning.
Path paths[];
- if (samplePruner != null) {
- paths = samplePruner.prune(part);
+ sampleDesc sampleDescr = parseCtx.getOpToSamplePruner().get(topOp);
+
+ if (sampleDescr != null) {
+ paths = SamplePruner.prune(part, sampleDescr);
}
else {
paths = part.getPath();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java Sat Dec 5 06:57:35 2009
@@ -53,6 +53,7 @@
transformations.add(new GroupByOptimizer());
}
+ transformations.add(new SamplePruner());
transformations.add(new MapJoinProcessor());
transformations.add(new UnionProcessor());
transformations.add(new JoinReorder());
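SamplePruner now runs as an ordinary optimizer Transform, registered just before MapJoinProcessor. The contract it implements is small; here is a minimal sketch of the Transform interface as inferred from SamplePruner's @Override and its javadoc reference below (the body is reconstructed for illustration, not copied from the real file):

    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;

    // The Optimizer calls transform() on each registered Transform in order,
    // threading the ParseContext through. SamplePruner uses its turn to walk
    // the operator tree and fill in pctx.getOpToSamplePruner().
    public interface Transform {
        ParseContext transform(ParseContext pctx) throws SemanticException;
    }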
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java?rev=887508&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java Sat Dec 5 06:57:35 2009
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
+import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.GraphWalker;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.lib.Rule;
+import org.apache.hadoop.hive.ql.lib.RuleRegExp;
+import org.apache.hadoop.hive.ql.optimizer.Transform;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.plan.filterDesc;
+import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.fs.Path;
+
+/**
+ * The transformation step that does sample pruning.
+ *
+ */
+public class SamplePruner implements Transform {
+
+ public static class SamplePrunerCtx implements NodeProcessorCtx {
+ HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
+
+ public SamplePrunerCtx(HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
+ this.opToSamplePruner = opToSamplePruner;
+ }
+
+ /**
+ * @return the opToSamplePruner
+ */
+ public HashMap<TableScanOperator, sampleDesc> getOpToSamplePruner() {
+ return opToSamplePruner;
+ }
+
+ /**
+ * @param opToSamplePruner
+ * the opToSamplePruner to set
+ */
+ public void setOpToSamplePruner(HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
+ this.opToSamplePruner = opToSamplePruner;
+ }
+ }
+
+ // The log
+ private static final Log LOG = LogFactory.getLog("hive.ql.optimizer.SamplePruner");
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.hive.ql.optimizer.Transform#transform(org.apache.hadoop.hive.ql.parse.ParseContext)
+ */
+ @Override
+ public ParseContext transform(ParseContext pctx) throws SemanticException {
+
+ // create the context for walking operators
+ SamplePrunerCtx samplePrunerCtx = new SamplePrunerCtx(pctx.getOpToSamplePruner());
+
+ Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
+ opRules.put(new RuleRegExp("R1", "(TS%FIL%FIL%)"), getFilterProc());
+
+ // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along
+ Dispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, samplePrunerCtx);
+ GraphWalker ogw = new DefaultGraphWalker(disp);
+
+ // Create a list of topop nodes
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.addAll(pctx.getTopOps().values());
+ ogw.startWalking(topNodes, null);
+ return pctx;
+ }
+
+ // Filter processor
+ public static class FilterPPR implements NodeProcessor {
+
+ @Override
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+ Object... nodeOutputs) throws SemanticException {
+ FilterOperator filOp = (FilterOperator)nd;
+ filterDesc filOpDesc = filOp.getConf();
+ sampleDesc sampleDescr = filOpDesc.getSampleDescr();
+
+ if ((sampleDescr == null) || !sampleDescr.getInputPruning())
+ return null;
+
+ assert stack.size() == 3;
+ TableScanOperator tsOp = (TableScanOperator)stack.get(0);
+ ((SamplePrunerCtx)procCtx).getOpToSamplePruner().put(tsOp, sampleDescr);
+ return null;
+ }
+ }
+
+ public static NodeProcessor getFilterProc() {
+ return new FilterPPR();
+ }
+
+ // Default processor which does nothing
+ public static class DefaultPPR implements NodeProcessor {
+
+ @Override
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+ Object... nodeOutputs) throws SemanticException {
+ // Nothing needs to be done.
+ return null;
+ }
+ }
+
+ public static NodeProcessor getDefaultProc() {
+ return new DefaultPPR();
+ }
+
+ /**
+ * Prunes to get all the files in the partition that satisfy the TABLESAMPLE clause
+ *
+ * @param part The partition to prune
+ * @return Path[]
+ * @throws SemanticException
+ */
+ @SuppressWarnings("nls")
+ public static Path[] prune(Partition part, sampleDesc sampleDescr) throws SemanticException {
+ int num = sampleDescr.getNumerator();
+ int den = sampleDescr.getDenominator();
+ int bucketCount = part.getBucketCount();
+ String fullScanMsg = "";
+
+ // check if input pruning is possible
+ if (sampleDescr.getInputPruning()) {
+ LOG.trace("numerator = " + num);
+ LOG.trace("denominator = " + den);
+ LOG.trace("bucket count = " + bucketCount);
+ if (bucketCount == den) {
+ Path [] ret = new Path [1];
+ ret[0] = part.getBucketPath(num-1);
+ return(ret);
+ }
+ else if (bucketCount > den && bucketCount % den == 0) {
+ int numPathsInSample = bucketCount / den;
+ Path [] ret = new Path[numPathsInSample];
+ for (int i = 0; i < numPathsInSample; i++) {
+ ret[i] = part.getBucketPath(i*den+num-1);
+ }
+ return ret;
+ }
+ else if (bucketCount < den && den % bucketCount == 0) {
+ Path [] ret = new Path[1];
+ ret[0] = part.getBucketPath((num-1)%bucketCount);
+ return ret;
+ }
+ else {
+ // need to do full scan
+ fullScanMsg = "Tablesample denominator "
+ + den + " is not multiple/divisor of bucket count "
+ + bucketCount + " of table " + part.getTable().getName();
+ }
+ }
+ else {
+ // need to do full scan
+ fullScanMsg = "Tablesample not on clustered columns";
+ }
+ LOG.warn(fullScanMsg + ", using full table scan");
+ Path [] ret = part.getPath();
+ return ret;
+ }
+
+
+}
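The three branches of prune() above pick bucket files with nothing but arithmetic on the TABLESAMPLE numerator, the denominator, and the table's bucket count. A standalone sketch of the same selection logic, with plain ints and a List of indexes standing in for Hive's Partition and Path (the class and method names are illustrative only):

    import java.util.ArrayList;
    import java.util.List;

    public class PruneArithmeticSketch {

        // Returns the bucket indexes selected for TABLESAMPLE(num OUT OF den)
        // on a table with bucketCount buckets, or null when only a full scan
        // is safe, mirroring the three cases in SamplePruner.prune().
        static List<Integer> selectBuckets(int num, int den, int bucketCount) {
            List<Integer> ret = new ArrayList<Integer>();
            if (bucketCount == den) {
                // exactly one bucket holds the sample
                ret.add(num - 1);
            } else if (bucketCount > den && bucketCount % den == 0) {
                // the sample spans every den-th bucket
                for (int i = 0; i < bucketCount / den; i++) {
                    ret.add(i * den + num - 1);
                }
            } else if (bucketCount < den && den % bucketCount == 0) {
                // the sample is a subset of a single bucket
                ret.add((num - 1) % bucketCount);
            } else {
                // denominator is neither a multiple nor a divisor of the
                // bucket count: fall back to a full scan
                return null;
            }
            return ret;
        }

        public static void main(String[] args) {
            System.out.println(selectBuckets(1, 2, 4)); // [0, 2]
            System.out.println(selectBuckets(1, 4, 4)); // [0]
            System.out.println(selectBuckets(3, 8, 4)); // [2]
            System.out.println(selectBuckets(1, 3, 4)); // null (full scan)
        }
    }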
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Sat Dec 5 06:57:35 2009
@@ -37,6 +37,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
+import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
/**
* Parse Context: The current parse context. This is passed to the optimizer
@@ -52,7 +53,7 @@
private QB qb;
private ASTNode ast;
private HashMap<TableScanOperator, exprNodeDesc> opToPartPruner;
- private HashMap<String, SamplePruner> aliasToSamplePruner;
+ private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
private HashMap<String, Operator<? extends Serializable>> topOps;
private HashMap<String, Operator<? extends Serializable>> topSelOps;
private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx;
@@ -85,8 +86,6 @@
* current parse tree
* @param opToPartPruner
* map from table scan operator to partition pruner
- * @param aliasToSamplePruner
- * sample pruner list
* @param topOps
* list of operators for the top query
* @param topSelOps
@@ -106,10 +105,10 @@
* @param uCtx
* @param listMapJoinOpsNoReducer
* list of map join operators with no reducer
+ * @param opToSamplePruner operator to sample pruner map
*/
public ParseContext(HiveConf conf, QB qb, ASTNode ast,
HashMap<TableScanOperator, exprNodeDesc> opToPartPruner,
- HashMap<String, SamplePruner> aliasToSamplePruner,
HashMap<String, Operator<? extends Serializable>> topOps,
HashMap<String, Operator<? extends Serializable>> topSelOps,
LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx,
@@ -117,14 +116,14 @@
HashMap<TableScanOperator, Table> topToTable,
List<loadTableDesc> loadTableWork, List<loadFileDesc> loadFileWork,
Context ctx, HashMap<String, String> idToTableNameMap, int destTableId, UnionProcContext uCtx,
- List<MapJoinOperator> listMapJoinOpsNoReducer,
+ List<MapJoinOperator> listMapJoinOpsNoReducer,
Map<GroupByOperator, Set<String>> groupOpToInputTables,
- Map<String, PrunedPartitionList> prunedPartitions) {
+ Map<String, PrunedPartitionList> prunedPartitions,
+ HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
this.conf = conf;
this.qb = qb;
this.ast = ast;
this.opToPartPruner = opToPartPruner;
- this.aliasToSamplePruner = aliasToSamplePruner;
this.joinContext = joinContext;
this.topToTable = topToTable;
this.loadFileWork = loadFileWork;
@@ -141,6 +140,7 @@
this.groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
this.groupOpToInputTables = groupOpToInputTables;
this.prunedPartitions = prunedPartitions;
+ this.opToSamplePruner = opToSamplePruner;
}
/**
@@ -232,21 +232,6 @@
public void setTopToTable(HashMap<TableScanOperator, Table> topToTable) {
this.topToTable = topToTable;
}
- /**
- * @return the aliasToSamplePruner
- */
- public HashMap<String, SamplePruner> getAliasToSamplePruner() {
- return aliasToSamplePruner;
- }
-
- /**
- * @param aliasToSamplePruner
- * the aliasToSamplePruner to set
- */
- public void setAliasToSamplePruner(
- HashMap<String, SamplePruner> aliasToSamplePruner) {
- this.aliasToSamplePruner = aliasToSamplePruner;
- }
/**
* @return the topOps
@@ -394,6 +379,21 @@
}
/**
+ * @return the opToSamplePruner
+ */
+ public HashMap<TableScanOperator, sampleDesc> getOpToSamplePruner() {
+ return opToSamplePruner;
+ }
+
+ /**
+ * @param opToSamplePruner
+ * the opToSamplePruner to set
+ */
+ public void setOpToSamplePruner(HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
+ this.opToSamplePruner = opToSamplePruner;
+ }
+
+ /**
* @return the groupOpToInputTables
*/
public Map<GroupByOperator, Set<String>> getGroupOpToInputTables() {
@@ -416,7 +416,7 @@
}
/**
- * @param prunedPartitions
+ * @param prunedPartitions
*/
public void setPrunedPartitions(
Map<String, PrunedPartitionList> prunedPartitions) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SamplePruner.java Sat Dec 5 06:57:35 2009
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.fs.Path;
-
-/**
- *
- * This class stores the mapping from table alias to the parse tree information of the table
- * sample clause(stored in the TableSample class).
- *
- */
-public class SamplePruner {
-
- /**
- * Table alias for the table e.g. in case of FROM t TABLESAMPLE(1 OUT OF 2 ON rand()) a
- * "a" is the table alias
- */
- private String tabAlias;
-
- /**
- * The parse tree corresponding to the TABLESAMPLE clause. e.g. in case of
- * FROM t TABLESAMPLE(1 OUT OF 2 ON rand()) a the parse tree of
- * "TABLESAMPLE(1 OUT OF 2 ON rand())" is parsed out and stored in tableSample
- */
- private TableSample tableSample;
-
- /**
- * The log handle for this class
- */
- @SuppressWarnings("nls")
- private static final Log LOG = LogFactory.getLog("hive.ql.parse.SamplePruner");
-
- public SamplePruner() {
-
- }
-
- /**
- * Constructs the SamplePruner given the table alias and the table sample
- *
- * @param alias The alias of the table specified in the query
- * @param tableSample The parse infromation of the TABLESAMPLE clause
- */
- public SamplePruner(String alias, TableSample tableSample) {
- this.tabAlias = alias;
- this.tableSample = tableSample;
- }
-
- /**
- * Gets the table alias
- *
- * @return String
- */
- public String getTabAlias() {
- return this.tabAlias;
- }
-
- /**
- * Sets the table alias
- *
- * @param tabAlias The table alias as specified in the query
- */
- public void setTabAlias(String tabAlias) {
- this.tabAlias = tabAlias;
- }
-
- /**
- * Gets the parse information of the associated table sample clause
- *
- * @return TableSample
- */
- public TableSample getTableSample() {
- return this.tableSample;
- }
-
- /**
- * Sets the parse information of the associated table sample clause
- *
- * @param tableSample Information related to the table sample clause
- */
- public void setTableSample(TableSample tableSample) {
- this.tableSample = tableSample;
- }
-
- /**
- * Prunes to get all the files in the partition that satisfy the TABLESAMPLE clause
- *
- * @param part The partition to prune
- * @return Path[]
- * @throws SemanticException
- */
- @SuppressWarnings("nls")
- public Path[] prune(Partition part) throws SemanticException {
- int num = this.tableSample.getNumerator();
- int den = this.tableSample.getDenominator();
- int bucketCount = part.getBucketCount();
- String fullScanMsg = "";
- // check if input pruning is possible
- if (this.tableSample.getInputPruning()) {
- LOG.trace("numerator = " + num);
- LOG.trace("denominator = " + den);
- LOG.trace("bucket count = " + bucketCount);
- if (bucketCount == den) {
- Path [] ret = new Path [1];
- ret[0] = part.getBucketPath(num-1);
- return(ret);
- }
- else if (bucketCount > den && bucketCount % den == 0) {
- int numPathsInSample = bucketCount / den;
- Path [] ret = new Path[numPathsInSample];
- for (int i = 0; i < numPathsInSample; i++) {
- ret[i] = part.getBucketPath(i*den+num-1);
- }
- return ret;
- }
- else if (bucketCount < den && den % bucketCount == 0) {
- Path [] ret = new Path[1];
- ret[0] = part.getBucketPath((num-1)%bucketCount);
- return ret;
- }
- else {
- // need to do full scan
- fullScanMsg = "Tablesample denominator "
- + den + " is not multiple/divisor of bucket count "
- + bucketCount + " of table " + this.tabAlias;
- }
- }
- else {
- // need to do full scan
- fullScanMsg = "Tablesample not on clustered columns";
- }
- LOG.warn(fullScanMsg + ", using full table scan");
- Path [] ret = part.getPath();
- return ret;
- }
-}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sat Dec 5 06:57:35 2009
@@ -98,6 +98,7 @@
import org.apache.hadoop.hive.ql.plan.fetchWork;
import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
import org.apache.hadoop.hive.ql.plan.filterDesc;
+import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.plan.forwardDesc;
import org.apache.hadoop.hive.ql.plan.groupByDesc;
import org.apache.hadoop.hive.ql.plan.joinDesc;
@@ -169,7 +170,6 @@
public class SemanticAnalyzer extends BaseSemanticAnalyzer {
private HashMap<TableScanOperator, exprNodeDesc> opToPartPruner;
- private HashMap<String, SamplePruner> aliasToSamplePruner;
private HashMap<String, Operator<? extends Serializable>> topOps;
private HashMap<String, Operator<? extends Serializable>> topSelOps;
private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx;
@@ -182,6 +182,7 @@
private int destTableId;
private UnionProcContext uCtx;
List<MapJoinOperator> listMapJoinOpsNoReducer;
+ private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
Map<GroupByOperator, Set<String>> groupOpToInputTables;
Map<String, PrunedPartitionList> prunedPartitions;
@@ -195,7 +196,7 @@
super(conf);
this.opToPartPruner = new HashMap<TableScanOperator, exprNodeDesc>();
- this.aliasToSamplePruner = new HashMap<String, SamplePruner>();
+ this.opToSamplePruner = new HashMap<TableScanOperator, sampleDesc>();
this.topOps = new HashMap<String, Operator<? extends Serializable>>();
this.topSelOps = new HashMap<String, Operator<? extends Serializable>>();
this.loadTableWork = new ArrayList<loadTableDesc>();
@@ -222,7 +223,6 @@
qb = null;
ast = null;
uCtx = null;
- this.aliasToSamplePruner.clear();
this.joinContext.clear();
this.opParseCtx.clear();
this.groupOpToInputTables.clear();
@@ -231,7 +231,7 @@
public void init(ParseContext pctx) {
opToPartPruner = pctx.getOpToPartPruner();
- aliasToSamplePruner = pctx.getAliasToSamplePruner();
+ opToSamplePruner = pctx.getOpToSamplePruner();
topOps = pctx.getTopOps();
topSelOps = pctx.getTopSelOps();
opParseCtx = pctx.getOpParseCtx();
@@ -249,10 +249,11 @@
}
public ParseContext getParseContext() {
- return new ParseContext(conf, qb, ast, opToPartPruner, aliasToSamplePruner, topOps,
+ return new ParseContext(conf, qb, ast, opToPartPruner, topOps,
topSelOps, opParseCtx, joinContext, topToTable, loadTableWork,
loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
- listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions);
+ listMapJoinOpsNoReducer,
+ groupOpToInputTables, prunedPartitions, opToSamplePruner);
}
@SuppressWarnings("nls")
@@ -593,33 +594,6 @@
}
}
- private void genSamplePruners(QBExpr qbexpr) throws SemanticException {
- if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) {
- genSamplePruners(qbexpr.getQB());
- } else {
- genSamplePruners(qbexpr.getQBExpr1());
- genSamplePruners(qbexpr.getQBExpr2());
- }
- }
-
- @SuppressWarnings("nls")
- private void genSamplePruners(QB qb) throws SemanticException {
- // Recursively prune subqueries
- for (String alias : qb.getSubqAliases()) {
- QBExpr qbexpr = qb.getSubqForAlias(alias);
- genSamplePruners(qbexpr);
- }
- for (String alias : qb.getTabAliases()) {
- String alias_id = (qb.getId() == null ? alias : qb.getId() + ":" + alias);
- QBParseInfo qbp = qb.getParseInfo();
- TableSample tableSample = qbp.getTabSample(alias_id);
- if (tableSample != null) {
- SamplePruner pruner = new SamplePruner(alias, tableSample);
- this.aliasToSamplePruner.put(alias_id, pruner);
- }
- }
- }
-
private void getMetaData(QBExpr qbexpr) throws SemanticException {
if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) {
getMetaData(qbexpr.getQB());
@@ -1390,14 +1364,14 @@
if (isInTransform) {
trfm = (ASTNode) selExprList.getChild(posn).getChild(0);
}
-
- // Detect a UDTF by looking up the function name in the registry.
+
+ // Detect a UDTF by looking up the function name in the registry.
// Not as clean TRANSFORM due to the lack of a special token.
boolean isUDTF = false;
String udtfOutputColAlias = null;
ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0);
GenericUDTF genericUDTF = null;
-
+
if (udtfExpr.getType() == HiveParser.TOK_FUNCTION) {
String funcName =
TypeCheckProcFactory.DefaultExprProcessor.getFunctionText(udtfExpr, true);
@@ -1407,20 +1381,20 @@
}
isUDTF = (genericUDTF != null);
}
-
+
if (isUDTF) {
- // Only support a single expression when it's a UDTF
+ // Only support a single expression when it's a UDTF
if (selExprList.getChildCount() > 1) {
throw new SemanticException(ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg());
}
//Require an AS for UDTFs
if (((ASTNode) selExprList.getChild(posn)).getChildCount() != 2 ||
- selExprList.getChild(posn).getChild(1).getType() != HiveParser.Identifier ){
+ selExprList.getChild(posn).getChild(1).getType() != HiveParser.Identifier ){
throw new SemanticException(ErrorMsg.UDTF_REQUIRE_AS.getMsg());
}
udtfOutputColAlias = unescapeIdentifier(selExprList.getChild(posn).getChild(1).getText());
- }
-
+ }
+
// The list of expressions after SELECT or SELECT TRANSFORM.
ASTNode exprList;
if (isInTransform) {
@@ -1430,12 +1404,12 @@
} else {
exprList = selExprList;
}
-
+
LOG.debug("genSelectPlan: input = " + inputRR.toString());
// For UDTF's, skip the function name
int startPosn = isUDTF ? posn + 1 : posn;
-
+
// Iterate over all expression (either after SELECT, or in SELECT TRANSFORM)
for (int i = startPosn; i < exprList.getChildCount(); ++i) {
@@ -1496,7 +1470,7 @@
out_rwsch.put(tabAlias, colAlias,
new ColumnInfo(getColumnInternalName(pos),
exp.getTypeInfo(), tabAlias, false));
-
+
pos = Integer.valueOf(pos.intValue() + 1);
}
}
@@ -1530,7 +1504,7 @@
return output;
}
-
+
/**
* Class to store GenericUDAF related information.
*/
@@ -2540,7 +2514,7 @@
new LinkedHashMap<String, GenericUDAFEvaluator>();
GroupByOperator groupByOperatorInfo = (GroupByOperator)genGroupByPlanMapGroupByOperator(qb,
dest, inputOperatorInfo, groupByDesc.Mode.HASH, genericUDAFEvaluators);
-
+
this.groupOpToInputTables.put(groupByOperatorInfo, this.opParseCtx.get(
inputOperatorInfo).getRR().getTableNames());
// Optimize the scenario when there are no grouping keys and no distinct - 2 map-reduce jobs are not needed
@@ -2903,10 +2877,10 @@
return limitMap;
}
-
+
private Operator genUDTFPlan(GenericUDTF genericUDTF, String udtfOutputColumnAlias,
QB qb, Operator input) throws SemanticException {
-
+
// No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
QBParseInfo qbp = qb.getParseInfo();
if (!qbp.getDestToGroupBy().isEmpty()) {
@@ -2921,14 +2895,14 @@
if (!qbp.getDestToClusterBy().isEmpty()) {
throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
}
-
- // Use the RowResolver from the input operator to generate a input
- // ObjectInspector that can be used to initialize the UDTF. Then, the
+
+ // Use the RowResolver from the input operator to generate a input
+ // ObjectInspector that can be used to initialize the UDTF. Then, the
// resulting output object inspector can be used to make the RowResolver
// for the UDTF operator
RowResolver selectRR = opParseCtx.get(input).getRR();
Vector<ColumnInfo> inputCols = selectRR.getColumnInfos();
-
+
// Create the object inspector for the input columns and initialize the UDTF
ArrayList<String> colNames = new ArrayList<String>();
ObjectInspector [] colOIs = new ObjectInspector[inputCols.size()];
@@ -2938,11 +2912,11 @@
inputCols.get(i).getType());
}
ObjectInspector outputOI = genericUDTF.initialize(colOIs);
-
- ColumnInfo outputCol =
+
+ ColumnInfo outputCol =
new ColumnInfo(udtfOutputColumnAlias,
TypeInfoUtils.getTypeInfoFromObjectInspector(outputOI), null, false);
-
+
// Create the row resolver for this operator from the output columns
RowResolver out_rwsch = new RowResolver();
@@ -2950,16 +2924,16 @@
null,
outputCol.getInternalName(),
outputCol);
-
+
// Add the UDTFOperator to the operator DAG
Operator udtf =
putOpInsertMap(OperatorFactory.getAndMakeChild(
- new udtfDesc(genericUDTF, udtfOutputColumnAlias),
+ new udtfDesc(genericUDTF, udtfOutputColumnAlias),
new RowSchema(out_rwsch.getColumnInfos()),
input), out_rwsch);
return udtf;
}
-
+
@SuppressWarnings("nls")
private Operator genLimitMapRedPlan(String dest, QB qb, Operator input, int limit, boolean extraMRStep)
throws SemanticException {
@@ -4450,15 +4424,13 @@
// check if input pruning is enough
if ((sampleExprs == null || sampleExprs.size() == 0 || colsEqual)
&& (num == den || den <= numBuckets && numBuckets % den == 0)) {
- // input pruning is enough; no need for filter
+
+ // input pruning is enough; add the filter for the optimizer to use it later
LOG.info("No need for sample filter");
- // TODO sample predicate is not needed, but we are adding it anyway since
- // input pruning is broken for subqueries. will remove this once we move
- // compilation of sampling to use the operator tree
exprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols, colsEqual, alias, rwsch, qb.getMetaData(), null);
tableOp = OperatorFactory.getAndMakeChild(
- new filterDesc(samplePredicate, true),
- top);
+ new filterDesc(samplePredicate, true, new sampleDesc(ts.getNumerator(), ts.getDenominator(), tabBucketCols, true)),
+ top);
}
else {
// need to add filter
@@ -4491,6 +4463,11 @@
TableSample tsSample = new TableSample(1, numBuckets);
tsSample.setInputPruning(true);
qb.getParseInfo().setTabSample(alias, tsSample);
+ exprNodeDesc samplePred = genSamplePredicate(tsSample, tab.getBucketCols(), true, alias, rwsch, qb.getMetaData(), null);
+ tableOp = OperatorFactory.getAndMakeChild(
+ new filterDesc(samplePred, true,
+ new sampleDesc(tsSample.getNumerator(), tsSample.getDenominator(), tab.getBucketCols(), true)),
+ top);
LOG.info("No need for sample filter");
}
// The table is not bucketed, add a dummy filter :: rand()
@@ -4957,9 +4934,14 @@
genPlan(qb);
- ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner, aliasToSamplePruner, topOps,
- topSelOps, opParseCtx, joinContext, topToTable, loadTableWork, loadFileWork,
- ctx, idToTableNameMap, destTableId, uCtx, listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions);
+ ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner, topOps,
+ topSelOps, opParseCtx, joinContext, topToTable,
+ loadTableWork, loadFileWork,
+ ctx, idToTableNameMap, destTableId, uCtx,
+ listMapJoinOpsNoReducer,
+ groupOpToInputTables,
+ prunedPartitions,
+ opToSamplePruner);
Optimizer optm = new Optimizer();
optm.setPctx(pCtx);
@@ -4968,10 +4950,6 @@
init(pCtx);
qb = pCtx.getQB();
- // Do any sample pruning
- genSamplePruners(qb);
- LOG.info("Completed sample pruning");
-
// At this point we have the complete operator tree
// from which we want to find the reduce operator
genMapRedTasks(qb);
@@ -5011,7 +4989,7 @@
// create a walker which walks the tree in a DFS manner while maintaining the operator stack. The dispatcher
// generates the plan from the operator tree
Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
-
+
opRules.put(new RuleRegExp("R1", HiveParser.TOK_NULL + "%"), TypeCheckProcFactory.getNullExprProcessor());
opRules.put(new RuleRegExp("R2", HiveParser.Number + "%"), TypeCheckProcFactory.getNumExprProcessor());
opRules.put(new RuleRegExp("R3", HiveParser.Identifier + "%|" +
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java Sat Dec 5 06:57:35 2009
@@ -19,18 +19,70 @@
package org.apache.hadoop.hive.ql.plan;
import java.io.Serializable;
+import java.util.List;
@explain(displayName="Filter Operator")
public class filterDesc implements Serializable {
+
+ /**
+ * sampleDesc stores the TABLESAMPLE information needed for input pruning
+ */
+ public static class sampleDesc {
+ // The numerator of the TABLESAMPLE clause
+ private int numerator;
+
+ // The denominator of the TABLESAMPLE clause
+ private int denominator;
+
+ // bucket columns for the table
+ private List<String> tabBucketCols;
+
+ // Input files can be pruned
+ private boolean inputPruning;
+
+ public sampleDesc() {
+ }
+
+ public sampleDesc(int numerator, int denominator, List<String> tabBucketCols, boolean inputPruning) {
+ this.numerator = numerator;
+ this.denominator = denominator;
+ this.tabBucketCols = tabBucketCols;
+ this.inputPruning = inputPruning;
+ }
+
+ public int getNumerator() {
+ return this.numerator;
+ }
+
+ public int getDenominator() {
+ return this.denominator;
+ }
+
+ public boolean getInputPruning() {
+ return inputPruning;
+ }
+ }
+
private static final long serialVersionUID = 1L;
private org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate;
private boolean isSamplingPred;
+ private transient sampleDesc sampleDescr;
+
public filterDesc() { }
public filterDesc(
final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate, boolean isSamplingPred) {
this.predicate = predicate;
this.isSamplingPred = isSamplingPred;
+ this.sampleDescr = null;
}
+
+ public filterDesc(
+ final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate, boolean isSamplingPred, final sampleDesc sampleDescr) {
+ this.predicate = predicate;
+ this.isSamplingPred = isSamplingPred;
+ this.sampleDescr = sampleDescr;
+ }
+
@explain(displayName="predicate")
public org.apache.hadoop.hive.ql.plan.exprNodeDesc getPredicate() {
return this.predicate;
@@ -38,6 +90,7 @@
public void setPredicate(final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
this.predicate = predicate;
}
+
@explain(displayName="isSamplingPred", normalExplain=false)
public boolean getIsSamplingPred() {
return this.isSamplingPred;
@@ -45,4 +98,13 @@
public void setIsSamplingPred(final boolean isSamplingPred) {
this.isSamplingPred = isSamplingPred;
}
+
+ @explain(displayName="sampleDesc", normalExplain=false)
+ public sampleDesc getSampleDescr() {
+ return this.sampleDescr;
+ }
+ public void setSampleDescr(final sampleDesc sampleDescr) {
+ this.sampleDescr = sampleDescr;
+ }
+
}
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input31.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input31.q.out?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input31.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input31.q.out Sat Dec 5 06:57:35 2009
@@ -38,19 +38,27 @@
srcbucket
TableScan
alias: srcbucket
- Select Operator
- Group By Operator
- aggregations:
- expr: count(1)
- bucketGroup: false
- mode: hash
- outputColumnNames: _col0
- Reduce Output Operator
- sort order:
- tag: -1
- value expressions:
- expr: _col0
- type: bigint
+ Filter Operator
+ predicate:
+ expr: (((hash(key) & 2147483647) % 2) = 0)
+ type: boolean
+ Filter Operator
+ predicate:
+ expr: (((hash(key) & 2147483647) % 2) = 0)
+ type: boolean
+ Select Operator
+ Group By Operator
+ aggregations:
+ expr: count(1)
+ bucketGroup: false
+ mode: hash
+ outputColumnNames: _col0
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: bigint
Reduce Operator Tree:
Group By Operator
aggregations:
@@ -101,11 +109,11 @@
PREHOOK: query: select * from tst_dest31
PREHOOK: type: QUERY
PREHOOK: Input: default@tst_dest31
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1418882127/10000
+PREHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/305456482/10000
POSTHOOK: query: select * from tst_dest31
POSTHOOK: type: QUERY
POSTHOOK: Input: default@tst_dest31
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk-commit/build/ql/tmp/1418882127/10000
+POSTHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/305456482/10000
493
PREHOOK: query: drop table tst_dest31
PREHOOK: type: DROPTABLE
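The plan above now shows the TableScan followed by two identical sampling filters, which is precisely the TS%FIL%FIL% shape the new SamplePruner rule matches. The predicate itself, (((hash(key) & 2147483647) % 2) = 0), masks the hash to non-negative, takes it modulo the denominator, and keeps rows in bucket numerator - 1. A sketch of the same check in plain Java, using String.hashCode() as a stand-in for Hive's hash UDF (the generalization to an arbitrary numerator is an assumption):

    public class SamplePredicateSketch {
        // true when the row falls in TABLESAMPLE(BUCKET num OUT OF den);
        // for BUCKET 1 OUT OF 2 this reduces to the % 2 == 0 test above.
        // 2147483647 in the plan is Integer.MAX_VALUE, the sign-bit mask.
        static boolean inSample(int keyHash, int num, int den) {
            return ((keyHash & Integer.MAX_VALUE) % den) == (num - 1);
        }

        public static void main(String[] args) {
            System.out.println(inSample("486".hashCode(), 1, 2)); // true
            System.out.println(inSample("487".hashCode(), 1, 2)); // false
        }
    }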
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out Sat Dec 5 06:57:35 2009
@@ -76,12 +76,12 @@
type: string
Needs Tagging: true
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [t, s]
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [t]
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [t]
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [t]
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [t, s]
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [t]
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [t]
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [t]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -99,8 +99,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -116,12 +116,12 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -139,8 +139,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -156,12 +156,12 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -179,8 +179,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -196,12 +196,12 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -219,8 +219,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -236,8 +236,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcpart
- transient_lastDdlTime 1258006774
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart
+ transient_lastDdlTime 1259956735
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcpart
name: srcpart
@@ -268,7 +268,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/717250972/10002
+ directory: file:/data/users/njain/hive3/hive3/build/ql/tmp/1274643480/10002
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -280,7 +280,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/717250972/10002
+ file:/data/users/njain/hive3/hive3/build/ql/tmp/1274643480/10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -305,9 +305,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/717250972/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/717250972/10002]
+ file:/data/users/njain/hive3/hive3/build/ql/tmp/1274643480/10002 [file:/data/users/njain/hive3/hive3/build/ql/tmp/1274643480/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/717250972/10002
+ file:/data/users/njain/hive3/hive3/build/ql/tmp/1274643480/10002
Partition
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -327,7 +327,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/717250972/10001
+ directory: file:/data/users/njain/hive3/hive3/build/ql/tmp/1274643480/10001
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -352,7 +352,7 @@
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2060636212/10000
+PREHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1630113026/10000
POSTHOOK: query: SELECT s.key, s.value
FROM srcpart TABLESAMPLE (BUCKET 1 OUT OF 1 ON key) s
JOIN srcpart TABLESAMPLE (BUCKET 1 OUT OF 10 ON key) t
@@ -364,7 +364,7 @@
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2060636212/10000
+POSTHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1630113026/10000
0 val_0
0 val_0
0 val_0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out Sat Dec 5 06:57:35 2009
@@ -47,7 +47,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/486599627/10001
+ directory: file:/data/users/njain/hive3/hive3/build/ql/tmp/211159738/10001
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -57,9 +57,9 @@
columns.types int:string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket [s:a]
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s:a]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -74,7 +74,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1259893051
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -90,8 +91,8 @@
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
- transient_lastDdlTime 1258006794
+ location file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1259893051
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
name: srcbucket
@@ -105,12 +106,12 @@
FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s
PREHOOK: type: QUERY
PREHOOK: Input: default@srcbucket
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2031403560/10000
+PREHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/553111824/10000
POSTHOOK: query: SELECT s.*
FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcbucket
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2031403560/10000
+POSTHOOK: Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/553111824/10000
474 val_475
62 val_63
468 val_469
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml Sat Dec 5 06:57:35 2009
@@ -30,7 +30,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/57980319/10000</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/2038933084/10000</string>
</void>
<void property="table">
<object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -83,11 +83,11 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648347</string>
+ <string>1259959085</string>
</void>
</object>
</void>
@@ -97,7 +97,7 @@
</object>
</void>
<void property="tmpDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/57980319/10001</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/2038933084/10001</string>
</void>
</object>
</void>
@@ -125,10 +125,10 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
</void>
<void property="targetDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/57980319/10000</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/2038933084/10000</string>
</void>
</object>
</void>
@@ -146,7 +146,7 @@
<void property="aliasToWork">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
<object id="TableScanOperator0" class="org.apache.hadoop.hive.ql.exec.TableScanOperator">
<void property="childOperators">
<object class="java.util.ArrayList">
@@ -385,10 +385,10 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
<object class="java.util.ArrayList">
<void method="add">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
</void>
</object>
</void>
@@ -397,7 +397,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="deserializerClass">
<class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
@@ -430,7 +430,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/57980319/10000</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/2038933084/10000</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -479,7 +479,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>file.outputformat</string>
@@ -487,7 +487,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648347</string>
+ <string>1259959085</string>
</void>
</object>
</void>
@@ -621,7 +621,7 @@
<void property="resolverCtx">
<object class="org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles$ConditionalResolverMergeFilesCtx">
<void property="dir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
</void>
<void property="listTasks">
<object idref="ArrayList0"/>
@@ -711,11 +711,11 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648346</string>
+ <string>1259959084</string>
</void>
</object>
</void>
@@ -771,7 +771,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/862340814/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/516493643/10002</string>
</void>
<void property="tableInfo">
<object idref="tableDesc0"/>
@@ -1060,6 +1060,9 @@
</void>
</object>
</void>
+ <void property="sampleDescr">
+ <object class="org.apache.hadoop.hive.ql.plan.filterDesc$sampleDesc"/>
+ </void>
</object>
</void>
<void property="counterNames">
@@ -1324,7 +1327,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
<object class="java.util.ArrayList">
<void method="add">
<string>s</string>
@@ -1336,7 +1339,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="deserializerClass">
<class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
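Before the sample4.q.xml hunks below, a note on what the new serialized entries mean. The @@ -1060,6 +1060,9 @@ hunk above (and its twin in sample4.q.xml) adds a sampleDescr bean property to the serialized filter plan, typed as a nested static class that XMLEncoder writes as filterDesc$sampleDesc. A minimal sketch of the shape this implies follows; only the property name, the class nesting, and the JavaBean getter/setter convention are confirmed by the plan files, while the field names (numerator, denominator, inputPruning) are assumptions chosen to mirror TABLESAMPLE(BUCKET x OUT OF y) semantics and are labeled as such in the comments.

    // Sketch only: the "sampleDescr" property and the filterDesc$sampleDesc
    // nesting are confirmed by the plan XML above; the fields inside
    // sampleDesc are illustrative assumptions, not the committed code.
    package org.apache.hadoop.hive.ql.plan;

    import java.io.Serializable;

    public class filterDesc implements Serializable {

      // Nested sampling descriptor; XMLEncoder records its class as
      // "org.apache.hadoop.hive.ql.plan.filterDesc$sampleDesc".
      public static class sampleDesc implements Serializable {
        private int numerator;        // assumed: x in TABLESAMPLE(BUCKET x OUT OF y)
        private int denominator;      // assumed: y in TABLESAMPLE(BUCKET x OUT OF y)
        private boolean inputPruning; // assumed: whether pruning was pushed to the input

        public sampleDesc() { }       // no-arg constructor required by XMLEncoder

        public int getNumerator() { return numerator; }
        public void setNumerator(int numerator) { this.numerator = numerator; }
        public int getDenominator() { return denominator; }
        public void setDenominator(int denominator) { this.denominator = denominator; }
        public boolean getInputPruning() { return inputPruning; }
        public void setInputPruning(boolean inputPruning) { this.inputPruning = inputPruning; }
      }

      private sampleDesc sampleDescr;

      // Getter/setter pair behind <void property="sampleDescr"> in the plan XML.
      public sampleDesc getSampleDescr() { return sampleDescr; }
      public void setSampleDescr(sampleDesc sampleDescr) { this.sampleDescr = sampleDescr; }
    }

Storing the descriptor on the filter's conf object, rather than keying pruning off the table alias, is what lets sampling survive inside a subquery: the operator carries its own sampling information regardless of how aliases are rewritten around it.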
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml?rev=887508&r1=887507&r2=887508&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml Sat Dec 5 06:57:35 2009
@@ -30,7 +30,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1585851093/10000</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/585759686/10000</string>
</void>
<void property="table">
<object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -83,11 +83,11 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648351</string>
+ <string>1259959089</string>
</void>
</object>
</void>
@@ -97,7 +97,7 @@
</object>
</void>
<void property="tmpDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1585851093/10001</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/585759686/10001</string>
</void>
</object>
</void>
@@ -125,10 +125,10 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
</void>
<void property="targetDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1585851093/10000</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/585759686/10000</string>
</void>
</object>
</void>
@@ -146,7 +146,7 @@
<void property="aliasToWork">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
<object id="TableScanOperator0" class="org.apache.hadoop.hive.ql.exec.TableScanOperator">
<void property="childOperators">
<object class="java.util.ArrayList">
@@ -385,10 +385,10 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
<object class="java.util.ArrayList">
<void method="add">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
</void>
</object>
</void>
@@ -397,7 +397,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="deserializerClass">
<class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
@@ -430,7 +430,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1585851093/10000</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/585759686/10000</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -479,7 +479,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>file.outputformat</string>
@@ -487,7 +487,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648351</string>
+ <string>1259959089</string>
</void>
</object>
</void>
@@ -621,7 +621,7 @@
<void property="resolverCtx">
<object class="org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles$ConditionalResolverMergeFilesCtx">
<void property="dir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
</void>
<void property="listTasks">
<object idref="ArrayList0"/>
@@ -711,11 +711,11 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648350</string>
+ <string>1259959088</string>
</void>
</object>
</void>
@@ -771,7 +771,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1787456022/10002</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1673316015/10002</string>
</void>
<void property="tableInfo">
<object idref="tableDesc0"/>
@@ -1060,6 +1060,9 @@
</void>
</object>
</void>
+ <void property="sampleDescr">
+ <object class="org.apache.hadoop.hive.ql.plan.filterDesc$sampleDesc"/>
+ </void>
</object>
</void>
<void property="counterNames">
@@ -1324,7 +1327,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
<object class="java.util.ArrayList">
<void method="add">
<string>s</string>
@@ -1336,7 +1339,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
+ <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="deserializerClass">
<class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
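One more detail of the serialized output: both plan files record the new descriptor as an empty element, <object class="org.apache.hadoop.hive.ql.plan.filterDesc$sampleDesc"/>. That is standard java.beans.XMLEncoder behavior rather than missing data: the encoder emits <void property="..."> entries only for properties whose values differ from what the no-arg constructor produces, so a descriptor left at its defaults serializes as a bare element. A self-contained demo of this (hypothetical Sample bean, not Hive code):

    // Demonstrates why a freshly constructed bean serializes as an empty
    // <object .../> element: XMLEncoder skips properties still at their
    // constructor-default values.
    import java.beans.XMLEncoder;
    import java.io.ByteArrayOutputStream;

    public class EmptyBeanDemo {
      public static class Sample {
        private int numerator;
        public int getNumerator() { return numerator; }
        public void setNumerator(int n) { numerator = n; }
      }

      public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        XMLEncoder enc = new XMLEncoder(out);
        enc.writeObject(new Sample()); // all defaults, so no <void property=...> entries
        enc.close();
        // The output contains an empty <object class="EmptyBeanDemo$Sample"/> element.
        System.out.println(out);
      }
    }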