You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2010/10/13 18:06:38 UTC
svn commit: r1022162 [1/2] - in /hadoop/hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/metadata/
ql/src/java/org/apache/hadoop/hive/ql/optimizer/
ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test...
Author: namit
Date: Wed Oct 13 16:06:37 2010
New Revision: 1022162
URL: http://svn.apache.org/viewvc?rev=1022162&view=rev
Log:
HIVE-1699 Incorrect partition pruning ANALYZE TABLE
(Ning Zhang via namit)
Added:
hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats12.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats13.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/stats12.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/stats13.q.out
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/test/results/clientpositive/stats6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/stats7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/stats8.q.out
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Oct 13 16:06:37 2010
@@ -354,6 +354,9 @@ Trunk - Unreleased
HIVE-1376 Simple UDAFs with more than 1 parameter crash on empty row query
(Ning Zhang via He Yongqiang)
+ HIVE-1699 Incorrect partition pruning ANALYZE TABLE
+ (Ning Zhang via namit)
+
TESTS
HIVE-1464. improve test query performance
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java Wed Oct 13 16:06:37 2010
@@ -406,9 +406,11 @@ public class StatsTask extends Task<Stat
return null;
}
// get all partitions that matches with the partition spec
- List<Partition> partitions = db.getPartitions(table, tblSpec.getPartSpec());
- for (Partition partn : partitions) {
- list.add(partn);
+ List<Partition> partitions = tblSpec.partitions;
+ if (partitions != null) {
+ for (Partition partn : partitions) {
+ list.add(partn);
+ }
}
} else if (work.getLoadTableDesc() != null) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Wed Oct 13 16:06:37 2010
@@ -1279,6 +1279,45 @@ public class Hive {
}
/**
+ * get all the partitions of the table that match the given partial
+ * specification. partition columns whose value can be anything should be
+ * an empty string.
+ *
+ * @param tbl
+ * object for which partition is needed. Must be partitioned.
+ * @return list of partition objects
+ * @throws HiveException
+ */
+ public List<Partition> getPartitionsByNames(Table tbl,
+ Map<String, String> partialPartSpec)
+ throws HiveException {
+
+ if (!tbl.isPartitioned()) {
+ throw new HiveException("Partition spec should only be supplied for a " +
+ "partitioned table");
+ }
+
+ List<String> names = getPartitionNames(tbl.getDbName(), tbl.getTableName(),
+ partialPartSpec, (short)-1);
+
+ List<Partition> partitions = new ArrayList<Partition>();
+
+ for (String pval: names) {
+ try {
+ org.apache.hadoop.hive.metastore.api.Partition tpart =
+ getMSC().getPartition(tbl.getDbName(), tbl.getTableName(), pval);
+ if (tpart != null) {
+ Partition p = new Partition(tbl, tpart);
+ partitions.add(p);
+ }
+ } catch (Exception e) {
+ throw new HiveException(e);
+ }
+ }
+
+ return partitions;
+ }
+ /**
* Get the name of the current database
* @return
*/
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java Wed Oct 13 16:06:37 2010
@@ -19,7 +19,9 @@
package org.apache.hadoop.hive.ql.optimizer;
import java.io.Serializable;
+import java.util.HashSet;
import java.util.Map;
+import java.util.Set;
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.Operator;
@@ -29,13 +31,15 @@ import org.apache.hadoop.hive.ql.exec.Ta
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.QBParseInfo;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.StatsWork;
-
/**
* Processor for the rule - table scan.
*/
@@ -85,7 +89,23 @@ public class GenMRTableScan1 implements
currTask.addDependentTask(statsTask);
ctx.getRootTasks().add(currTask);
currWork.setGatheringStats(true);
- GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currWork, false, ctx);
+ // NOTE: here we should use the new partition predicate pushdown API to get a pruned partition list,
+ // and pass it to setTaskPlan as the last parameter
+ Set<Partition> confirmedPartns = new HashSet<Partition>();
+ tableSpec tblSpec = parseInfo.getTableSpec();
+ if (tblSpec.specType == tableSpec.SpecType.STATIC_PARTITION) {
+ // static partition
+ confirmedPartns.add(tblSpec.partHandle);
+ } else if (tblSpec.specType == tableSpec.SpecType.DYNAMIC_PARTITION) {
+ // dynamic partition
+ confirmedPartns.addAll(tblSpec.partitions);
+ }
+ if (confirmedPartns.size() > 0) {
+ PrunedPartitionList partList = new PrunedPartitionList(confirmedPartns, new HashSet<Partition>(), null);
+ GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currWork, false, ctx, partList);
+ } else { // non-partitioned table
+ GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currWork, false, ctx);
+ }
}
return null;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Wed Oct 13 16:06:37 2010
@@ -506,6 +506,28 @@ public final class GenMapRedUtils {
public static void setTaskPlan(String alias_id,
Operator<? extends Serializable> topOp, MapredWork plan, boolean local,
GenMRProcContext opProcCtx) throws SemanticException {
+ setTaskPlan(alias_id, topOp, plan, local, opProcCtx, null);
+ }
+
+ /**
+ * set the current task in the mapredWork.
+ *
+ * @param alias_id
+ * current alias
+ * @param topOp
+ * the top operator of the stack
+ * @param plan
+ * current plan
+ * @param local
+ * whether you need to add to map-reduce or local work
+ * @param opProcCtx
+ * processing context
+ * @param pList
+ * pruned partition list. If it is null it will be computed on-the-fly.
+ */
+ public static void setTaskPlan(String alias_id,
+ Operator<? extends Serializable> topOp, MapredWork plan, boolean local,
+ GenMRProcContext opProcCtx, PrunedPartitionList pList) throws SemanticException {
ParseContext parseCtx = opProcCtx.getParseCtx();
Set<ReadEntity> inputs = opProcCtx.getInputs();
@@ -515,17 +537,19 @@ public final class GenMapRedUtils {
Path tblDir = null;
TableDesc tblDesc = null;
- PrunedPartitionList partsList = null;
+ PrunedPartitionList partsList = pList;
- try {
- partsList = PartitionPruner.prune(parseCtx.getTopToTable().get(topOp),
- parseCtx.getOpToPartPruner().get(topOp), opProcCtx.getConf(),
- alias_id, parseCtx.getPrunedPartitions());
- } catch (SemanticException e) {
- throw e;
- } catch (HiveException e) {
- LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
- throw new SemanticException(e.getMessage(), e);
+ if (partsList == null) {
+ try {
+ partsList = PartitionPruner.prune(parseCtx.getTopToTable().get(topOp),
+ parseCtx.getOpToPartPruner().get(topOp), opProcCtx.getConf(),
+ alias_id, parseCtx.getPrunedPartitions());
+ } catch (SemanticException e) {
+ throw e;
+ } catch (HiveException e) {
+ LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
+ throw new SemanticException(e.getMessage(), e);
+ }
}
// Generate the map work for this alias_id
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Wed Oct 13 16:06:37 2010
@@ -579,7 +579,9 @@ public abstract class BaseSemanticAnalyz
public Map<String, String> partSpec; // has to use LinkedHashMap to enforce order
public Partition partHandle;
public int numDynParts; // number of dynamic partition columns
- private List<Partition> partitions; // involved partitions in TableScanOperator/FileSinkOperator
+ public List<Partition> partitions; // involved partitions in TableScanOperator/FileSinkOperator
+ public static enum SpecType {TABLE_ONLY, STATIC_PARTITION, DYNAMIC_PARTITION};
+ public SpecType specType;
public tableSpec(Hive db, HiveConf conf, ASTNode ast)
throws SemanticException {
@@ -610,6 +612,7 @@ public abstract class BaseSemanticAnalyz
if (ast.getChildCount() == 2) {
childIndex = 1;
ASTNode partspec = (ASTNode) ast.getChild(1);
+ partitions = new ArrayList<Partition>();
// partSpec is a mapping from partition column name to its value.
partSpec = new LinkedHashMap<String, String>(partspec.getChildCount());
for (int i = 0; i < partspec.getChildCount(); ++i) {
@@ -624,6 +627,7 @@ public abstract class BaseSemanticAnalyz
partSpec.put(colName, val);
}
+ // check if the columns specified in the partition() clause are actually partition columns
Utilities.validatePartSpec(tableHandle, partSpec);
// check if the partition spec is valid
@@ -646,15 +650,26 @@ public abstract class BaseSemanticAnalyz
}
}
partHandle = null;
+ specType = SpecType.DYNAMIC_PARTITION;
} else {
try {
- // this doesn't create partition. partition is created in MoveTask
- partHandle = new Partition(tableHandle, partSpec, null);
- } catch (HiveException e) {
- throw new SemanticException(
- ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(childIndex)));
- }
+ // this doesn't create partition.
+ partHandle = db.getPartition(tableHandle, partSpec, false);
+ if (partHandle == null) {
+ // if partSpec doesn't exists in DB, return a delegate one
+ // and the actual partition is created in MoveTask
+ partHandle = new Partition(tableHandle, partSpec, null);
+ } else {
+ partitions.add(partHandle);
+ }
+ } catch (HiveException e) {
+ throw new SemanticException(
+ ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(childIndex)));
+ }
+ specType = SpecType.STATIC_PARTITION;
}
+ } else {
+ specType = SpecType.TABLE_ONLY;
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Oct 13 16:06:37 2010
@@ -106,6 +106,7 @@ import org.apache.hadoop.hive.ql.optimiz
import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalOptimizer;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec.SpecType;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
@@ -702,6 +703,7 @@ public class SemanticAnalyzer extends Ba
// Allow analyze the whole table and dynamic partitions
HiveConf.setVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
+
break;
case HiveParser.TOK_UNION:
@@ -784,6 +786,13 @@ public class SemanticAnalyzer extends Ba
if (qb.getParseInfo().isAnalyzeCommand()) {
tableSpec ts = new tableSpec(db, conf, (ASTNode) ast.getChild(0));
+ if (ts.specType == SpecType.DYNAMIC_PARTITION) { // dynamic partitions
+ try {
+ ts.partitions = db.getPartitionsByNames(ts.tableHandle, ts.partSpec);
+ } catch (HiveException e) {
+ throw new SemanticException("Cannot get partitions for " + ts.partSpec, e);
+ }
+ }
qb.getParseInfo().addTableSpec(alias, ts);
}
}
@@ -818,8 +827,8 @@ public class SemanticAnalyzer extends Ba
// tableSpec ts is got from the query (user specified),
// which means the user didn't specify partitions in their query,
// but whether the table itself is partitioned is not known.
- if (ts.partHandle == null) {
- // This is a table
+ if (ts.specType != SpecType.STATIC_PARTITION) {
+ // This is a table or dynamic partition
qb.getMetaData().setDestForAlias(name, ts.tableHandle);
// has dynamic as well as static partitions
if (ts.partSpec != null && ts.partSpec.size() > 0) {
@@ -3348,7 +3357,7 @@ public class SemanticAnalyzer extends Ba
}
dpCtx = qbm.getDPCtx(dest);
if (dpCtx == null) {
- // validatePartSpec(dest_tab, partSpec);
+ Utilities.validatePartSpec(dest_tab, partSpec);
dpCtx = new DynamicPartitionCtx(dest_tab, partSpec,
conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME),
conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTSPERNODE));
@@ -5744,16 +5753,13 @@ public class SemanticAnalyzer extends Ba
if (partSpec == null) {
throw new SemanticException(ErrorMsg.NEED_PARTITION_SPECIFICATION.getMsg());
}
- // get all partitions that matches with the partition spec
- try {
- List<Partition> partitions = db.getPartitions(tab, partSpec);
+ List<Partition> partitions = qbp.getTableSpec().partitions;
+ if (partitions != null) {
for (Partition partn : partitions) {
// inputs.add(new ReadEntity(partn)); // is this needed at all?
outputs.add(new WriteEntity(partn));
- }
- } catch (HiveException e) {
- throw new SemanticException(e);
- }
+ }
+ }
}
}
}
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats12.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats12.q?rev=1022162&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats12.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats12.q Wed Oct 13 16:06:37 2010
@@ -0,0 +1,19 @@
+set datanucleus.cache.collections=false;
+set hive.stats.autogather=false;
+set hive.exec.dynamic.partition=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+create table analyze_srcpart like srcpart;
+insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null;
+
+explain extended
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics;
+
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics;
+
+desc extended analyze_srcpart;
+desc extended analyze_srcpart partition (ds='2008-04-08', hr=11);
+desc extended analyze_srcpart partition (ds='2008-04-08', hr=12);
+desc extended analyze_srcpart partition (ds='2008-04-09', hr=11);
+desc extended analyze_srcpart partition (ds='2008-04-09', hr=12);
+
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats13.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats13.q?rev=1022162&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats13.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/stats13.q Wed Oct 13 16:06:37 2010
@@ -0,0 +1,19 @@
+set datanucleus.cache.collections=false;
+set hive.stats.autogather=false;
+set hive.exec.dynamic.partition=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+create table analyze_srcpart like srcpart;
+insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null;
+
+explain extended
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
+
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
+
+desc extended analyze_srcpart;
+desc extended analyze_srcpart partition (ds='2008-04-08', hr=11);
+desc extended analyze_srcpart partition (ds='2008-04-08', hr=12);
+desc extended analyze_srcpart partition (ds='2008-04-09', hr=11);
+desc extended analyze_srcpart partition (ds='2008-04-09', hr=12);
+
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/stats12.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/stats12.q.out?rev=1022162&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/stats12.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/stats12.q.out Wed Oct 13 16:06:37 2010
@@ -0,0 +1,268 @@
+PREHOOK: query: create table analyze_srcpart like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table analyze_srcpart like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@analyze_srcpart
+PREHOOK: query: insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: query: insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain extended
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics
+PREHOOK: type: null
+POSTHOOK: query: explain extended
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics
+POSTHOOK: type: null
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+ (TOK_ANALYZE (TOK_TABTYPE analyze_srcpart (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr))))
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+ Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-0
+ Map Reduce
+ Alias -> Map Operator Tree:
+ analyze_srcpart
+ TableScan
+ alias: analyze_srcpart
+ Statistics Aggregation Key Prefix: analyze_srcpart/
+ GatherStats: true
+ Needs Tagging: false
+ Path -> Alias:
+ pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11 [analyze_srcpart]
+ pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12 [analyze_srcpart]
+ Path -> Partition:
+ pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11
+ Partition
+ base file name: hr=11
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 11
+ properties:
+ EXTERNAL FALSE
+ bucket_count -1
+ columns key,value
+ columns.types string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart
+ name analyze_srcpart
+ partition_columns ds/hr
+ serialization.ddl struct analyze_srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1286984927
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ EXTERNAL FALSE
+ bucket_count -1
+ columns key,value
+ columns.types string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart
+ name analyze_srcpart
+ partition_columns ds/hr
+ serialization.ddl struct analyze_srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1286984927
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: analyze_srcpart
+ name: analyze_srcpart
+ pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12
+ Partition
+ base file name: hr=12
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 12
+ properties:
+ EXTERNAL FALSE
+ bucket_count -1
+ columns key,value
+ columns.types string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart
+ name analyze_srcpart
+ partition_columns ds/hr
+ serialization.ddl struct analyze_srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1286984927
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ EXTERNAL FALSE
+ bucket_count -1
+ columns key,value
+ columns.types string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart
+ name analyze_srcpart
+ partition_columns ds/hr
+ serialization.ddl struct analyze_srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1286984927
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: analyze_srcpart
+ name: analyze_srcpart
+
+ Stage: Stage-1
+ Stats-Aggr Operator
+ Stats Aggregation Key Prefix: analyze_srcpart/
+
+
+PREHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics
+PREHOOK: type: null
+PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@analyze_srcpart
+PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
+PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
+POSTHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr) compute statistics
+POSTHOOK: type: null
+POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@analyze_srcpart
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc extended analyze_srcpart
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:null, createTime:1286984927, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=2, EXTERNAL=FALSE, numFile
s=2, transient_lastDdlTime=1286984952, numRows=1000, totalSize=11624}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1286984943, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286984952, numRows=500, totalSize=5812})
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1286984943, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286984952, numRows=500, totalSize=5812})
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-09, 11], dbName:default, tableName:analyze_srcpart, createTime:1286984944, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286984944})
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-09, 12], dbName:default, tableName:analyze_srcpart, createTime:1286984944, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286984944})
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/stats13.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/stats13.q.out?rev=1022162&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/stats13.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/stats13.q.out Wed Oct 13 16:06:37 2010
@@ -0,0 +1,220 @@
+PREHOOK: query: create table analyze_srcpart like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table analyze_srcpart like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@analyze_srcpart
+PREHOOK: query: insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: query: insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain extended
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
+PREHOOK: type: null
+POSTHOOK: query: explain extended
+analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
+POSTHOOK: type: null
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+ (TOK_ANALYZE (TOK_TABTYPE analyze_srcpart (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr 11))))
+
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+ Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-0
+ Map Reduce
+ Alias -> Map Operator Tree:
+ analyze_srcpart
+ TableScan
+ alias: analyze_srcpart
+ Statistics Aggregation Key Prefix: analyze_srcpart/
+ GatherStats: true
+ Needs Tagging: false
+ Path -> Alias:
+ pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11 [analyze_srcpart]
+ Path -> Partition:
+ pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11
+ Partition
+ base file name: hr=11
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ partition values:
+ ds 2008-04-08
+ hr 11
+ properties:
+ EXTERNAL FALSE
+ bucket_count -1
+ columns key,value
+ columns.types string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart
+ name analyze_srcpart
+ partition_columns ds/hr
+ serialization.ddl struct analyze_srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1286984954
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ EXTERNAL FALSE
+ bucket_count -1
+ columns key,value
+ columns.types string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart
+ name analyze_srcpart
+ partition_columns ds/hr
+ serialization.ddl struct analyze_srcpart { string key, string value}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1286984954
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: analyze_srcpart
+ name: analyze_srcpart
+
+ Stage: Stage-1
+ Stats-Aggr Operator
+ Stats Aggregation Key Prefix: analyze_srcpart/
+
+
+PREHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
+PREHOOK: type: null
+PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
+PREHOOK: Output: default@analyze_srcpart
+PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
+POSTHOOK: type: null
+POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@analyze_srcpart
+POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc extended analyze_srcpart
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:null, createTime:1286984954, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=1, EXTERNAL=FALSE, numFile
s=1, transient_lastDdlTime=1286984973, numRows=500, totalSize=5812}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1286984967, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286984973, numRows=500, totalSize=5812})
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-08', hr=12)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1286984967, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286984967})
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-09, 11], dbName:default, tableName:analyze_srcpart, createTime:1286984967, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286984967})
+PREHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended analyze_srcpart partition (ds='2008-04-09', hr=12)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+key string default
+value string default
+ds string
+hr string
+
+Detailed Partition Information Partition(values:[2008-04-09, 12], dbName:default, tableName:analyze_srcpart, createTime:1286984968, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286984968})
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/stats6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/stats6.q.out?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/stats6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/stats6.q.out Wed Oct 13 16:06:37 2010
@@ -30,17 +30,11 @@ POSTHOOK: Lineage: analyze_srcpart PARTI
PREHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
PREHOOK: type: null
PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: default@analyze_srcpart
PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
POSTHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
POSTHOOK: type: null
POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=12
POSTHOOK: Output: default@analyze_srcpart
POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -53,18 +47,12 @@ POSTHOOK: Lineage: analyze_srcpart PARTI
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics
PREHOOK: type: null
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: default@analyze_srcpart
PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
POSTHOOK: query: analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics
POSTHOOK: type: null
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=12
POSTHOOK: Output: default@analyze_srcpart
POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
POSTHOOK: Lineage: analyze_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -92,7 +80,7 @@ value string default
ds string
hr string
-Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1286826366, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286826369, numRows=500, totalSize=5812})
+Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1286985835, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286985841, numRows=500, totalSize=5812})
PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
@@ -110,7 +98,7 @@ value string default
ds string
hr string
-Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1286826366, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286826373, numRows=500, totalSize=5812})
+Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1286985835, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286985846, numRows=500, totalSize=5812})
PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
@@ -128,7 +116,7 @@ value string default
ds string
hr string
-Detailed Partition Information Partition(values:[2008-04-09, 11], dbName:default, tableName:analyze_srcpart, createTime:1286826366, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286826366})
+Detailed Partition Information Partition(values:[2008-04-09, 11], dbName:default, tableName:analyze_srcpart, createTime:1286985835, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286985835})
PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
@@ -146,7 +134,7 @@ value string default
ds string
hr string
-Detailed Partition Information Partition(values:[2008-04-09, 12], dbName:default, tableName:analyze_srcpart, createTime:1286826366, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286826366})
+Detailed Partition Information Partition(values:[2008-04-09, 12], dbName:default, tableName:analyze_srcpart, createTime:1286985836, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-09/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1286985836})
PREHOOK: query: describe extended analyze_srcpart
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe extended analyze_srcpart
@@ -164,4 +152,4 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:null, createTime:1286826360, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=2, EXTERNAL=FALSE, numFiles
=2, transient_lastDdlTime=1286826373, numRows=1000, totalSize=11624}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:null, createTime:1286985823, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=2, EXTERNAL=FALSE, numFile
s=2, transient_lastDdlTime=1286985846, numRows=1000, totalSize=11624}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/stats7.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/stats7.q.out?rev=1022162&r1=1022161&r2=1022162&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/stats7.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/stats7.q.out Wed Oct 13 16:06:37 2010
@@ -62,8 +62,6 @@ PREHOOK: query: analyze table analyze_sr
PREHOOK: type: null
PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: default@analyze_srcpart
PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
PREHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
@@ -71,8 +69,6 @@ POSTHOOK: query: analyze table analyze_s
POSTHOOK: type: null
POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@analyze_srcpart@ds=2008-04-09/hr=12
POSTHOOK: Output: default@analyze_srcpart
POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=11
POSTHOOK: Output: default@analyze_srcpart@ds=2008-04-08/hr=12
@@ -101,7 +97,7 @@ value string default
ds string
hr string
-Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1286826409, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286826413, numRows=500, totalSize=5812})
+Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1286950601, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286950608, numRows=500, totalSize=5812})
PREHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
@@ -119,7 +115,7 @@ value string default
ds string
hr string
-Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1286826409, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286826413, numRows=500, totalSize=5812})
+Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1286950602, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1286950608, numRows=500, totalSize=5812})
PREHOOK: query: describe extended analyze_srcpart
PREHOOK: type: DESCTABLE
POSTHOOK: query: describe extended analyze_srcpart
@@ -137,4 +133,4 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:null, createTime:1286826404, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=2, EXTERNAL=FALSE, numFiles
=2, transient_lastDdlTime=1286826413, numRows=1000, totalSize=11624}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:null, createTime:1286950591, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=2, EXTERNAL=FALSE, numFile
s=2, transient_lastDdlTime=1286950608, numRows=1000, totalSize=11624}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)