You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by he...@apache.org on 2010/11/12 01:11:26 UTC
svn commit: r1034219 [1/5] - in /hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/ql/hooks/
ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/
ql/src/java/org/apache/hadoop/hive/ql/metadata/ ql/src/java/org...
Author: heyongqiang
Date: Fri Nov 12 00:11:24 2010
New Revision: 1034219
URL: http://svn.apache.org/viewvc?rev=1034219&view=rev
Log:
HIVE-1781 outputs not populated for dynamic partitions at compile time. (namit via He Yongqiang)
Added:
hive/trunk/ql/src/test/queries/clientpositive/lock3.q
hive/trunk/ql/src/test/results/clientpositive/lock3.q.out
Modified:
hive/trunk/CHANGES.txt
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out
hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
hive/trunk/ql/src/test/results/clientpositive/input13.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part15.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part3.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part4.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part5.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part6.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part8.q.out
hive/trunk/ql/src/test/results/clientpositive/load_dyn_part9.q.out
hive/trunk/ql/src/test/results/clientpositive/merge3.q.out
hive/trunk/ql/src/test/results/clientpositive/merge4.q.out
hive/trunk/ql/src/test/results/clientpositive/merge_dynamic_partition.q.out
hive/trunk/ql/src/test/results/clientpositive/mi.q.out
hive/trunk/ql/src/test/results/clientpositive/null_column.q.out
hive/trunk/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
hive/trunk/ql/src/test/results/clientpositive/sample10.q.out
hive/trunk/ql/src/test/results/clientpositive/stats12.q.out
hive/trunk/ql/src/test/results/clientpositive/stats13.q.out
hive/trunk/ql/src/test/results/clientpositive/stats2.q.out
hive/trunk/ql/src/test/results/clientpositive/stats4.q.out
hive/trunk/ql/src/test/results/clientpositive/stats6.q.out
hive/trunk/ql/src/test/results/clientpositive/stats7.q.out
hive/trunk/ql/src/test/results/clientpositive/stats8.q.out
Modified: hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hive/trunk/CHANGES.txt?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/CHANGES.txt (original)
+++ hive/trunk/CHANGES.txt Fri Nov 12 00:11:24 2010
@@ -491,6 +491,9 @@ Trunk - Unreleased
HIVE-1583 Hive cannot overwrite HADOOP_CLASSPATH
(Thiruvel Thirumoolan via namit)
+ HIVE-1781 outputs not populated for dynamic partitions at compile time
+ (namit via He Yongqiang)
+
TESTS
HIVE-1464. improve test query performance
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Nov 12 00:11:24 2010
@@ -24,6 +24,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
+import java.util.HashSet;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@@ -65,6 +66,7 @@ import org.apache.hadoop.hive.ql.lockmgr
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -430,7 +432,7 @@ public class Driver implements CommandPr
* Get the list of objects to be locked. If a partition needs to be locked (in any mode), all its parents
* should also be locked in SHARED mode.
**/
- private List<LockObject> getLockObjects(Table t, Partition p, HiveLockMode mode) {
+ private List<LockObject> getLockObjects(Table t, Partition p, HiveLockMode mode) throws SemanticException {
List<LockObject> locks = new LinkedList<LockObject>();
if (t != null) {
@@ -444,16 +446,27 @@ public class Driver implements CommandPr
// All the parents are locked in shared mode
mode = HiveLockMode.SHARED;
- String partName = p.getName();
+ // For dummy partitions, only the partition name is needed
+ String name = p.getName();
+ if (p instanceof DummyPartition) {
+ name = p.getName().split("@")[2];
+ }
+
+ String partName = name;
String partialName = "";
- String[] partns = p.getName().split("/");
+ String[] partns = name.split("/");
for (int idx = 0; idx < partns.length -1; idx++) {
String partn = partns[idx];
partialName += partialName + partn;
- locks.add(new LockObject(new HiveLockObject(
- new DummyPartition(p.getTable().getDbName() + "@" + p.getTable().getTableName() + "@" + partialName),
- plan.getQueryId()), mode));
- partialName += "/";
+ try {
+ locks.add(new LockObject(new HiveLockObject(
+ new DummyPartition(p.getTable(),
+ p.getTable().getDbName() + "@" + p.getTable().getTableName() + "@" + partialName),
+ plan.getQueryId()), mode));
+ partialName += "/";
+ } catch (HiveException e) {
+ throw new SemanticException(e.getMessage());
+ }
}
locks.add(new LockObject(new HiveLockObject(p.getTable(), plan.getQueryId()), mode));
@@ -493,11 +506,16 @@ public class Driver implements CommandPr
for (WriteEntity output : plan.getOutputs()) {
if (output.getTyp() == WriteEntity.Type.TABLE) {
- lockObjects.addAll(getLockObjects(output.getTable(), null, HiveLockMode.EXCLUSIVE));
+ lockObjects.addAll(getLockObjects(output.getTable(), null,
+ output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED));
}
else if (output.getTyp() == WriteEntity.Type.PARTITION) {
lockObjects.addAll(getLockObjects(null, output.getPartition(), HiveLockMode.EXCLUSIVE));
}
+ // In case of dynamic queries, it is possible to have incomplete dummy partitions
+ else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
+ lockObjects.addAll(getLockObjects(null, output.getPartition(), HiveLockMode.SHARED));
+ }
}
if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
@@ -809,6 +827,20 @@ public class Driver implements CommandPr
// the jobtracker setting to its initial value
ctx.restoreOriginalTracker();
+ // remove incomplete outputs.
+ // Some incomplete outputs may be added at the beginning, for eg: for dynamic partitions.
+ // remove them
+ HashSet<WriteEntity> remOutputs = new HashSet<WriteEntity>();
+ for (WriteEntity output : plan.getOutputs()) {
+ if (!output.isComplete()) {
+ remOutputs.add(output);
+ }
+ }
+
+ for (WriteEntity output : remOutputs) {
+ plan.getOutputs().remove(output);
+ }
+
// Get all the post execution hooks and execute them.
for (PostExecute peh : getPostExecHooks()) {
peh.run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java Fri Nov 12 00:11:24 2010
@@ -22,6 +22,7 @@ import java.io.Serializable;
import java.net.URI;
import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Table;
/**
@@ -35,7 +36,7 @@ public class WriteEntity implements Seri
* The type of the write entity.
*/
public static enum Type {
- TABLE, PARTITION, DFS_DIR, LOCAL_DIR
+ TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
};
/**
@@ -64,6 +65,20 @@ public class WriteEntity implements Seri
*/
private String name;
+ /**
+ * Whether the output is complete or not. For eg, in case of dynamic partitions, the complete output
+ * may not be known
+ */
+ private boolean complete;
+
+ public boolean isComplete() {
+ return complete;
+ }
+
+ public void setComplete(boolean complete) {
+ this.complete = complete;;
+ }
+
public String getName() {
return name;
}
@@ -117,11 +132,16 @@ public class WriteEntity implements Seri
* Table that is written to.
*/
public WriteEntity(Table t) {
+ this(t, true);
+ }
+
+ public WriteEntity(Table t, boolean complete) {
d = null;
p = null;
this.t = t;
typ = Type.TABLE;
name = computeName();
+ this.complete = complete;
}
/**
@@ -131,11 +151,25 @@ public class WriteEntity implements Seri
* Partition that is written to.
*/
public WriteEntity(Partition p) {
+ this(p, true);
+ }
+
+ public WriteEntity(Partition p, boolean complete) {
d = null;
this.p = p;
t = p.getTable();
typ = Type.PARTITION;
name = computeName();
+ this.complete = complete;
+ }
+
+ public WriteEntity(DummyPartition p, boolean complete) {
+ d = null;
+ this.p = p;
+ t = p.getTable();
+ typ = Type.DUMMYPARTITION;
+ name = computeName();
+ this.complete = complete;
}
/**
@@ -147,6 +181,10 @@ public class WriteEntity implements Seri
* Flag to decide whether this directory is local or in dfs.
*/
public WriteEntity(String d, boolean islocal) {
+ this(d, islocal, true);
+ }
+
+ public WriteEntity(String d, boolean islocal, boolean complete) {
this.d = d;
p = null;
t = null;
@@ -156,6 +194,7 @@ public class WriteEntity implements Seri
typ = Type.DFS_DIR;
}
name = computeName();
+ this.complete = complete;
}
/**
@@ -212,6 +251,8 @@ public class WriteEntity implements Seri
return t.getDbName() + "@" + t.getTableName();
case PARTITION:
return t.getDbName() + "@" + t.getTableName() + "@" + p.getName();
+ case DUMMYPARTITION:
+ return p.getName();
default:
return d;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java Fri Nov 12 00:11:24 2010
@@ -330,7 +330,7 @@ public class ZooKeeperHiveLockManager im
}
if (partn == null) {
- return new HiveLockObject(new DummyPartition(
+ return new HiveLockObject(new DummyPartition(tab,
objName.split("/")[1].replaceAll(conf.getVar(HiveConf.ConfVars.DEFAULT_ZOOKEEPER_PARTITION_NAME), "/")),
data);
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java Fri Nov 12 00:11:24 2010
@@ -38,7 +38,8 @@ public class DummyPartition extends Part
public DummyPartition() {
}
- public DummyPartition(String name) {
+ public DummyPartition(Table tbl, String name) throws HiveException {
+ setTable(tbl);
this.name = name;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Nov 12 00:11:24 2010
@@ -87,6 +87,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1;
@@ -3513,6 +3514,28 @@ public class SemanticAnalyzer extends Ba
throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
.getMsg(dest_tab.getTableName()));
}
+ if ((dpCtx != null) && (dpCtx.getNumDPCols() >= 0)) {
+ // No static partition specified
+ if (dpCtx.getNumSPCols() == 0) {
+ outputs.add(new WriteEntity(dest_tab, false));
+ }
+ // part of the partition specified
+ // Create a DummyPartition in this case. Since, the metastore does not store partial
+ // partitions currently, we need to store dummy partitions
+ else {
+ try {
+ String ppath = dpCtx.getSPPath();
+ ppath = ppath.substring(0, ppath.length()-1);
+ DummyPartition p = new DummyPartition(dest_tab,
+ dest_tab.getDbName() + "@" + dest_tab.getTableName() + "@" + ppath);
+
+ outputs.add(new WriteEntity(p, false));
+ } catch (HiveException e) {
+ throw new SemanticException(e.getMessage());
+ }
+ }
+ }
+
break;
}
case QBMetaData.DEST_PARTITION: {
Added: hive/trunk/ql/src/test/queries/clientpositive/lock3.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/lock3.q?rev=1034219&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/lock3.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/lock3.q Fri Nov 12 00:11:24 2010
@@ -0,0 +1,32 @@
+drop table tstsrcpart;
+create table tstsrcpart like srcpart;
+
+from srcpart
+insert overwrite table tstsrcpart partition (ds='2008-04-08',hr='11')
+select key, value where ds='2008-04-08' and hr='11';
+
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.exec.dynamic.partition=true;
+
+
+from srcpart
+insert overwrite table tstsrcpart partition (ds, hr) select key, value, ds, hr where ds <= '2008-04-08';
+
+from srcpart
+insert overwrite table tstsrcpart partition (ds ='2008-04-08', hr) select key, value, hr where ds = '2008-04-08';
+
+
+SHOW LOCKS;
+SHOW LOCKS tstsrcpart;
+
+drop table tstsrcpart;
+
+drop table tst1;
+create table tst1 (key string, value string) partitioned by (a string, b string, c string, d string);
+
+
+from srcpart
+insert overwrite table tst1 partition (a='1', b='2', c, d) select key, value, ds, hr where ds = '2008-04-08';
+
+
+drop table tst1;
Modified: hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/dyn_part3.q.out Fri Nov 12 00:11:24 2010
@@ -6,4 +6,5 @@ POSTHOOK: Output: default@nzhang_part
PREHOOK: query: insert overwrite table nzhang_part partition(value) select key, value from src
PREHOOK: type: QUERY
PREHOOK: Input: default@src
+PREHOOK: Output: default@nzhang_part
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
Modified: hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/combine2.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/combine2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/combine2.q.out Fri Nov 12 00:11:24 2010
@@ -12,6 +12,7 @@ select * from (
select key, '2010-04-21 09:45:00' value from src where key = 19) s
PREHOOK: type: QUERY
PREHOOK: Input: default@src
+PREHOOK: Output: default@combine2
POSTHOOK: query: insert overwrite table combine2 partition(value)
select * from (
select key, value from src where key < 10
@@ -131,7 +132,7 @@ PREHOOK: Input: default@combine2@value=v
PREHOOK: Input: default@combine2@value=val_8
PREHOOK: Input: default@combine2@value=val_9
PREHOOK: Input: default@combine2@value=|
-PREHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-09-08_705_8639115732726601579/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-25-40_498_5843958235995711679/-mr-10000
POSTHOOK: query: select key, value from combine2 where value is not null order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
@@ -142,7 +143,6 @@ POSTHOOK: Input: default@combine2@value=
POSTHOOK: Input: default@combine2@value=val_8
POSTHOOK: Input: default@combine2@value=val_9
POSTHOOK: Input: default@combine2@value=|
-POSTHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-09-08_705_8639115732726601579/-mr-10000
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -212,16 +212,16 @@ STAGE PLANS:
type: bigint
Needs Tagging: false
Path -> Alias:
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_0 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_2 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_4 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_5 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_8 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_9 [combine2]
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=| [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_0 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_2 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_4 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_5 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_8 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_9 [combine2]
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=| [combine2]
Path -> Partition:
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00
Partition
base file name: value=2010-04-21 09%3A45%3A00
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -234,7 +234,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -244,7 +244,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -255,7 +255,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -265,11 +265,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_0
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_0
Partition
base file name: value=val_0
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -282,7 +282,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -292,7 +292,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -303,7 +303,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -313,11 +313,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_2
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_2
Partition
base file name: value=val_2
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -330,7 +330,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -340,7 +340,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -351,7 +351,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -361,11 +361,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_4
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_4
Partition
base file name: value=val_4
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -378,7 +378,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -388,7 +388,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -399,7 +399,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -409,11 +409,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_5
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_5
Partition
base file name: value=val_5
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -426,7 +426,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -436,7 +436,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -447,7 +447,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -457,11 +457,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_8
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_8
Partition
base file name: value=val_8
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -474,7 +474,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -484,7 +484,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -495,7 +495,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -505,11 +505,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=val_9
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=val_9
Partition
base file name: value=val_9
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -522,7 +522,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -532,7 +532,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -543,7 +543,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -553,11 +553,11 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
- pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2/value=|
+ pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2/value=|
Partition
base file name: value=|
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -570,7 +570,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -580,7 +580,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -591,7 +591,7 @@ STAGE PLANS:
columns.types string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/combine2
+ location pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/combine2
name combine2
numFiles 8
numPartitions 8
@@ -601,7 +601,7 @@ STAGE PLANS:
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 26
- transient_lastDdlTime 1288390148
+ transient_lastDdlTime 1289453139
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: combine2
name: combine2
@@ -620,9 +620,9 @@ STAGE PLANS:
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/tmp/sdong/hive_2010-10-29_15-09-16_404_5207580937031777579/-ext-10001
+ directory: file:/tmp/njain/hive_2010-11-10_21-25-53_460_137319524724354518/-ext-10001
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: file:/tmp/sdong/hive_2010-10-29_15-09-16_404_5207580937031777579/-ext-10001/
+ Stats Publishing Key Prefix: file:/tmp/njain/hive_2010-11-10_21-25-53_460_137319524724354518/-ext-10001/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -649,7 +649,7 @@ PREHOOK: Input: default@combine2@value=v
PREHOOK: Input: default@combine2@value=val_8
PREHOOK: Input: default@combine2@value=val_9
PREHOOK: Input: default@combine2@value=|
-PREHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-09-16_652_1569732367286849449/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-25-53_911_2048118817573245123/-mr-10000
POSTHOOK: query: select count(1) from combine2 where value is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
@@ -660,7 +660,6 @@ POSTHOOK: Input: default@combine2@value=
POSTHOOK: Input: default@combine2@value=val_8
POSTHOOK: Input: default@combine2@value=val_9
POSTHOOK: Input: default@combine2@value=|
-POSTHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-09-16_652_1569732367286849449/-mr-10000
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -763,14 +762,13 @@ PREHOOK: Input: default@srcpart@ds=2008-
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-09-25_320_2193048873902233496/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-26-09_276_219776477941227748/-mr-10000
POSTHOOK: query: select ds, count(1) from srcpart where ds is not null group by ds
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-09-25_320_2193048873902233496/-mr-10000
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/input13.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input13.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input13.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/input13.q.out Fri Nov 12 00:11:24 2010
@@ -156,7 +156,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: pfile:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10000
+ destination: pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10000
Stage: Stage-0
Move Operator
@@ -174,7 +174,7 @@ STAGE PLANS:
Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- pfile:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10007
+ pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10007
File Output Operator
compressed: false
GlobalTableId: 0
@@ -191,7 +191,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: pfile:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10002
+ destination: pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10002
Stage: Stage-1
Move Operator
@@ -209,7 +209,7 @@ STAGE PLANS:
Stage: Stage-10
Map Reduce
Alias -> Map Operator Tree:
- pfile:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10008
+ pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10008
File Output Operator
compressed: false
GlobalTableId: 0
@@ -226,7 +226,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: pfile:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10004
+ destination: pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10004
Stage: Stage-2
Move Operator
@@ -247,7 +247,7 @@ STAGE PLANS:
Stage: Stage-14
Map Reduce
Alias -> Map Operator Tree:
- pfile:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10009
+ pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10009
File Output Operator
compressed: false
GlobalTableId: 0
@@ -264,7 +264,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: file:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10006
+ destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10006
Stage: Stage-3
Move Operator
@@ -275,7 +275,7 @@ STAGE PLANS:
Stage: Stage-17
Map Reduce
Alias -> Map Operator Tree:
- file:/data/users/nzhang/work/784/apache-hive/build/ql/scratchdir/hive_2010-09-14_16-26-47_955_4620564381440920404/-ext-10010
+ file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-11_09-14-34_733_8268759755961112825/-ext-10010
File Output Operator
compressed: false
GlobalTableId: 0
@@ -314,11 +314,11 @@ POSTHOOK: Lineage: dest3 PARTITION(ds=20
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-26-57_288_6309833081797050565/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-11_09-14-45_811_5072715276806685730/-mr-10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-26-57_288_6309833081797050565/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-11-11_09-14-45_811_5072715276806685730/-mr-10000
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -411,11 +411,11 @@ POSTHOOK: Lineage: dest3 PARTITION(ds=20
PREHOOK: query: SELECT dest2.* FROM dest2
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
-PREHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-26-57_636_3380977971901935333/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-11_09-14-46_148_1275462776207288463/-mr-10000
POSTHOOK: query: SELECT dest2.* FROM dest2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
-POSTHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-26-57_636_3380977971901935333/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-11-11_09-14-46_148_1275462776207288463/-mr-10000
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -529,11 +529,11 @@ POSTHOOK: Lineage: dest3 PARTITION(ds=20
PREHOOK: query: SELECT dest3.* FROM dest3
PREHOOK: type: QUERY
PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-26-57_986_850981660460088519/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-11_09-14-46_464_8762219816637993727/-mr-10000
POSTHOOK: query: SELECT dest3.* FROM dest3
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/tmp/nzhang/hive_2010-09-14_16-26-57_986_850981660460088519/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-11-11_09-14-46_464_8762219816637993727/-mr-10000
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out Fri Nov 12 00:11:24 2010
@@ -25,7 +25,7 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:nzhang_part1, dbName:default, owner:null, createTime:1286798987, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/nzhang_part1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=128679898
7}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part1, dbName:default, owner:null, createTime:1289453198, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453198}, viewOriginalText:null, vi
ewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: explain
from srcpart
insert overwrite table nzhang_part1 partition (ds, hr) select key, value, ds, hr where ds <= '2008-04-08'
@@ -111,7 +111,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/scratchdir/hive_2010-10-11_05-09-47_279_3359280374458847412/-ext-10000
+ destination: pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-10_21-26-38_281_4710587136591193673/-ext-10000
Stage: Stage-0
Move Operator
@@ -132,7 +132,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/scratchdir/hive_2010-10-11_05-09-47_279_3359280374458847412/-ext-10004
+ pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-10_21-26-38_281_4710587136591193673/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -149,7 +149,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/scratchdir/hive_2010-10-11_05-09-47_279_3359280374458847412/-ext-10002
+ destination: pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-10_21-26-38_281_4710587136591193673/-ext-10002
Stage: Stage-1
Move Operator
@@ -170,7 +170,7 @@ STAGE PLANS:
Stage: Stage-8
Map Reduce
Alias -> Map Operator Tree:
- pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/scratchdir/hive_2010-10-11_05-09-47_279_3359280374458847412/-ext-10005
+ pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-10_21-26-38_281_4710587136591193673/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -189,6 +189,8 @@ PREHOOK: Input: default@srcpart@ds=2008-
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@nzhang_part1
+PREHOOK: Output: default@nzhang_part2@ds=2008-12-31
POSTHOOK: query: from srcpart
insert overwrite table nzhang_part1 partition (ds, hr) select key, value, ds, hr where ds <= '2008-04-08'
insert overwrite table nzhang_part2 partition(ds='2008-12-31', hr) select key, value, hr where ds > '2008-04-08'
@@ -241,12 +243,11 @@ PREHOOK: query: select * from nzhang_par
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11
PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-09-55_752_5604915481568921978/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-27-03_627_6659113528230212839/-mr-10000
POSTHOOK: query: select * from nzhang_part1 where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11
POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-09-55_752_5604915481568921978/-mr-10000
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -1259,12 +1260,11 @@ PREHOOK: query: select * from nzhang_par
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11
PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-09-55_992_218214971900824137/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-27-04_626_7101552493744042234/-mr-10000
POSTHOOK: query: select * from nzhang_part2 where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11
POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-09-55_992_218214971900824137/-mr-10000
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part1 PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out Fri Nov 12 00:11:24 2010
@@ -20,7 +20,7 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:nzhang_part10, dbName:default, owner:null, createTime:1288393073, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/nzhang_part10, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1288393073}, viewOriginalText:nul
l, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part10, dbName:default, owner:null, createTime:1289453228, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part10, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453228}, viewOriginalText:null,
viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: explain
from srcpart
insert overwrite table nzhang_part10 partition(ds='2008-12-31', hr) select key, value, hr where ds > '2008-04-08'
@@ -88,6 +88,7 @@ insert overwrite table nzhang_part10 par
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@nzhang_part10@ds=2008-12-31
POSTHOOK: query: from srcpart
insert overwrite table nzhang_part10 partition(ds='2008-12-31', hr) select key, value, hr where ds > '2008-04-08'
POSTHOOK: type: QUERY
@@ -113,12 +114,11 @@ PREHOOK: query: select * from nzhang_par
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part10@ds=2008-12-31/hr=11
PREHOOK: Input: default@nzhang_part10@ds=2008-12-31/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-57-59_498_7952761244343054680/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-27-18_154_26666520749874874/-mr-10000
POSTHOOK: query: select * from nzhang_part10 where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part10@ds=2008-12-31/hr=11
POSTHOOK: Input: default@nzhang_part10@ds=2008-12-31/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-57-59_498_7952761244343054680/-mr-10000
POSTHOOK: Lineage: nzhang_part10 PARTITION(ds=2008-12-31,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part10 PARTITION(ds=2008-12-31,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part10 PARTITION(ds=2008-12-31,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out Fri Nov 12 00:11:24 2010
@@ -20,13 +20,14 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:nzhang_part, dbName:default, owner:null, createTime:1286799666, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/nzhang_part, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=1286799666}
, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part, dbName:default, owner:null, createTime:1289453239, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453239}, viewOriginalText:null, view
ExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: insert overwrite table nzhang_part partition (ds="2010-03-03", hr) select key, value, hr from srcpart where ds is not null and hr is not null
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@nzhang_part@ds=2010-03-03
POSTHOOK: query: insert overwrite table nzhang_part partition (ds="2010-03-03", hr) select key, value, hr from srcpart where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -42,11 +43,10 @@ POSTHOOK: Lineage: nzhang_part PARTITION
PREHOOK: query: select * from nzhang_part where ds = '2010-03-03' and hr = '11'
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part@ds=2010-03-03/hr=11
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-21-11_272_2801336221684001752/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-27-36_014_3619304251939498199/-mr-10000
POSTHOOK: query: select * from nzhang_part where ds = '2010-03-03' and hr = '11'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part@ds=2010-03-03/hr=11
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-21-11_272_2801336221684001752/-mr-10000
POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-03-03,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-03-03,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-03-03,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -1054,11 +1054,10 @@ POSTHOOK: Lineage: nzhang_part PARTITION
PREHOOK: query: select * from nzhang_part where ds = '2010-03-03' and hr = '12'
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part@ds=2010-03-03/hr=12
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-21-11_487_7161669085122399362/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-27-36_621_1852795362976064707/-mr-10000
POSTHOOK: query: select * from nzhang_part where ds = '2010-03-03' and hr = '12'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part@ds=2010-03-03/hr=12
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-21-11_487_7161669085122399362/-mr-10000
POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-03-03,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-03-03,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-03-03,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out Fri Nov 12 00:11:24 2010
@@ -20,13 +20,14 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:nzhang_part12, dbName:default, owner:null, createTime:1286800053, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/nzhang_part12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=1286800
053}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part12, dbName:default, owner:null, createTime:1289453258, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453258}, viewOriginalText:null,
viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: insert overwrite table nzhang_part12 partition (ds="2010-03-03", hr) select key, value, cast(hr*2 as int) from srcpart where ds is not null and hr is not null
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@nzhang_part12@ds=2010-03-03
POSTHOOK: query: insert overwrite table nzhang_part12 partition (ds="2010-03-03", hr) select key, value, cast(hr*2 as int) from srcpart where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -53,12 +54,11 @@ PREHOOK: query: select * from nzhang_par
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part12@ds=2010-03-03/hr=22
PREHOOK: Input: default@nzhang_part12@ds=2010-03-03/hr=24
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-27-38_551_8394516671014137879/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-27-49_907_1622008845309101203/-mr-10000
POSTHOOK: query: select * from nzhang_part12 where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part12@ds=2010-03-03/hr=22
POSTHOOK: Input: default@nzhang_part12@ds=2010-03-03/hr=24
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-27-38_551_8394516671014137879/-mr-10000
POSTHOOK: Lineage: nzhang_part12 PARTITION(ds=2010-03-03,hr=22).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part12 PARTITION(ds=2010-03-03,hr=22).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part12 PARTITION(ds=2010-03-03,hr=24).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out Fri Nov 12 00:11:24 2010
@@ -20,7 +20,7 @@ value string default
ds string
hr string
-Detailed Table Information Table(tableName:nzhang_part13, dbName:default, owner:null, createTime:1286798123, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/nzhang_part13, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{EXTERNAL=FALSE, transient_lastDdlTime=1286798
123}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part13, dbName:default, owner:null, createTime:1289453271, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part13, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453271}, viewOriginalText:null,
viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: explain
insert overwrite table nzhang_part13 partition (ds="2010-03-03", hr)
select * from (
@@ -160,6 +160,7 @@ select * from (
where key > 20 and key < 40) s
PREHOOK: type: QUERY
PREHOOK: Input: default@src
+PREHOOK: Output: default@nzhang_part13@ds=2010-03-03
POSTHOOK: query: insert overwrite table nzhang_part13 partition (ds="2010-03-03", hr)
select * from (
select key, value, '22'
@@ -191,12 +192,11 @@ PREHOOK: query: select * from nzhang_par
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part13@ds=2010-03-03/hr=22
PREHOOK: Input: default@nzhang_part13@ds=2010-03-03/hr=33
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_04-55-28_371_556231922920224175/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-28-04_222_1253169937473713651/-mr-10000
POSTHOOK: query: select * from nzhang_part13 where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part13@ds=2010-03-03/hr=22
POSTHOOK: Input: default@nzhang_part13@ds=2010-03-03/hr=33
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_04-55-28_371_556231922920224175/-mr-10000
POSTHOOK: Lineage: nzhang_part13 PARTITION(ds=2010-03-03,hr=22).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part13 PARTITION(ds=2010-03-03,hr=22).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part13 PARTITION(ds=2010-03-03,hr=33).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out Fri Nov 12 00:11:24 2010
@@ -12,7 +12,7 @@ POSTHOOK: type: DESCTABLE
key string
value string
-Detailed Table Information Table(tableName:nzhang_part14, dbName:default, owner:thiruvel, createTime:1286798715, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/test/data/warehouse/nzhang_part14, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:value, type:string, comment:null)], parameters:{transient_lastDdlTime=1286798715}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part14, dbName:default, owner:njain, createTime:1289453285, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part14, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:value, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453285}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: explain
insert overwrite table nzhang_part14 partition(value)
select key, value from (
@@ -83,7 +83,7 @@ STAGE PLANS:
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- file:/tmp/thiruvel/hive_2010-10-11_05-05-15_637_3528259232701729713/-mr-10002
+ file:/tmp/njain/hive_2010-11-10_21-28-06_008_7897968784053489362/-mr-10002
Union
Select Operator
expressions:
@@ -100,7 +100,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: nzhang_part14
- file:/tmp/thiruvel/hive_2010-10-11_05-05-15_637_3528259232701729713/-mr-10004
+ file:/tmp/njain/hive_2010-11-10_21-28-06_008_7897968784053489362/-mr-10004
Union
Select Operator
expressions:
@@ -117,7 +117,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: nzhang_part14
- file:/tmp/thiruvel/hive_2010-10-11_05-05-15_637_3528259232701729713/-mr-10005
+ file:/tmp/njain/hive_2010-11-10_21-28-06_008_7897968784053489362/-mr-10005
Union
Select Operator
expressions:
@@ -142,7 +142,7 @@ STAGE PLANS:
Move Operator
files:
hdfs directory: true
- destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/scratchdir/hive_2010-10-11_05-05-15_637_3528259232701729713/-ext-10000
+ destination: pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-10_21-28-06_008_7897968784053489362/-ext-10000
Stage: Stage-0
Move Operator
@@ -162,7 +162,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- pfile:/home/thiruvel/projects/hive/hive.unsecure/build/ql/scratchdir/hive_2010-10-11_05-05-15_637_3528259232701729713/-ext-10003
+ pfile:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-11-10_21-28-06_008_7897968784053489362/-ext-10003
File Output Operator
compressed: false
GlobalTableId: 0
@@ -247,6 +247,7 @@ select key, value from (
) T
PREHOOK: type: QUERY
PREHOOK: Input: default@src
+PREHOOK: Output: default@nzhang_part14
POSTHOOK: query: insert overwrite table nzhang_part14 partition(value)
select key, value from (
select 'k1' as key, cast(null as string) as value from src limit 2
@@ -274,13 +275,12 @@ order by key, value
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part14@value=
PREHOOK: Input: default@nzhang_part14@value=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-05-29_009_6719072167481153057/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-28-37_957_3443086512551503511/-mr-10000
POSTHOOK: query: select * from nzhang_part14 where value <> 'a'
order by key, value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part14@value=
POSTHOOK: Input: default@nzhang_part14@value=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-05-29_009_6719072167481153057/-mr-10000
POSTHOOK: Lineage: nzhang_part14 PARTITION(value= ).key EXPRESSION []
POSTHOOK: Lineage: nzhang_part14 PARTITION(value=__HIVE_DEFAULT_PARTITION__).key EXPRESSION []
k1 __HIVE_DEFAULT_PARTITION__
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part15.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part15.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part15.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part15.q.out Fri Nov 12 00:11:24 2010
@@ -13,6 +13,7 @@ PREHOOK: query: INSERT OVERWRITE TABLE l
SELECT key, part_key FROM src LATERAL VIEW explode(array("1","{2","3]")) myTable AS part_key
PREHOOK: type: QUERY
PREHOOK: Input: default@src
+PREHOOK: Output: default@load_dyn_part15_test
POSTHOOK: query: INSERT OVERWRITE TABLE load_dyn_part15_test PARTITION(part_key)
SELECT key, part_key FROM src LATERAL VIEW explode(array("1","{2","3]")) myTable AS part_key
POSTHOOK: type: QUERY
Modified: hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out?rev=1034219&r1=1034218&r2=1034219&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out Fri Nov 12 00:11:24 2010
@@ -16,7 +16,7 @@ value string
ds string
hr string
-Detailed Table Information Table(tableName:nzhang_part_bucket, dbName:default, owner:sdong, createTime:1288393145, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/sdong/www/hive-trunk/build/ql/test/data/warehouse/nzhang_part_bucket, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:10, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[key], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1288393145}, viewOriginal
Text:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information Table(tableName:nzhang_part_bucket, dbName:default, owner:njain, createTime:1289453643, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:pfile:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/nzhang_part_bucket, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:10, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[key], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=1289453643}, viewOriginalTex
t:null, viewExpandedText:null, tableType:MANAGED_TABLE)
PREHOOK: query: explain
insert overwrite table nzhang_part_bucket partition (ds='2010-03-23', hr) select key, value, hr from srcpart where ds is not null and hr is not null
PREHOOK: type: QUERY
@@ -98,6 +98,7 @@ PREHOOK: Input: default@srcpart@ds=2008-
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@nzhang_part_bucket@ds=2010-03-23
POSTHOOK: query: insert overwrite table nzhang_part_bucket partition (ds='2010-03-23', hr) select key, value, hr from srcpart where ds is not null and hr is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -123,11 +124,10 @@ ds=2010-03-23/hr=12
PREHOOK: query: select * from nzhang_part_bucket where ds='2010-03-23' and hr='11' order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part_bucket@ds=2010-03-23/hr=11
-PREHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-59-19_519_8402831698340809056/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-34-22_267_1694715967576226862/-mr-10000
POSTHOOK: query: select * from nzhang_part_bucket where ds='2010-03-23' and hr='11' order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part_bucket@ds=2010-03-23/hr=11
-POSTHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-59-19_519_8402831698340809056/-mr-10000
POSTHOOK: Lineage: nzhang_part_bucket PARTITION(ds=2010-03-23,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part_bucket PARTITION(ds=2010-03-23,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part_bucket PARTITION(ds=2010-03-23,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -1135,11 +1135,10 @@ POSTHOOK: Lineage: nzhang_part_bucket PA
PREHOOK: query: select * from nzhang_part_bucket where ds='2010-03-23' and hr='12' order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@nzhang_part_bucket@ds=2010-03-23/hr=12
-PREHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-59-25_376_748477169614908313/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-11-10_21-34-29_769_8327141585204390266/-mr-10000
POSTHOOK: query: select * from nzhang_part_bucket where ds='2010-03-23' and hr='12' order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@nzhang_part_bucket@ds=2010-03-23/hr=12
-POSTHOOK: Output: file:/tmp/sdong/hive_2010-10-29_15-59-25_376_748477169614908313/-mr-10000
POSTHOOK: Lineage: nzhang_part_bucket PARTITION(ds=2010-03-23,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part_bucket PARTITION(ds=2010-03-23,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: nzhang_part_bucket PARTITION(ds=2010-03-23,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]