Posted to commits@hive.apache.org by rm...@apache.org on 2009/09/29 03:25:30 UTC

svn commit: r819792 [9/24] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apac...

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input3_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input3_limit.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input3_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input3_limit.q.out Tue Sep 29 01:25:15 2009
@@ -1,11 +1,37 @@
-query: DROP TABLE T1
-query: CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE
-query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
-query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1
-query: DROP TABLE T2
-query: CREATE TABLE T2(key STRING, value STRING)
-query: EXPLAIN 
+PREHOOK: query: DROP TABLE T1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE T1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: type: LOAD
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@t1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1
+PREHOOK: type: LOAD
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@t1
+PREHOOK: query: DROP TABLE T2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE T2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE T2(key STRING, value STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T2(key STRING, value STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T2
+PREHOOK: query: EXPLAIN 
 INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN 
+INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF T1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) T)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB T2)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 20)))
 
@@ -64,7 +90,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1953025922/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1571190278/10002 
             Reduce Output Operator
               sort order: 
               tag: -1
@@ -96,12 +122,22 @@
               name: t2
 
 
-query: INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
-Input: default/t1
-Output: default/t2
-query: SELECT * FROM T2 SORT BY key, value
-Input: default/t2
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/741107383/10000
+PREHOOK: query: INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+PREHOOK: Output: default@t2
+POSTHOOK: query: INSERT OVERWRITE TABLE T2 SELECT * FROM (SELECT * FROM T1 DISTRIBUTE BY key SORT BY key, value) T LIMIT 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@t2
+PREHOOK: query: SELECT * FROM T2 SORT BY key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t2
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2048970411/10000
+POSTHOOK: query: SELECT * FROM T2 SORT BY key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t2
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2048970411/10000
 0	val_0
 0	val_0
 0	val_0
@@ -122,5 +158,13 @@
 104	val_104
 104	val_105
 104	val_105
-query: DROP TABLE T1
-query: DROP TABLE T2
+PREHOOK: query: DROP TABLE T1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE T1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: DROP TABLE T2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE T2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@t2

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input4.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input4.q.out Tue Sep 29 01:25:15 2009
@@ -1,6 +1,14 @@
-query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@INPUT4
+PREHOOK: query: EXPLAIN
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: type: LOAD
+POSTHOOK: query: EXPLAIN
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: type: LOAD
 ABSTRACT SYNTAX TREE:
   (TOK_LOAD '../data/files/kv1.txt' (TOK_TAB INPUT4) LOCAL)
 
@@ -11,8 +19,8 @@
 STAGE PLANS:
   Stage: Stage-0
     Copy
-      source: file:/data/users/njain/hive4/hive4/data/files/kv1.txt
-      destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/1119122844/10000
+      source: file:/data/users/njain/hive5/hive5/data/files/kv1.txt
+      destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/1092488123/10000
 
   Stage: Stage-1
     Move Operator
@@ -25,10 +33,19 @@
               name: input4
 
 
-query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
-query: SELECT INPUT4.VALUE, INPUT4.KEY FROM INPUT4
-Input: default/input4
-Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/486784140/10000
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: type: LOAD
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@input4
+PREHOOK: query: SELECT INPUT4.VALUE, INPUT4.KEY FROM INPUT4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input4
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/78834933/10000
+POSTHOOK: query: SELECT INPUT4.VALUE, INPUT4.KEY FROM INPUT4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input4
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/78834933/10000
 val_238	238
 val_86	86
 val_311	311
@@ -529,4 +546,8 @@
 val_400	400
 val_200	200
 val_97	97
-query: DROP TABLE INPUT4
+PREHOOK: query: DROP TABLE INPUT4
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE INPUT4
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@input4

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input40.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input40.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input40.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input40.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,29 @@
-query: drop table tmp_insert_test
-query: drop table tmp_insert_test_p
-query: create table tmp_insert_test (key string, value string) stored as textfile
-query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test
-query: select * from tmp_insert_test
-Input: default/tmp_insert_test
-Output: file:/data/users/njain/hive_commit4/hive_commit4/build/ql/tmp/207650189/10000
+PREHOOK: query: drop table tmp_insert_test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tmp_insert_test
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table tmp_insert_test_p
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tmp_insert_test_p
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tmp_insert_test (key string, value string) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tmp_insert_test (key string, value string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_insert_test
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tmp_insert_test
+PREHOOK: query: select * from tmp_insert_test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tmp_insert_test
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/304023366/10000
+POSTHOOK: query: select * from tmp_insert_test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tmp_insert_test
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/304023366/10000
 238	val_238
 86	val_86
 311	val_311
@@ -505,11 +524,24 @@
 400	val_400
 200	val_200
 97	val_97
-query: create table tmp_insert_test_p (key string, value string) partitioned by (ds string) stored as textfile
-query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
-query: select * from tmp_insert_test_p where ds= '2009-08-01'
-Input: default/tmp_insert_test_p/ds=2009-08-01
-Output: file:/data/users/njain/hive_commit4/hive_commit4/build/ql/tmp/25985999/10000
+PREHOOK: query: create table tmp_insert_test_p (key string, value string) partitioned by (ds string) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tmp_insert_test_p (key string, value string) partitioned by (ds string) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_insert_test_p
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tmp_insert_test_p@ds=2009-08-01
+PREHOOK: query: select * from tmp_insert_test_p where ds= '2009-08-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tmp_insert_test_p@ds=2009-08-01
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1730852318/10000
+POSTHOOK: query: select * from tmp_insert_test_p where ds= '2009-08-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tmp_insert_test_p@ds=2009-08-01
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1730852318/10000
 238	val_238	2009-08-01
 86	val_86	2009-08-01
 311	val_311	2009-08-01
@@ -1010,10 +1042,19 @@
 400	val_400	2009-08-01
 200	val_200	2009-08-01
 97	val_97	2009-08-01
-query: load data local inpath '../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
-query: select * from tmp_insert_test_p where ds= '2009-08-01'
-Input: default/tmp_insert_test_p/ds=2009-08-01
-Output: file:/data/users/njain/hive_commit4/hive_commit4/build/ql/tmp/461674796/10000
+PREHOOK: query: load data local inpath '../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv2.txt' into table tmp_insert_test_p partition (ds = '2009-08-01')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tmp_insert_test_p@ds=2009-08-01
+PREHOOK: query: select * from tmp_insert_test_p where ds= '2009-08-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tmp_insert_test_p@ds=2009-08-01
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1125925200/10000
+POSTHOOK: query: select * from tmp_insert_test_p where ds= '2009-08-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tmp_insert_test_p@ds=2009-08-01
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1125925200/10000
 238	val_238	2009-08-01
 86	val_86	2009-08-01
 311	val_311	2009-08-01
@@ -2014,5 +2055,13 @@
 227	val_228	2009-08-01
 395	val_396	2009-08-01
 244	val_245	2009-08-01
-query: drop table tmp_insert_test
-query: drop table tmp_insert_test_p
+PREHOOK: query: drop table tmp_insert_test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tmp_insert_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@tmp_insert_test
+PREHOOK: query: drop table tmp_insert_test_p
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tmp_insert_test_p
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@tmp_insert_test_p

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input41.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input41.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input41.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input41.q.out Tue Sep 29 01:25:15 2009
@@ -1,19 +1,37 @@
-query: select * from 
+PREHOOK: query: select * from 
   (select count(1) from src 
     union all
    select count(1) from srcpart where ds = '2009-08-09'
   )x
-Input: default/src
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/607183026/10000
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/576939637/10000
+POSTHOOK: query: select * from 
+  (select count(1) from src 
+    union all
+   select count(1) from srcpart where ds = '2009-08-09'
+  )x
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/576939637/10000
 500
 0
-query: select * from 
+PREHOOK: query: select * from 
+  (select * from src 
+    union all
+   select * from srcpart where ds = '2009-08-09'
+  )x
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/563082762/10000
+POSTHOOK: query: select * from 
   (select * from src 
     union all
    select * from srcpart where ds = '2009-08-09'
   )x
-Input: default/src
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/329948857/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/563082762/10000
 238	val_238
 86	val_86
 311	val_311

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input42.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input42.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input42.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input42.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: explain extended
+PREHOOK: query: explain extended
 select * from srcpart a where a.ds='2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from srcpart a where a.ds='2008-04-08'
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08'))))
 
@@ -12,10 +16,16 @@
       limit: -1
 
 
-query: select * from srcpart a where a.ds='2008-04-08'
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/2090238290/10000
+PREHOOK: query: select * from srcpart a where a.ds='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1981400750/10000
+POSTHOOK: query: select * from srcpart a where a.ds='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1981400750/10000
 238	val_238	2008-04-08	11
 86	val_86	2008-04-08	11
 311	val_311	2008-04-08	11
@@ -1016,8 +1026,12 @@
 400	val_400	2008-04-08	12
 200	val_200	2008-04-08	12
 97	val_97	2008-04-08	12
-query: explain extended
+PREHOOK: query: explain extended
+select * from srcpart a where a.ds='2008-04-08' and key < 200
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
 select * from srcpart a where a.ds='2008-04-08' and key < 200
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08') (< (TOK_TABLE_OR_COL key) 200)))))
 
@@ -1056,7 +1070,7 @@
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/njain/hive1/hive1/build/ql/tmp/1636549090/10001
+                    directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1664046852/10001
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1066,10 +1080,10 @@
                           columns.types string:string:string:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
       Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -1088,10 +1102,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -1110,7 +1124,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -1118,10 +1132,16 @@
     Fetch Operator
       limit: -1
 
-query: select * from srcpart a where a.ds='2008-04-08' and key < 200
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/2029733644/10000
+PREHOOK: query: select * from srcpart a where a.ds='2008-04-08' and key < 200
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/200140260/10000
+POSTHOOK: query: select * from srcpart a where a.ds='2008-04-08' and key < 200
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/200140260/10000
 86	val_86	2008-04-08	11
 27	val_27	2008-04-08	11
 165	val_165	2008-04-08	11
@@ -1500,8 +1520,12 @@
 90	val_90	2008-04-08	12
 169	val_169	2008-04-08	12
 97	val_97	2008-04-08	12
-query: explain extended
+PREHOOK: query: explain extended
+select * from srcpart a where a.ds='2008-04-08' and rand(100) < 0.1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
 select * from srcpart a where a.ds='2008-04-08' and rand(100) < 0.1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08') (< (TOK_FUNCTION rand 100) 0.1)))))
 
@@ -1535,7 +1559,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/njain/hive1/hive1/build/ql/tmp/1619555939/10001
+                  directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/56577408/10001
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1545,10 +1569,10 @@
                         columns.types string:string:string:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
       Path -> Partition:
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -1567,10 +1591,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -1589,7 +1613,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -1598,10 +1622,16 @@
       limit: -1
 
 
-query: select * from srcpart a where a.ds='2008-04-08' and rand(100) < 0.1
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/774585255/10000
+PREHOOK: query: select * from srcpart a where a.ds='2008-04-08' and rand(100) < 0.1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1684443155/10000
+POSTHOOK: query: select * from srcpart a where a.ds='2008-04-08' and rand(100) < 0.1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1684443155/10000
 145	val_145	2008-04-08	11
 417	val_417	2008-04-08	11
 292	val_292	2008-04-08	11

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_cb_delim.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_cb_delim.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_cb_delim.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_cb_delim.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,21 @@
-query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
-query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
-query: SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB
-Input: default/input4_cb
-Output: /data/users/athusoo/commits/hive_trunk_ws8/ql/../build/ql/tmp/45272155/119966499.10000
+PREHOOK: query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@INPUT4_CB
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
+PREHOOK: type: LOAD
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@input4_cb
+PREHOOK: query: SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input4_cb
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/767747881/10000
+POSTHOOK: query: SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input4_cb
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/767747881/10000
 val_238	238
 val_86	86
 val_311	311
@@ -503,4 +516,8 @@
 val_400	400
 val_200	200
 val_97	97
-query: DROP TABLE INPUT4_CB
+PREHOOK: query: DROP TABLE INPUT4_CB
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE INPUT4_CB
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@input4_cb

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_limit.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input4_limit.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: explain
+PREHOOK: query: explain
 select * from src sort by key limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from src sort by key limit 10
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key))) (TOK_LIMIT 10)))
 
@@ -46,7 +50,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1741200337/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1763489725/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -73,9 +77,14 @@
       limit: 10
 
 
-query: select * from src sort by key limit 10
-Input: default/src
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/385052408/10000
+PREHOOK: query: select * from src sort by key limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1576676537/10000
+POSTHOOK: query: select * from src sort by key limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1576676537/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM (
   FROM src_thrift
   SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
@@ -7,6 +11,16 @@
   CLUSTER BY tkey 
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+         USING '/bin/cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src_thrift) lint) (. (TOK_TABLE_OR_COL src_thrift) lintstring)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
 
@@ -76,18 +90,34 @@
               name: dest1
 
 
-query: FROM (
+PREHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+         USING '/bin/cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
   FROM src_thrift
   SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
          USING '/bin/cat' AS (tkey, tvalue) 
   CLUSTER BY tkey 
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
-Input: default/src_thrift
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/690928213/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/532588189/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/532588189/10000
 [0,0,0]	[{"myint":0,"mystring":"0","underscore_int":0}]
 [1,2,3]	[{"myint":1,"mystring":"1","underscore_int":1}]
 [2,4,6]	[{"myint":4,"mystring":"8","underscore_int":2}]

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input6.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input6.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src1
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value))) (TOK_WHERE (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src1) key)))))
 
@@ -47,10 +56,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2028160907/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/841012245/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/886482304/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1344327188/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -84,10 +93,21 @@
               name: dest1
 
 
-query: FROM src1
+PREHOOK: query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src1
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src1.value WHERE src1.key is null
-Input: default/src1
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2074725714/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1210473188/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1210473188/10000

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(c1 DOUBLE, c2 INT) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src1
 INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR TOK_NULL) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)))))
 
@@ -46,10 +55,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1105502435/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/907345585/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1662408362/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/796567644/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -83,13 +92,24 @@
               name: dest1
 
 
-query: FROM src1
+PREHOOK: query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src1
 INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key
-Input: default/src1
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/136163075/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1205611603/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1205611603/10000
 NULL	238
 NULL	NULL
 NULL	311

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src1 
 INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src1 
+INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 4 TOK_NULL)) (TOK_SELEXPR (- (. (TOK_TABLE_OR_COL src1) key) TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL)))))
 
@@ -50,10 +59,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/124487201/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/539046228/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/473256518/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1447024085/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -89,13 +98,24 @@
               name: dest1
 
 
-query: FROM src1 
+PREHOOK: query: FROM src1 
+INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src1 
 INSERT OVERWRITE TABLE dest1 SELECT 4 + NULL, src1.key - NULL, NULL + NULL
-Input: default/src1
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/963034629/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1333594748/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1333594748/10000
 NULL	NULL	NULL
 NULL	NULL	NULL
 NULL	NULL	NULL

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(value STRING, key INT) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(value STRING, key INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(value STRING, key INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src1
 INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src1)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR TOK_NULL) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key))) (TOK_WHERE (= TOK_NULL TOK_NULL))))
 
@@ -54,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1618745236/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/345938132/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1286113627/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/709063265/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -91,10 +100,21 @@
               name: dest1
 
 
-query: FROM src1
+PREHOOK: query: FROM src1
+INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src1
 INSERT OVERWRITE TABLE dest1 SELECT NULL, src1.key where NULL = NULL
-Input: default/src1
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1674136764/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/132002118/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/132002118/10000

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_columnarserde.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_columnarserde.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_columnarserde.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_columnarserde.q.out Tue Sep 29 01:25:15 2009
@@ -1,13 +1,30 @@
-query: drop table input_columnarserde
-query: CREATE TABLE input_columnarserde(a array<int>, b array<string>, c map<string,string>, d int, e string)
+PREHOOK: query: drop table input_columnarserde
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table input_columnarserde
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE input_columnarserde(a array<int>, b array<string>, c map<string,string>, d int, e string)
 ROW FORMAT SERDE
   'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
 STORED AS
   INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
-query: EXPLAIN
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE input_columnarserde(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS
+  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@input_columnarserde
+PREHOOK: query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE input_columnarserde SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 FROM src_thrift
 INSERT OVERWRITE TABLE input_columnarserde SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB input_columnarserde)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) mstringstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) astring))) (TOK_DISTRIBUTEBY 1)))
 
@@ -74,13 +91,24 @@
               name: input_columnarserde
 
 
-query: FROM src_thrift
+PREHOOK: query: FROM src_thrift
+INSERT OVERWRITE TABLE input_columnarserde SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@input_columnarserde
+POSTHOOK: query: FROM src_thrift
 INSERT OVERWRITE TABLE input_columnarserde SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
-Input: default/src_thrift
-Output: default/input_columnarserde
-query: SELECT input_columnarserde.* FROM input_columnarserde DISTRIBUTE BY 1
-Input: default/input_columnarserde
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1695989715/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@input_columnarserde
+PREHOOK: query: SELECT input_columnarserde.* FROM input_columnarserde DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input_columnarserde
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/851213096/10000
+POSTHOOK: query: SELECT input_columnarserde.* FROM input_columnarserde DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input_columnarserde
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/851213096/10000
 [0,0,0]	["0","0","0"]	{"key_0":"value_0"}	1712634731	record_0
 [1,2,3]	["10","100","1000"]	{"key_1":"value_1"}	465985200	record_1
 [2,4,6]	["20","200","2000"]	{"key_2":"value_2"}	-751827638	record_2
@@ -92,9 +120,14 @@
 [8,16,24]	["80","800","8000"]	{"key_8":"value_8"}	1638581578	record_8
 [9,18,27]	["90","900","9000"]	{"key_9":"value_9"}	336964413	record_9
 null	null	{}	0	NULL
-query: SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1
-Input: default/input_columnarserde
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1755799278/10000
+PREHOOK: query: SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input_columnarserde
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/722662209/10000
+POSTHOOK: query: SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input_columnarserde
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/722662209/10000
 0	0	NULL	1712634731	record_0
 1	10	NULL	465985200	record_1
 2	20	NULL	-751827638	record_2
@@ -106,4 +139,8 @@
 8	80	NULL	1638581578	record_8
 9	90	NULL	336964413	record_9
 NULL	NULL	NULL	0	NULL
-query: drop table input_columnarserde
+PREHOOK: query: drop table input_columnarserde
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table input_columnarserde
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@input_columnarserde

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out Tue Sep 29 01:25:15 2009
@@ -1,13 +1,28 @@
-query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+PREHOOK: query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
 ROW FORMAT DELIMITED
 FIELDS TERMINATED BY '1'
 COLLECTION ITEMS TERMINATED BY '2'
 MAP KEYS TERMINATED BY '3'
 LINES TERMINATED BY '10'
 STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '1'
+COLLECTION ITEMS TERMINATED BY '2'
+MAP KEYS TERMINATED BY '3'
+LINES TERMINATED BY '10'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 FROM src_thrift
 INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) mstringstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) astring)))))
 
@@ -51,10 +66,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1925142463/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/1462054105/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1140895264/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/2096286623/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -94,13 +109,24 @@
               name: dest1
 
 
-query: FROM src_thrift
+PREHOOK: query: FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src_thrift
 INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring
-Input: default/src_thrift
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/842958925/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1569256194/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1569256194/10000
 [0,0,0]	["0","0","0"]	{"key_0":"value_0"}	1712634731	record_0
 [1,2,3]	["10","100","1000"]	{"key_1":"value_1"}	465985200	record_1
 [2,4,6]	["20","200","2000"]	{"key_2":"value_2"}	-751827638	record_2
@@ -112,9 +138,14 @@
 [8,16,24]	["80","800","8000"]	{"key_8":"value_8"}	1638581578	record_8
 [9,18,27]	["90","900","9000"]	{"key_9":"value_9"}	336964413	record_9
 null	null	null	0	NULL
-query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1145493551/10000
+PREHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/632615537/10000
+POSTHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/632615537/10000
 0	0	NULL	1712634731	record_0
 1	10	NULL	465985200	record_1
 2	20	NULL	-751827638	record_2

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out Tue Sep 29 01:25:15 2009
@@ -1,13 +1,28 @@
-query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+PREHOOK: query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
 ROW FORMAT DELIMITED
 FIELDS TERMINATED BY '1'
 COLLECTION ITEMS TERMINATED BY '2'
 MAP KEYS TERMINATED BY '3'
 LINES TERMINATED BY '10'
 STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '1'
+COLLECTION ITEMS TERMINATED BY '2'
+MAP KEYS TERMINATED BY '3'
+LINES TERMINATED BY '10'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
 FROM src_thrift
 INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) lstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) mstringstring)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src_thrift) astring))) (TOK_DISTRIBUTEBY 1)))
 
@@ -74,13 +89,24 @@
               name: dest1
 
 
-query: FROM src_thrift
+PREHOOK: query: FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src_thrift
 INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring DISTRIBUTE BY 1
-Input: default/src_thrift
-Output: default/dest1
-query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/54447048/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/4821465/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1 DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/4821465/10000
 [0,0,0]	["0","0","0"]	{"key_0":"value_0"}	1712634731	record_0
 [1,2,3]	["10","100","1000"]	{"key_1":"value_1"}	465985200	record_1
 [2,4,6]	["20","200","2000"]	{"key_2":"value_2"}	-751827638	record_2
@@ -92,9 +118,14 @@
 [8,16,24]	["80","800","8000"]	{"key_8":"value_8"}	1638581578	record_8
 [9,18,27]	["90","900","9000"]	{"key_9":"value_9"}	336964413	record_9
 null	null	null	0	NULL
-query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1845091515/10000
+PREHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/670788177/10000
+POSTHOOK: query: SELECT dest1.a[0], dest1.b[0], dest1.c['key2'], dest1.d, dest1.e FROM dest1 DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/670788177/10000
 0	0	NULL	1712634731	record_0
 1	10	NULL	465985200	record_1
 2	20	NULL	-751827638	record_2
@@ -106,14 +137,32 @@
 8	80	NULL	1638581578	record_8
 9	90	NULL	336964413	record_9
 NULL	NULL	NULL	0	NULL
-query: DROP TABLE dest1
-query: CREATE TABLE dest1(a array<int>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\'
-query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint FROM src_thrift DISTRIBUTE BY 1
-Input: default/src_thrift
-Output: default/dest1
-query: SELECT * from dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/482165346/10000
+PREHOOK: query: DROP TABLE dest1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE dest1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest1(a array<int>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(a array<int>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint FROM src_thrift DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint FROM src_thrift DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1650408145/10000
+POSTHOOK: query: SELECT * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1650408145/10000
 [0,0,0]
 [1,2,3]
 [2,4,6]
@@ -125,14 +174,32 @@
 [8,16,24]
 [9,18,27]
 null
-query: DROP TABLE dest1
-query: CREATE TABLE dest1(a map<string,string>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\'
-query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.mstringstring FROM src_thrift DISTRIBUTE BY 1
-Input: default/src_thrift
-Output: default/dest1
-query: SELECT * from dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/244883137/10000
+PREHOOK: query: DROP TABLE dest1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE dest1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest1(a map<string,string>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(a map<string,string>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.mstringstring FROM src_thrift DISTRIBUTE BY 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT src_thrift.mstringstring FROM src_thrift DISTRIBUTE BY 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/944903832/10000
+POSTHOOK: query: SELECT * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/944903832/10000
 {"key_0":"value_0"}
 {"key_1":"value_1"}
 {"key_2":"value_2"}
@@ -144,4 +211,8 @@
 {"key_8":"value_8"}
 {"key_9":"value_9"}
 null
-query: DROP TABLE dest1
+PREHOOK: query: DROP TABLE dest1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE dest1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_limit.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_limit.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT x.* FROM SRC x LIMIT 20
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT x.* FROM SRC x LIMIT 20
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_LIMIT 20)))
 
@@ -12,9 +16,14 @@
       limit: 20
 
 
-query: SELECT x.* FROM SRC x LIMIT 20
-Input: default/src
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/197174171/10000
+PREHOOK: query: SELECT x.* FROM SRC x LIMIT 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/569213864/10000
+POSTHOOK: query: SELECT x.* FROM SRC x LIMIT 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/569213864/10000
 238	val_238
 86	val_86
 311	val_311

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part0.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part0.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part0.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part0.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRCPART x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) ds) '2008-04-08'))))
 
@@ -12,10 +16,16 @@
       limit: -1
 
 
-query: SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/299966053/10000
+PREHOOK: query: SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1391476561/10000
+POSTHOOK: query: SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1391476561/10000
 238	val_238	2008-04-08	11
 86	val_86	2008-04-08	11
 311	val_311	2008-04-08	11

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE
-query: EXPLAIN EXTENDED
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN EXTENDED
 FROM srcpart
 INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) hr)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) ds))) (TOK_WHERE (and (and (< (. (TOK_TABLE_OR_COL srcpart) key) 100) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (= (. (TOK_TABLE_OR_COL srcpart) hr) '12')))))
 
@@ -52,7 +61,7 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1871117581/10002
+                      directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/991135310/10002
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -66,14 +75,14 @@
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1
+                            location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -92,7 +101,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -102,11 +111,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1871117581/10002
-                destination: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1967034913/10000
+                source: file:/data/users/njain/hive5/hive5/build/ql/tmp/991135310/10002
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/85899454/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1871117581/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/991135310/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -124,9 +133,9 @@
                           type: string
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1871117581/10002 [file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1871117581/10002]
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/991135310/10002 [file:/data/users/njain/hive5/hive5/build/ql/tmp/991135310/10002]
             Path -> Partition:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1871117581/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/991135310/10002 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -141,7 +150,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1
+                      location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
             Reduce Operator Tree:
@@ -149,7 +158,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1967034913/10000
+                  directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/85899454/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -162,7 +171,7 @@
                         bucket_count -1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                        location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1
+                        location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
@@ -171,7 +180,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1967034913/10000
+          source: file:/data/users/njain/hive5/hive5/build/ql/tmp/85899454/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -185,19 +194,30 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/dest1
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest1
-          tmp directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1967034913/10001
+          tmp directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/85899454/10001
 
 
-query: FROM srcpart
+PREHOOK: query: FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM srcpart
 INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/75595335/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2078857073/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2078857073/10000
 86	val_86	12	2008-04-08
 27	val_27	12	2008-04-08
 98	val_98	12	2008-04-08