Posted to commits@hive.apache.org by rm...@apache.org on 2009/09/29 03:25:30 UTC

svn commit: r819792 [14/24] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apa...

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out Tue Sep 29 01:25:15 2009
@@ -1,37 +1,95 @@
-query: drop table hive_test_src
-query: drop table hive_test_dst
-query: create table hive_test_src ( col1 string ) stored as textfile
-query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
-query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
-query: insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src
-Input: default/hive_test_src
-Output: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
-query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
-Input: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
-Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/20877141/262522507.10000
+PREHOOK: query: drop table hive_test_src
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_test_src
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table hive_test_dst
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_test_dst
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table hive_test_src ( col1 string ) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table hive_test_src ( col1 string ) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@hive_test_src
+PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@hive_test_src
+PREHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@hive_test_dst
+PREHOOK: query: insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_src
+PREHOOK: Output: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+POSTHOOK: query: insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_src
+POSTHOOK: Output: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1322974919/10000
+POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1322974919/10000
 1	test_part	test_Part
 2	test_part	test_Part
 3	test_part	test_Part
 4	test_part	test_Part
 5	test_part	test_Part
 6	test_part	test_Part
-query: insert overwrite table hive_test_dst partition ( pCol1='test_part', pcol2='test_Part') select col1 from hive_test_src
-Input: default/hive_test_src
-Output: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
-query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
-Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/1586437457/438136405.10000
-query: select * from hive_test_dst where pcol1='test_part'
-Input: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
-Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/991232921/143300248.10000
+PREHOOK: query: insert overwrite table hive_test_dst partition ( pCol1='test_part', pcol2='test_Part') select col1 from hive_test_src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_src
+PREHOOK: Output: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+POSTHOOK: query: insert overwrite table hive_test_dst partition ( pCol1='test_part', pcol2='test_Part') select col1 from hive_test_src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_src
+POSTHOOK: Output: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/125968681/10000
+POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/125968681/10000
+PREHOOK: query: select * from hive_test_dst where pcol1='test_part'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1540188393/10000
+POSTHOOK: query: select * from hive_test_dst where pcol1='test_part'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1540188393/10000
 1	test_part	test_Part
 2	test_part	test_Part
 3	test_part	test_Part
 4	test_part	test_Part
 5	test_part	test_Part
 6	test_part	test_Part
-query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
-Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/118754797/315488459.10000
-query: select * from hive_test_dst where pcol1='test_Part'
-Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/27905267/502654394.10000
-query: drop table hive_test_src
-query: drop table hive_test_dst
+PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/950500880/10000
+POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/950500880/10000
+PREHOOK: query: select * from hive_test_dst where pcol1='test_Part'
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1398711311/10000
+POSTHOOK: query: select * from hive_test_dst where pcol1='test_Part'
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1398711311/10000
+PREHOOK: query: drop table hive_test_src
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_test_src
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@hive_test_src
+PREHOOK: query: drop table hive_test_dst
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_test_dst
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@hive_test_dst

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart_err.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart_err.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart_err.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart_err.q.out Tue Sep 29 01:25:15 2009
@@ -1,16 +1,37 @@
-query: DROP TABLE loadpart1
-query: CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING)
-query: INSERT OVERWRITE TABLE loadpart1 PARTITION (ds='2009-01-01')
+PREHOOK: query: DROP TABLE loadpart1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE loadpart1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@loadpart1
+PREHOOK: query: INSERT OVERWRITE TABLE loadpart1 PARTITION (ds='2009-01-01')
 SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
-Input: default/src
-Output: default/loadpart1/ds=2009-01-01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@loadpart1@ds=2009-01-01
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
-query: DESCRIBE loadpart1
+PREHOOK: query: DESCRIBE loadpart1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE loadpart1
+POSTHOOK: type: DESCTABLE
 a	string	
 b	string	
 ds	string	
-query: SHOW PARTITIONS loadpart1
-FAILED: Error in semantic analysis: line 3:23 Invalid Path '../data1/files/kv1.txt': No files matching path file:/data/users/pchakka/workspace/oshive/data1/files/kv1.txt
-query: SHOW PARTITIONS loadpart1
-query: DROP TABLE loadpart1
+PREHOOK: query: SHOW PARTITIONS loadpart1
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS loadpart1
+POSTHOOK: type: SHOWPARTITIONS
+FAILED: Error in semantic analysis: line 3:23 Invalid Path '../data1/files/kv1.txt': No files matching path file:/data/users/njain/hive5/hive5/data1/files/kv1.txt
+PREHOOK: query: SHOW PARTITIONS loadpart1
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS loadpart1
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: DROP TABLE loadpart1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE loadpart1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@loadpart1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/louter_join_ppr.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/louter_join_ppr.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/louter_join_ppr.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/louter_join_ppr.q.out Tue Sep 29 01:25:15 2009
@@ -1,4 +1,4 @@
-query: EXPLAIN EXTENDED
+PREHOOK: query: EXPLAIN EXTENDED
  FROM 
   src a
  LEFT OUTER JOIN 
@@ -6,6 +6,16 @@
  ON (a.key = b.key AND b.ds = '2008-04-08')
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  src a
+ LEFT OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)))))
 
@@ -68,11 +78,11 @@
                       type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [a]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -91,10 +101,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -113,10 +123,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -131,7 +141,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -161,7 +171,7 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1573242871/10001
+                directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/713935758/10001
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -175,17 +185,30 @@
       limit: -1
 
 
-query: FROM 
+PREHOOK: query: FROM 
+  src a
+ LEFT OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/129557767/10000
+POSTHOOK: query: FROM 
   src a
  LEFT OUTER JOIN 
   srcpart b 
  ON (a.key = b.key AND b.ds = '2008-04-08')
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/592926769/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/129557767/10000
 17	val_17	17	val_17
 17	val_17	17	val_17
 18	val_18	18	val_18
@@ -198,7 +221,7 @@
 18	val_18	18	val_18
 19	val_19	19	val_19
 19	val_19	19	val_19
-query: EXPLAIN EXTENDED
+PREHOOK: query: EXPLAIN EXTENDED
  FROM 
   srcpart a
  LEFT OUTER JOIN 
@@ -206,6 +229,16 @@
  ON (a.key = b.key AND a.ds = '2008-04-08')
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  srcpart a
+ LEFT OUTER JOIN 
+  src b 
+ ON (a.key = b.key AND a.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF srcpart a) (TOK_TABREF src b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)))))
 
@@ -263,11 +296,11 @@
                         type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -282,10 +315,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -304,10 +337,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -326,7 +359,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -356,7 +389,7 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/614472149/10001
+                directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/2009208674/10001
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -370,17 +403,30 @@
       limit: -1
 
 
-query: FROM 
+PREHOOK: query: FROM 
+  srcpart a
+ LEFT OUTER JOIN 
+  src b 
+ ON (a.key = b.key AND a.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/765098920/10000
+POSTHOOK: query: FROM 
   srcpart a
  LEFT OUTER JOIN 
   src b 
  ON (a.key = b.key AND a.ds = '2008-04-08')
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-Input: default/src
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1850755626/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/765098920/10000
 17	val_17	17	val_17
 17	val_17	17	val_17
 18	val_18	18	val_18
@@ -393,7 +439,7 @@
 18	val_18	18	val_18
 19	val_19	19	val_19
 19	val_19	19	val_19
-query: EXPLAIN EXTENDED
+PREHOOK: query: EXPLAIN EXTENDED
  FROM 
   src a
  LEFT OUTER JOIN 
@@ -401,6 +447,16 @@
  ON (a.key = b.key)
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  src a
+ LEFT OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF src a) (TOK_TABREF srcpart b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')))))
 
@@ -455,13 +511,13 @@
                       type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [a]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -480,10 +536,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -502,10 +558,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
           Partition
             partition values:
               ds 2008-04-09
@@ -524,10 +580,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
           Partition
             partition values:
               ds 2008-04-09
@@ -546,10 +602,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -564,7 +620,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -594,7 +650,7 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/302010175/10001
+                directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1431088692/10001
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -608,19 +664,34 @@
       limit: -1
 
 
-query: FROM 
+PREHOOK: query: FROM 
+  src a
+ LEFT OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1274589662/10000
+POSTHOOK: query: FROM 
   src a
  LEFT OUTER JOIN 
   srcpart b 
  ON (a.key = b.key)
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Input: default/srcpart/ds=2008-04-09/hr=11
-Input: default/srcpart/ds=2008-04-09/hr=12
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/1342874033/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1274589662/10000
 17	val_17	17	val_17
 17	val_17	17	val_17
 18	val_18	18	val_18
@@ -633,7 +704,7 @@
 18	val_18	18	val_18
 19	val_19	19	val_19
 19	val_19	19	val_19
-query: EXPLAIN EXTENDED
+PREHOOK: query: EXPLAIN EXTENDED
  FROM 
   srcpart a
  LEFT OUTER JOIN 
@@ -641,6 +712,16 @@
  ON (a.key = b.key)
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  srcpart a
+ LEFT OUTER JOIN 
+  src b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF srcpart a) (TOK_TABREF src b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25)) (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08')))))
 
@@ -695,11 +776,11 @@
                       type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src [b]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [b]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -714,10 +795,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -736,10 +817,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -758,7 +839,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -788,7 +869,7 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/864297430/10001
+                directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/946460772/10001
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -802,17 +883,30 @@
       limit: -1
 
 
-query: FROM 
+PREHOOK: query: FROM 
+  srcpart a
+ LEFT OUTER JOIN 
+  src b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1267063046/10000
+POSTHOOK: query: FROM 
   srcpart a
  LEFT OUTER JOIN 
   src b 
  ON (a.key = b.key)
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND a.ds = '2008-04-08'
-Input: default/src
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/872577564/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1267063046/10000
 17	val_17	17	val_17
 17	val_17	17	val_17
 18	val_18	18	val_18

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out Tue Sep 29 01:25:15 2009
@@ -1,11 +1,24 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten, one
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten, one
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
 
@@ -91,17 +104,32 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten, one
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten, one
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/10231498/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/39861038/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/39861038/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,22 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey))))
 
@@ -85,16 +97,30 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
-Input: default/src
-Output: default/dest1
-query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1983063438/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1870907070/10000
+POSTHOOK: query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1870907070/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,22 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 SORT BY tvalue, tkey
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
 
@@ -85,16 +97,30 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 SORT BY tvalue, tkey
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1887854554/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1933944424/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1933944424/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out Tue Sep 29 01:25:15 2009
@@ -1,11 +1,24 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten DESC, one ASC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten DESC, one ASC
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
 
@@ -91,17 +104,32 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (tkey, ten, one, tvalue)
+DISTRIBUTE BY tvalue, tkey
+SORT BY ten DESC, one ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten DESC, one ASC
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1395191210/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/664955225/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/664955225/10000
 90	9	0	val_90
 90	9	0	val_90
 90	9	0	val_90

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,22 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 SELECT src.key as c1, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value as c4
 DISTRIBUTE BY c4, c1
 SORT BY c2 DESC, c3 ASC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key as c1, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value as c4
+DISTRIBUTE BY c4, c1
+SORT BY c2 DESC, c3 ASC
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) c2) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL c4) (TOK_TABLE_OR_COL c1)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL c2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL c3)))))
 
@@ -85,16 +97,30 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key as c1, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value as c4
+DISTRIBUTE BY c4, c1
+SORT BY c2 DESC, c3 ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 SELECT src.key as c1, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value as c4
 DISTRIBUTE BY c4, c1
 SORT BY c2 DESC, c3 ASC
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/1487693767/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2004793034/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2004793034/10000
 490	49	0	val_490
 491	49	1	val_491
 492	49	2	val_492

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,22 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 SELECT src.key, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value
 DISTRIBUTE BY value, key
 SORT BY c2 DESC, c3 ASC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value
+DISTRIBUTE BY value, key
+SORT BY c2 DESC, c3 ASC
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) c2) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL c2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL c3)))))
 
@@ -85,16 +97,30 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+SELECT src.key, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value
+DISTRIBUTE BY value, key
+SORT BY c2 DESC, c3 ASC
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 SELECT src.key, CAST(src.key / 10 AS INT) as c2, CAST(src.key % 10 AS INT) as c3, src.value
 DISTRIBUTE BY value, key
 SORT BY c2 DESC, c3 ASC
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/117785863/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/492905310/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/492905310/10000
 490	49	0	val_490
 491	49	1	val_491
 492	49	2	val_492

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,22 @@
-query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
 SORT BY tvalue, tkey
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_ALLCOLREF src) (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST k v tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
 
@@ -97,16 +109,30 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+SORT BY tvalue, tkey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
 SORT BY tvalue, tkey
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1990174559/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/950002812/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/950002812/10000
 0	val_0	0	0	0	val_0
 0	val_0	0	0	0	val_0
 0	val_0	0	0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out Tue Sep 29 01:25:15 2009
@@ -1,11 +1,24 @@
-query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(k STRING, v STRING, key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
 DISTRIBUTE BY rand(3)
 SORT BY tvalue, tkey
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+DISTRIBUTE BY rand(3)
+SORT BY tvalue, tkey
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_ALLCOLREF src) (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST k v tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_FUNCTION rand 3)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
 
@@ -101,17 +114,32 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1
+MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
+USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
+DISTRIBUTE BY rand(3)
+SORT BY tvalue, tkey
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1
 MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
 USING '/bin/cat' AS (k, v, tkey, ten, one, tvalue)
 DISTRIBUTE BY rand(3)
 SORT BY tvalue, tkey
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/2052535127/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1419210118/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1419210118/10000
 0	val_0	0	0	0	val_0
 0	val_0	0	0	0	val_0
 0	val_0	0	0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/merge1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/merge1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/merge1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/merge1.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,20 @@
-query: drop table dest1
-query: create table dest1(key int, val int)
-query: explain
+PREHOOK: query: drop table dest1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table dest1(key int, val int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table dest1(key int, val int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: explain
 insert overwrite table dest1
 select key, count(1) from src group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert overwrite table dest1
+select key, count(1) from src group by key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))
 
@@ -81,10 +93,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1134406044/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/1186355480/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2072613560/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/443335059/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -118,13 +130,24 @@
               name: dest1
 
 
-query: insert overwrite table dest1
+PREHOOK: query: insert overwrite table dest1
+select key, count(1) from src group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: insert overwrite table dest1
 select key, count(1) from src group by key
-Input: default/src
-Output: default/dest1
-query: select * from dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1979784812/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: select * from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1168542666/10000
+POSTHOOK: query: select * from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1168542666/10000
 0	3
 10	1
 100	2
@@ -434,4 +457,8 @@
 96	1
 97	2
 98	2
-query: drop table dest1
+PREHOOK: query: drop table dest1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,6 @@
+POSTHOOK: query: EXPLAIN
+SELECT *  FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY src1.key, src1.value, src2.key, src2.value
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src src1) (TOK_TABREF src src2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (< (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src2) key) 10))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src1) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL src2) value)))))
 
@@ -73,7 +76,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/2027777065/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/816280242/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -109,6 +112,10 @@
       limit: -1
 
 
+POSTHOOK: query: SELECT *  FROM src src1 JOIN src src2 WHERE src1.key < 10 and src2.key < 10 SORT BY src1.key, src1.value, src2.key, src2.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/642135428/10000
 0	val_0	0	val_0
 0	val_0	0	val_0
 0	val_0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL value) c1) (TOK_SELEXPR (TOK_TABLE_OR_COL key) c2)))) x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL c1))) (TOK_WHERE (< (TOK_TABLE_OR_COL c2) 100))))
 
@@ -46,9 +50,14 @@
       limit: -1
 
 
-query: SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1738791810/10000
+PREHOOK: query: SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/991930201/10000
+POSTHOOK: query: SELECT c1 FROM (select value as c1, key as c2 from src) x where c2 < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/991930201/10000
 val_86
 val_27
 val_98

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))
 
@@ -96,13 +105,24 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/766222009/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1113537326/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1113537326/10000
 1234	0	3.0
 1234	10	1.0
 1234	11	1.0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src
 INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 100)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key))))
 
@@ -96,13 +105,24 @@
               name: dest1
 
 
-query: FROM src
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1 SELECT '1234', src.key, count(1) WHERE key < 100 group by src.key
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1541116592/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/18460415/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/18460415/10000
 1234	0	3.0
 1234	10	1.0
 1234	11	1.0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/null_column.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/null_column.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/null_column.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/null_column.q.out Tue Sep 29 01:25:15 2009
@@ -1,52 +1,119 @@
-query: drop table temp_null
-query: drop table tt
-query: drop table tt_b
-query: create table temp_null(a int) stored as textfile
-query: load data local inpath '../data/files/test.dat' overwrite into table temp_null
-query: select null, null from temp_null
-Input: default/temp_null
-Output: file:/data/users/nzhang/work/734/734-trunk-apache-hive/build/ql/tmp/2083094836/10000
+PREHOOK: query: drop table temp_null
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table temp_null
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table tt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tt
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table tt_b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tt_b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table temp_null(a int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table temp_null(a int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@temp_null
+PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table temp_null
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table temp_null
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@temp_null
+PREHOOK: query: select null, null from temp_null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temp_null
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1637717703/10000
+POSTHOOK: query: select null, null from temp_null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temp_null
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1637717703/10000
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
-query: create table tt(a int, b string)
-query: insert overwrite table tt select null, null from temp_null
-Input: default/temp_null
-Output: default/tt
-query: select * from tt
-Input: default/tt
-Output: file:/data/users/nzhang/work/734/734-trunk-apache-hive/build/ql/tmp/2090243542/10000
+PREHOOK: query: create table tt(a int, b string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tt(a int, b string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tt
+PREHOOK: query: insert overwrite table tt select null, null from temp_null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temp_null
+PREHOOK: Output: default@tt
+POSTHOOK: query: insert overwrite table tt select null, null from temp_null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temp_null
+POSTHOOK: Output: default@tt
+PREHOOK: query: select * from tt
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tt
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/974669501/10000
+POSTHOOK: query: select * from tt
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tt
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/974669501/10000
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
-query: create table tt_b(a int, b string) row format serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"
-query: insert overwrite table tt_b select null, null from temp_null
-Input: default/temp_null
-Output: default/tt_b
-query: select * from tt_b
-Input: default/tt_b
-Output: file:/data/users/nzhang/work/734/734-trunk-apache-hive/build/ql/tmp/1191541403/10000
+PREHOOK: query: create table tt_b(a int, b string) row format serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tt_b(a int, b string) row format serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tt_b
+PREHOOK: query: insert overwrite table tt_b select null, null from temp_null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temp_null
+PREHOOK: Output: default@tt_b
+POSTHOOK: query: insert overwrite table tt_b select null, null from temp_null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temp_null
+POSTHOOK: Output: default@tt_b
+PREHOOK: query: select * from tt_b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tt_b
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1955123010/10000
+POSTHOOK: query: select * from tt_b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tt_b
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1955123010/10000
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
 NULL	NULL
-query: insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null
-Input: default/temp_null
-Output: ../build/ql/test/data/warehouse/null_columns.out
+PREHOOK: query: insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temp_null
+PREHOOK: Output: ../build/ql/test/data/warehouse/null_columns.out
+POSTHOOK: query: insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temp_null
+POSTHOOK: Output: ../build/ql/test/data/warehouse/null_columns.out
 \N\N
 \N\N
 \N\N
 \N\N
 \N\N
 \N\N
-query: drop table tt
-query: drop table tt_b
-query: drop table temp_null
+PREHOOK: query: drop table tt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@tt
+PREHOOK: query: drop table tt_b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tt_b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@tt_b
+PREHOOK: query: drop table temp_null
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table temp_null
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@temp_null

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: explain
+PREHOOK: query: explain
 select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
 
@@ -57,12 +61,21 @@
       limit: -1
 
 
-query: select count(1) from src x where x.key > 9999
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1836686316/10000
+PREHOOK: query: select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/306583345/10000
+POSTHOOK: query: select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/306583345/10000
 0
-query: explain
+PREHOOK: query: explain
+select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
 select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
 
@@ -120,12 +133,21 @@
       limit: -1
 
 
-query: select count(1) from src x where x.key > 9999
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1953617237/10000
+PREHOOK: query: select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/975282783/10000
+POSTHOOK: query: select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/975282783/10000
 0
-query: explain
+PREHOOK: query: explain
 select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
 
@@ -175,7 +197,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/181284866/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/321134863/10002 
             Reduce Output Operator
               sort order: 
               tag: -1
@@ -205,12 +227,21 @@
       limit: -1
 
 
-query: select count(1) from src x where x.key > 9999
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/2085444097/10000
+PREHOOK: query: select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1134965962/10000
+POSTHOOK: query: select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1134965962/10000
 0
-query: explain
+PREHOOK: query: explain
+select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
 select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x) key) 9999))))
 
@@ -263,7 +294,12 @@
       limit: -1
 
 
-query: select count(1) from src x where x.key > 9999
-Input: default/src
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/244114908/10000
+PREHOOK: query: select count(1) from src x where x.key > 9999
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2046297028/10000
+POSTHOOK: query: select count(1) from src x where x.key > 9999
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2046297028/10000
 0