Posted to commits@hive.apache.org by br...@apache.org on 2014/08/30 08:44:53 UTC

svn commit: r1621416 [9/11] - in /hive/branches/spark: ./ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ hbase-handler/src/te...

Modified: hive/branches/spark/ql/src/test/results/clientpositive/parquet_ctas.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/parquet_ctas.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/parquet_ctas.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/parquet_ctas.q.out Sat Aug 30 06:44:46 2014
@@ -39,9 +39,12 @@ POSTHOOK: Lineage: staging.value SIMPLE 
 PREHOOK: query: create table parquet_ctas stored as parquet as select * from staging
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@staging
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_ctas
 POSTHOOK: query: create table parquet_ctas stored as parquet as select * from staging
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@staging
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parquet_ctas
 PREHOOK: query: describe parquet_ctas
 PREHOOK: type: DESCTABLE
@@ -72,9 +75,12 @@ POSTHOOK: Input: default@parquet_ctas
 PREHOOK: query: create table parquet_ctas_advanced stored as parquet as select key+1,concat(value,"value") from staging
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@staging
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_ctas_advanced
 POSTHOOK: query: create table parquet_ctas_advanced stored as parquet as select key+1,concat(value,"value") from staging
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@staging
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parquet_ctas_advanced
 PREHOOK: query: describe parquet_ctas_advanced
 PREHOOK: type: DESCTABLE
@@ -105,9 +111,12 @@ POSTHOOK: Input: default@parquet_ctas_ad
 PREHOOK: query: create table parquet_ctas_alias stored as parquet as select key+1 as mykey,concat(value,"value") as myvalue from staging
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@staging
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_ctas_alias
 POSTHOOK: query: create table parquet_ctas_alias stored as parquet as select key+1 as mykey,concat(value,"value") as myvalue from staging
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@staging
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parquet_ctas_alias
 PREHOOK: query: describe parquet_ctas_alias
 PREHOOK: type: DESCTABLE
@@ -138,9 +147,12 @@ POSTHOOK: Input: default@parquet_ctas_al
 PREHOOK: query: create table parquet_ctas_mixed stored as parquet as select key,key+1,concat(value,"value") as myvalue from staging
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@staging
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_ctas_mixed
 POSTHOOK: query: create table parquet_ctas_mixed stored as parquet as select key,key+1,concat(value,"value") as myvalue from staging
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@staging
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parquet_ctas_mixed
 PREHOOK: query: describe parquet_ctas_mixed
 PREHOOK: type: DESCTABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/parquet_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/parquet_join.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/parquet_join.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/parquet_join.q.out Sat Aug 30 06:44:46 2014
@@ -39,16 +39,22 @@ POSTHOOK: Lineage: staging.value SIMPLE 
 PREHOOK: query: create table parquet_jointable1 stored as parquet as select * from staging
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@staging
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_jointable1
 POSTHOOK: query: create table parquet_jointable1 stored as parquet as select * from staging
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@staging
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parquet_jointable1
 PREHOOK: query: create table parquet_jointable2 stored as parquet as select key,key+1,concat(value,"value") as myvalue from staging
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@staging
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_jointable2
 POSTHOOK: query: create table parquet_jointable2 stored as parquet as select key,key+1,concat(value,"value") as myvalue from staging
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@staging
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parquet_jointable2
 PREHOOK: query: -- MR join
 

Modified: hive/branches/spark/ql/src/test/results/clientpositive/partition_decode_name.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/partition_decode_name.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/partition_decode_name.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/partition_decode_name.q.out Sat Aug 30 06:44:46 2014
@@ -6,6 +6,8 @@ from (select '2011-01-11', '2011-01-11+1
       select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sc
 POSTHOOK: query: create table sc as select * 
 from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
       union all 
@@ -14,6 +16,7 @@ from (select '2011-01-11', '2011-01-11+1
       select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sc
 PREHOOK: query: create table sc_part (key string) partitioned by (ts string) stored as rcfile
 PREHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/partition_special_char.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/partition_special_char.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/partition_special_char.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/partition_special_char.q.out Sat Aug 30 06:44:46 2014
@@ -6,6 +6,8 @@ from (select '2011-01-11', '2011-01-11+1
       select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sc
 POSTHOOK: query: create table sc as select * 
 from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
       union all 
@@ -14,6 +16,7 @@ from (select '2011-01-11', '2011-01-11+1
       select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sc
 PREHOOK: query: create table sc_part (key string) partitioned by (ts string) stored as rcfile
 PREHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/ppd_field_garbage.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/ppd_field_garbage.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/ppd_field_garbage.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/ppd_field_garbage.q.out Sat Aug 30 06:44:46 2014
@@ -11,9 +11,12 @@ POSTHOOK: Output: default@test_issue
 PREHOOK: query: CREATE VIEW v_test_issue AS SELECT fileid, i.user, test_c.user_c.age FROM test_issue LATERAL VIEW explode(infos) info AS i
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@test_issue
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v_test_issue
 POSTHOOK: query: CREATE VIEW v_test_issue AS SELECT fileid, i.user, test_c.user_c.age FROM test_issue LATERAL VIEW explode(infos) info AS i
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@test_issue
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@v_test_issue
 PREHOOK: query: -- dummy data
 LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_issue

Modified: hive/branches/spark/ql/src/test/results/clientpositive/ppd_union_view.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/ppd_union_view.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/ppd_union_view.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/ppd_union_view.q.out Sat Aug 30 06:44:46 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out Sat Aug 30 06:44:46 2014
@@ -772,6 +772,8 @@ from part 
 group by p_mfgr, p_brand
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@part
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mfgr_price_view
 POSTHOOK: query: -- 16. testViewAsTableInputToPTF
 create view IF NOT EXISTS mfgr_price_view as 
 select p_mfgr, p_brand, 
@@ -780,6 +782,7 @@ from part 
 group by p_mfgr, p_brand
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@part
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mfgr_price_view
 PREHOOK: query: select p_mfgr, p_brand, s, 
 sum(s) over w1  as s1

Modified: hive/branches/spark/ql/src/test/results/clientpositive/query_result_fileformat.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/query_result_fileformat.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/query_result_fileformat.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/query_result_fileformat.q.out Sat Aug 30 06:44:46 2014
@@ -4,12 +4,15 @@ PREHOOK: query: create table nzhang_test
 http://asdf' value from src limit 1
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_test1
 POSTHOOK: query: create table nzhang_test1 stored as sequencefile as select 'key1' as key, 'value
 1
 
 http://asdf' value from src limit 1
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_test1
 PREHOOK: query: select * from nzhang_test1
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/quotedid_basic.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/quotedid_basic.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/quotedid_basic.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/quotedid_basic.q.out Sat Aug 30 06:44:46 2014
@@ -359,12 +359,15 @@ select `x+1```, `y&y`
 from t4 where `x+1``` < '200'
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@t4
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v1
 POSTHOOK: query: -- view
 create view v1 as 
 select `x+1```, `y&y`
 from t4 where `x+1``` < '200'
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@t4
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@v1
 PREHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by  `y&y`)
 from v1

Modified: hive/branches/spark/ql/src/test/results/clientpositive/rcfile_createas1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/rcfile_createas1.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/rcfile_createas1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/rcfile_createas1.q.out Sat Aug 30 06:44:46 2014
@@ -105,7 +105,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
           output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
           serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          name: rcfile_createas1b
+          name: default.rcfile_createas1b
 
   Stage: Stage-2
     Stats-Aggr Operator
@@ -134,6 +134,8 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@rcfile_createas1a
 PREHOOK: Input: default@rcfile_createas1a@ds=1
 PREHOOK: Input: default@rcfile_createas1a@ds=2
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_createas1b
 POSTHOOK: query: CREATE TABLE rcfile_createas1b
     STORED AS RCFILE AS 
         SELECT key, value, PMOD(HASH(key), 50) as part
@@ -142,6 +144,7 @@ POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@rcfile_createas1a
 POSTHOOK: Input: default@rcfile_createas1a@ds=1
 POSTHOOK: Input: default@rcfile_createas1a@ds=2
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@rcfile_createas1b
 PREHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/rcfile_default_format.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/rcfile_default_format.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/rcfile_default_format.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/rcfile_default_format.q.out Sat Aug 30 06:44:46 2014
@@ -39,9 +39,12 @@ Storage Desc Params:	 	 
 PREHOOK: query: CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_default_format_ctas
 POSTHOOK: query: CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@rcfile_default_format_ctas
 PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas
 PREHOOK: type: DESCTABLE
@@ -134,9 +137,12 @@ Storage Desc Params:	 	 
 PREHOOK: query: CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@rcfile_default_format_ctas
+PREHOOK: Output: database:default
+PREHOOK: Output: default@textfile_default_format_ctas
 POSTHOOK: query: CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@textfile_default_format_ctas
 PREHOOK: query: DESCRIBE FORMATTED textfile_default_format_ctas
 PREHOOK: type: DESCTABLE
@@ -177,9 +183,12 @@ Storage Desc Params:	 	 
 PREHOOK: query: CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@rcfile_default_format_ctas
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_default_format_ctas_default_serde
 POSTHOOK: query: CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@rcfile_default_format_ctas_default_serde
 PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde
 PREHOOK: type: DESCTABLE
@@ -258,9 +267,12 @@ Storage Desc Params:	 	 
 PREHOOK: query: CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@rcfile_default_format_ctas
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcfile_ctas_default_serde
 POSTHOOK: query: CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@rcfile_ctas_default_serde
 PREHOOK: query: DESCRIBE FORMATTED rcfile_ctas_default_serde
 PREHOOK: type: DESCTABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out Sat Aug 30 06:44:46 2014
@@ -48,19 +48,25 @@ PREHOOK: Input: default@sih_i_part
 PREHOOK: Input: default@sih_i_part@p=1
 PREHOOK: Input: default@sih_i_part@p=2
 PREHOOK: Input: default@sih_i_part@p=3
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sih_src
 POSTHOOK: query: create table sih_src as select key, value from sih_i_part order by key, value
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@sih_i_part
 POSTHOOK: Input: default@sih_i_part@p=1
 POSTHOOK: Input: default@sih_i_part@p=2
 POSTHOOK: Input: default@sih_i_part@p=3
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sih_src
 PREHOOK: query: create table sih_src2 as select key, value from sih_src order by key, value
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@sih_src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sih_src2
 POSTHOOK: query: create table sih_src2 as select key, value from sih_src order by key, value
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@sih_src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sih_src2
 PREHOOK: query: -- Relaxing hive.exec.mode.local.auto.input.files.max=1.
 -- Hadoop20 will not generate more splits than there are files (one).

Modified: hive/branches/spark/ql/src/test/results/clientpositive/semijoin.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/semijoin.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/semijoin.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/semijoin.q.out Sat Aug 30 06:44:46 2014
@@ -1,9 +1,12 @@
 PREHOOK: query: create table t1 as select cast(key as int) key, value from src where key <= 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
 POSTHOOK: query: create table t1 as select cast(key as int) key, value from src where key <= 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t1
 PREHOOK: query: select * from t1 sort by key
 PREHOOK: type: QUERY
@@ -27,9 +30,12 @@ POSTHOOK: Input: default@t1
 PREHOOK: query: create table t2 as select cast(2*key as int) key, value from t1
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@t1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t2
 POSTHOOK: query: create table t2 as select cast(2*key as int) key, value from t1
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@t1
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t2
 PREHOOK: query: select * from t2 sort by key
 PREHOOK: type: QUERY
@@ -54,10 +60,13 @@ PREHOOK: query: create table t3 as selec
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@t1
 PREHOOK: Input: default@t2
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t3
 POSTHOOK: query: create table t3 as select * from (select * from t1 union all select * from t2) b
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@t1
 POSTHOOK: Input: default@t2
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t3
 PREHOOK: query: select * from t3 sort by key, value
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/serde_regex.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/serde_regex.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/serde_regex.q.out Sat Aug 30 06:44:46 2014
@@ -45,7 +45,7 @@ STAGE PLANS:
           serde name: org.apache.hadoop.hive.serde2.RegexSerDe
           serde properties:
             input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ "]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
-          name: serde_regex
+          name: default.serde_regex
 
 PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
@@ -160,7 +160,7 @@ STAGE PLANS:
           serde name: org.apache.hadoop.hive.serde2.RegexSerDe
           serde properties:
             input.regex ([^ ]*) ([^ ]*)
-          name: serde_regex1
+          name: default.serde_regex1
 
 PREHOOK: query: CREATE TABLE serde_regex1(
   key decimal(38,18),

Modified: hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_db_table.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_db_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_db_table.q.out Sat Aug 30 06:44:46 2014
@@ -17,11 +17,10 @@ tmp_feng
 PREHOOK: query: CREATE TABLE tmp_feng.tmp_showcrt (key string, value int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:tmp_feng
-PREHOOK: Output: tmp_feng@tmp_feng.tmp_showcrt
+PREHOOK: Output: tmp_feng@tmp_showcrt
 POSTHOOK: query: CREATE TABLE tmp_feng.tmp_showcrt (key string, value int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:tmp_feng
-POSTHOOK: Output: tmp_feng@tmp_feng.tmp_showcrt
 POSTHOOK: Output: tmp_feng@tmp_showcrt
 PREHOOK: query: USE default
 PREHOOK: type: SWITCHDATABASE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_temp_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_temp_table.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_temp_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_temp_table.q.out Sat Aug 30 06:44:46 2014
@@ -7,12 +7,11 @@ POSTHOOK: Output: database:tmpdb
 PREHOOK: query: create temporary table tmpdb.tmp1 (c1 string, c2 string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:tmpdb
-PREHOOK: Output: tmpdb@tmpdb.tmp1
+PREHOOK: Output: tmpdb@tmp1
 POSTHOOK: query: create temporary table tmpdb.tmp1 (c1 string, c2 string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:tmpdb
 POSTHOOK: Output: tmpdb@tmp1
-POSTHOOK: Output: tmpdb@tmpdb.tmp1
 PREHOOK: query: show create table tmpdb.tmp1
 PREHOOK: type: SHOW_CREATETABLE
 PREHOOK: Input: tmpdb@tmp1

Modified: hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_view.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_view.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_view.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/show_create_table_view.q.out Sat Aug 30 06:44:46 2014
@@ -3,11 +3,14 @@ PREHOOK: query: -- Test SHOW CREATE TABL
 CREATE VIEW tmp_copy_src AS SELECT * FROM src
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tmp_copy_src
 POSTHOOK: query: -- Test SHOW CREATE TABLE on a view name.
 
 CREATE VIEW tmp_copy_src AS SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tmp_copy_src
 PREHOOK: query: SHOW CREATE TABLE tmp_copy_src
 PREHOOK: type: SHOW_CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_noskew.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_noskew.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_noskew.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_noskew.q.out Sat Aug 30 06:44:46 2014
@@ -147,7 +147,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: noskew
+          name: default.noskew
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -155,9 +155,12 @@ STAGE PLANS:
 PREHOOK: query: create table noskew as select a.* from src a join src b on a.key=b.key order by a.key limit 30
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@noskew
 POSTHOOK: query: create table noskew as select a.* from src a join src b on a.key=b.key order by a.key limit 30
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@noskew
 PREHOOK: query: select * from noskew
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin9.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin9.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/smb_mapjoin9.q.out Sat Aug 30 06:44:46 2014
@@ -352,7 +352,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: smb_mapjoin9_results
+          name: default.smb_mapjoin9_results
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -397,6 +397,8 @@ PREHOOK: Input: default@hive_test_smb_bu
 PREHOOK: Input: default@hive_test_smb_bucket1@ds=2010-10-15
 PREHOOK: Input: default@hive_test_smb_bucket2
 PREHOOK: Input: default@hive_test_smb_bucket2@ds=2010-10-15
+PREHOOK: Output: database:default
+PREHOOK: Output: default@smb_mapjoin9_results
 POSTHOOK: query: create table smb_mapjoin9_results as
 SELECT /* + MAPJOIN(b) */ b.key as k1, b.value, b.ds, a.key as k2
 FROM hive_test_smb_bucket1 a JOIN 
@@ -407,6 +409,7 @@ POSTHOOK: Input: default@hive_test_smb_b
 POSTHOOK: Input: default@hive_test_smb_bucket1@ds=2010-10-15
 POSTHOOK: Input: default@hive_test_smb_bucket2
 POSTHOOK: Input: default@hive_test_smb_bucket2@ds=2010-10-15
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@smb_mapjoin9_results
 PREHOOK: query: drop table smb_mapjoin9_results
 PREHOOK: type: DROPTABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/stats5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/stats5.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/stats5.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/stats5.q.out Sat Aug 30 06:44:46 2014
@@ -1,9 +1,12 @@
 PREHOOK: query: create table analyze_src as select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@analyze_src
 POSTHOOK: query: create table analyze_src as select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@analyze_src
 PREHOOK: query: explain analyze table analyze_src compute statistics
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/stats_counter.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/stats_counter.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/stats_counter.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/stats_counter.q.out Sat Aug 30 06:44:46 2014
@@ -2,10 +2,13 @@ PREHOOK: query: -- by analyze
 create table dummy1 as select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dummy1
 POSTHOOK: query: -- by analyze
 create table dummy1 as select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dummy1
 PREHOOK: query: analyze table dummy1 compute statistics
 PREHOOK: type: QUERY
@@ -55,10 +58,13 @@ PREHOOK: query: -- by autogather
 create table dummy2 as select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dummy2
 POSTHOOK: query: -- by autogather
 create table dummy2 as select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dummy2
 PREHOOK: query: desc formatted dummy2
 PREHOOK: type: DESCTABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/str_to_map.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/str_to_map.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/str_to_map.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/str_to_map.q.out Sat Aug 30 06:44:46 2014
@@ -187,9 +187,12 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tbl_s2m
 POSTHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tbl_s2m
 PREHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists.q.out Sat Aug 30 06:44:46 2014
@@ -125,6 +125,8 @@ where exists
   where b.value = a.value  and a.key = b.key and a.value > 'val_9')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cv1
 POSTHOOK: query: -- view test
 create view cv1 as 
 select * 
@@ -135,6 +137,7 @@ where exists
   where b.value = a.value  and a.key = b.key and a.value > 'val_9')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cv1
 PREHOOK: query: select * from cv1
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists_having.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists_having.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists_having.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/subquery_exists_having.q.out Sat Aug 30 06:44:46 2014
@@ -320,6 +320,8 @@ having exists
   )
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cv1
 POSTHOOK: query: -- view test
 create view cv1 as 
 select b.key, count(*) as c
@@ -332,6 +334,7 @@ having exists
   )
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cv1
 PREHOOK: query: select * from cv1
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out Sat Aug 30 06:44:46 2014
@@ -1413,22 +1413,28 @@ create view T1_v as 
 select key from src where key <'11'
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T1_v
 POSTHOOK: query: -- null check
 create view T1_v as 
 select key from src where key <'11'
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T1_v
 PREHOOK: query: create view T2_v as 
 select case when key > '104' then null else key end as key from T1_v
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Input: default@t1_v
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T2_v
 POSTHOOK: query: create view T2_v as 
 select case when key > '104' then null else key end as key from T1_v
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@t1_v
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@T2_v
 Warning: Shuffle Join JOIN[24][tables = [t1_v, sq_1_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: explain

Modified: hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/subquery_views.q.out Sat Aug 30 06:44:46 2014
@@ -10,6 +10,8 @@ where exists
   where b.value = a.value  and a.key = b.key and a.value > 'val_9')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cv1
 POSTHOOK: query: -- SORT_QUERY_RESULTS
 
 -- exists test
@@ -22,6 +24,7 @@ where exists
   where b.value = a.value  and a.key = b.key and a.value > 'val_9')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cv1
 PREHOOK: query: select * 
 from cv1 where cv1.key in (select key from cv1 c where c.key > '95')
@@ -51,6 +54,8 @@ where b.key not in
   )
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cv2
 POSTHOOK: query: -- not in test
 create view cv2 as 
 select * 
@@ -62,6 +67,7 @@ where b.key not in
   )
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cv2
 Warning: Shuffle Join JOIN[42][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-5:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[18][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
@@ -416,6 +422,8 @@ group by key, value
 having count(*) in (select count(*) from src s1 where s1.key > '9' group by s1.key )
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cv3
 POSTHOOK: query: -- in where + having
 create view cv3 as
 select key, value, count(*) 
@@ -425,6 +433,7 @@ group by key, value
 having count(*) in (select count(*) from src s1 where s1.key > '9' group by s1.key )
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cv3
 PREHOOK: query: select * from cv3
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/symlink_text_input_format.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/symlink_text_input_format.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/symlink_text_input_format.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/symlink_text_input_format.q.out Sat Aug 30 06:44:46 2014
@@ -18,7 +18,7 @@ STAGE PLANS:
           columns: key string, value string
           input format: org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-          name: symlink_text_input_format
+          name: default.symlink_text_input_format
 
 PREHOOK: query: CREATE TABLE symlink_text_input_format (key STRING, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
 PREHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/temp_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/temp_table.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/temp_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/temp_table.q.out Sat Aug 30 06:44:46 2014
@@ -59,7 +59,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: foo
+          name: default.foo
           isTemporary: true
 
   Stage: Stage-2
@@ -98,9 +98,12 @@ STAGE PLANS:
 PREHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@foo
 POSTHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@foo
 PREHOOK: query: EXPLAIN CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
 PREHOOK: type: CREATETABLE_AS_SELECT
@@ -163,7 +166,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: bar
+          name: default.bar
           isTemporary: true
 
   Stage: Stage-2
@@ -202,9 +205,12 @@ STAGE PLANS:
 PREHOOK: query: CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bar
 POSTHOOK: query: CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@bar
 PREHOOK: query: DESCRIBE foo
 PREHOOK: type: DESCTABLE
@@ -452,9 +458,12 @@ POSTHOOK: type: SHOWTABLES
 PREHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM default.foo
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@foo
+PREHOOK: Output: database:two
+PREHOOK: Output: two@foo
 POSTHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM default.foo
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@foo
+POSTHOOK: Output: database:two
 POSTHOOK: Output: two@foo
 PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES

Modified: hive/branches/spark/ql/src/test/results/clientpositive/temp_table_gb1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/temp_table_gb1.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/temp_table_gb1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/temp_table_gb1.q.out Sat Aug 30 06:44:46 2014
@@ -11,9 +11,12 @@ POSTHOOK: Output: default@dest_g2
 PREHOOK: query: CREATE TEMPORARY TABLE src_temp AS SELECT * FROM src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_temp
 POSTHOOK: query: CREATE TEMPORARY TABLE src_temp AS SELECT * FROM src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_temp
 PREHOOK: query: FROM src_temp
 INSERT OVERWRITE TABLE dest_g2 SELECT substr(src_temp.key,1,1), count(DISTINCT substr(src_temp.value,5)), concat(substr(src_temp.key,1,1),sum(substr(src_temp.value,5))) GROUP BY substr(src_temp.key,1,1)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/temp_table_join1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/temp_table_join1.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/temp_table_join1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/temp_table_join1.q.out Sat Aug 30 06:44:46 2014
@@ -1,16 +1,22 @@
 PREHOOK: query: CREATE TABLE src_nontemp AS SELECT * FROM src limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_nontemp
 POSTHOOK: query: CREATE TABLE src_nontemp AS SELECT * FROM src limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_nontemp
 PREHOOK: query: CREATE TEMPORARY TABLE src_temp AS SELECT * FROM src limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_temp
 POSTHOOK: query: CREATE TEMPORARY TABLE src_temp AS SELECT * FROM src limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_temp
 PREHOOK: query: -- Non temp table join
 EXPLAIN

Modified: hive/branches/spark/ql/src/test/results/clientpositive/temp_table_names.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/temp_table_names.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/temp_table_names.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/temp_table_names.q.out Sat Aug 30 06:44:46 2014
@@ -1,12 +1,11 @@
 PREHOOK: query: -- Test temp tables with upper/lower case names
 create temporary table Default.Temp_Table_Names (C1 string, c2 string)
 PREHOOK: type: CREATETABLE
-PREHOOK: Output: Default@Default.Temp_Table_Names
+PREHOOK: Output: Default@Temp_Table_Names
 PREHOOK: Output: database:default
 POSTHOOK: query: -- Test temp tables with upper/lower case names
 create temporary table Default.Temp_Table_Names (C1 string, c2 string)
 POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: Default@Default.Temp_Table_Names
 POSTHOOK: Output: Default@Temp_Table_Names
 POSTHOOK: Output: database:default
 PREHOOK: query: show tables 'Temp_Table*'

Modified: hive/branches/spark/ql/src/test/results/clientpositive/temp_table_precedence.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/temp_table_precedence.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/temp_table_precedence.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/temp_table_precedence.q.out Sat Aug 30 06:44:46 2014
@@ -8,13 +8,12 @@ PREHOOK: query: -- Create non-temp table
 create table ttp.tab1 (a1 string, a2 string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:ttp
-PREHOOK: Output: ttp@ttp.tab1
+PREHOOK: Output: ttp@tab1
 POSTHOOK: query: -- Create non-temp tables
 create table ttp.tab1 (a1 string, a2 string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:ttp
 POSTHOOK: Output: ttp@tab1
-POSTHOOK: Output: ttp@ttp.tab1
 PREHOOK: query: insert overwrite table ttp.tab1 select * from src where key = 5 limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -47,12 +46,11 @@ POSTHOOK: Input: ttp@tab1
 PREHOOK: query: create table ttp.tab2 (b1 string, b2 string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:ttp
-PREHOOK: Output: ttp@ttp.tab2
+PREHOOK: Output: ttp@tab2
 POSTHOOK: query: create table ttp.tab2 (b1 string, b2 string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:ttp
 POSTHOOK: Output: ttp@tab2
-POSTHOOK: Output: ttp@ttp.tab2
 PREHOOK: query: insert overwrite table ttp.tab2 select * from src where key = 2 limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -84,13 +82,12 @@ PREHOOK: query: -- Now create temp table
 create temporary table ttp.tab1 (c1 int, c2 string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:ttp
-PREHOOK: Output: ttp@ttp.tab1
+PREHOOK: Output: ttp@tab1
 POSTHOOK: query: -- Now create temp table with same name
 create temporary table ttp.tab1 (c1 int, c2 string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:ttp
 POSTHOOK: Output: ttp@tab1
-POSTHOOK: Output: ttp@ttp.tab1
 PREHOOK: query: insert overwrite table ttp.tab1 select * from src where key = 0 limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src

Modified: hive/branches/spark/ql/src/test/results/clientpositive/temp_table_subquery1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/temp_table_subquery1.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/temp_table_subquery1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/temp_table_subquery1.q.out Sat Aug 30 06:44:46 2014
@@ -1,9 +1,12 @@
 PREHOOK: query: create temporary table src_temp as select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_temp
 POSTHOOK: query: create temporary table src_temp as select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_temp
 PREHOOK: query: -- subquery exists
 select *

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out Sat Aug 30 06:44:46 2014
@@ -2,21 +2,27 @@ PREHOOK: query: create table A as
 select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@A
 POSTHOOK: query: create table A as
 select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@A
 PREHOOK: query: create table B as
 select * from src
 limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@B
 POSTHOOK: query: create table B as
 select * from src
 limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@B
 Warning: Shuffle Join JOIN[4][tables = [a, b]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: explain select * from A join B

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out Sat Aug 30 06:44:46 2014
@@ -2,21 +2,27 @@ PREHOOK: query: create table A as
 select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@A
 POSTHOOK: query: create table A as
 select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@A
 PREHOOK: query: create table B as
 select * from src
 limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@B
 POSTHOOK: query: create table B as
 select * from src
 limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@B
 Warning: Map Join MAPJOIN[7][bigTable=a] in task 'Map 2' is a cross product
 PREHOOK: query: explain select * from A join B

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/ctas.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/ctas.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/ctas.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/ctas.q.out Sat Aug 30 06:44:46 2014
@@ -91,7 +91,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: nzhang_CTAS1
+          name: default.nzhang_CTAS1
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -105,9 +105,12 @@ STAGE PLANS:
 PREHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_CTAS1
 POSTHOOK: query: create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_CTAS1
 PREHOOK: query: select * from nzhang_CTAS1
 PREHOOK: type: QUERY
@@ -236,7 +239,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: nzhang_ctas2
+          name: default.nzhang_ctas2
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -250,9 +253,12 @@ STAGE PLANS:
 PREHOOK: query: create table nzhang_ctas2 as select * from src sort by key, value limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_ctas2
 POSTHOOK: query: create table nzhang_ctas2 as select * from src sort by key, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_ctas2
 PREHOOK: query: select * from nzhang_ctas2
 PREHOOK: type: QUERY
@@ -381,7 +387,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
           output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
           serde name: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          name: nzhang_ctas3
+          name: default.nzhang_ctas3
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -395,9 +401,12 @@ STAGE PLANS:
 PREHOOK: query: create table nzhang_ctas3 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb  from src sort by half_key, conb limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_ctas3
 POSTHOOK: query: create table nzhang_ctas3 row format serde "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" stored as RCFile as select key/2 half_key, concat(value, "_con") conb  from src sort by half_key, conb limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_ctas3
 PREHOOK: query: select * from nzhang_ctas3
 PREHOOK: type: QUERY
@@ -592,7 +601,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: nzhang_ctas4
+          name: default.nzhang_ctas4
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -606,9 +615,12 @@ STAGE PLANS:
 PREHOOK: query: create table nzhang_ctas4 row format delimited fields terminated by ',' stored as textfile as select key, value from src sort by key, value limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_ctas4
 POSTHOOK: query: create table nzhang_ctas4 row format delimited fields terminated by ',' stored as textfile as select key, value from src sort by key, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_ctas4
 PREHOOK: query: select * from nzhang_ctas4
 PREHOOK: type: QUERY
@@ -854,7 +866,7 @@ STAGE PLANS:
 
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: nzhang_ctas5
+          name: default.nzhang_ctas5
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -869,9 +881,12 @@ STAGE PLANS:
 PREHOOK: query: create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_ctas5
 POSTHOOK: query: create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_ctas5
 PREHOOK: query: create table nzhang_ctas6 (key string, `to` string)
 PREHOOK: type: CREATETABLE
@@ -894,7 +909,10 @@ POSTHOOK: Lineage: nzhang_ctas6.to SIMPL
 PREHOOK: query: create table nzhang_ctas7 as select key, `to` from nzhang_ctas6
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@nzhang_ctas6
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_ctas7
 POSTHOOK: query: create table nzhang_ctas7 as select key, `to` from nzhang_ctas6
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@nzhang_ctas6
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_ctas7
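
Note on the ctas.q.out hunks above: they make two related updates to the golden output. The CTAS target in the plan is now printed with its database qualifier (default.nzhang_ctas4 rather than nzhang_ctas4), and the pre/post execution hooks now report both the database and the new table as write entities. A minimal sketch of one of the statements exercised above, with the entity lines this diff now expects (masked paths and other hook lines omitted):

-- CTAS into the current (default) database; both write entities below
-- are now expected from the semantic-analysis hooks.
create table nzhang_ctas4
  row format delimited fields terminated by ','
  stored as textfile
as
select key, value from src sort by key, value limit 10;
--   PREHOOK: Output: database:default
--   PREHOOK: Output: default@nzhang_ctas4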

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/dynpart_sort_opt_vectorization.q.out Sat Aug 30 06:44:46 2014
@@ -14,6 +14,7 @@ PREHOOK: query: create table over1k(
        fields terminated by '|'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k
 POSTHOOK: query: create table over1k(
            t tinyint,
            si smallint,
@@ -42,6 +43,7 @@ POSTHOOK: Output: default@over1k
 PREHOOK: query: create table over1k_orc like over1k
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_orc
 POSTHOOK: query: create table over1k_orc like over1k
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -81,6 +83,7 @@ PREHOOK: query: create table over1k_part
        partitioned by (ds string, t tinyint) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_orc
 POSTHOOK: query: create table over1k_part_orc(
            si smallint,
            i int,
@@ -93,6 +96,7 @@ POSTHOOK: Output: default@over1k_part_or
 PREHOOK: query: create table over1k_part_limit_orc like over1k_part_orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_limit_orc
 POSTHOOK: query: create table over1k_part_limit_orc like over1k_part_orc
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -114,6 +118,7 @@ PREHOOK: query: create table over1k_part
        clustered by (si) into 4 buckets stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_buck_orc
 POSTHOOK: query: create table over1k_part_buck_orc(
            si smallint,
            i int,
@@ -134,6 +139,7 @@ PREHOOK: query: create table over1k_part
        sorted by (f) into 4 buckets stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_buck_sort_orc
 POSTHOOK: query: create table over1k_part_buck_sort_orc(
            si smallint,
            i int,
@@ -195,6 +201,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col4 (type: tinyint)
                   Statistics: Num rows: 1048 Data size: 310873 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+            Execution mode: vectorized
         Reducer 3 
             Reduce Operator Tree:
               Extract
@@ -280,6 +287,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col4 (type: tinyint)
                     Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+            Execution mode: vectorized
         Reducer 3 
             Reduce Operator Tree:
               Extract
@@ -564,6 +572,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col4 (type: tinyint)
                   Statistics: Num rows: 1048 Data size: 310873 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+            Execution mode: vectorized
         Reducer 3 
             Reduce Operator Tree:
               Extract
@@ -649,6 +658,7 @@ STAGE PLANS:
                     Map-reduce partition columns: _col4 (type: tinyint)
                     Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+            Execution mode: vectorized
         Reducer 3 
             Reduce Operator Tree:
               Extract
@@ -1292,6 +1302,7 @@ create table over1k_part2_orc(
        partitioned by (ds string, t tinyint)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part2_orc
 POSTHOOK: query: -- tests for HIVE-6883
 create table over1k_part2_orc(
            si smallint,
@@ -1419,6 +1430,7 @@ STAGE PLANS:
                   Map-reduce partition columns: _col4 (type: tinyint)
                   Statistics: Num rows: 1048 Data size: 310873 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: smallint), _col1 (type: int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
+            Execution mode: vectorized
         Reducer 3 
             Reduce Operator Tree:
               Extract
@@ -1762,6 +1774,7 @@ create table over1k_part_buck_sort2_orc(
        sorted by (f) into 1 buckets
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k_part_buck_sort2_orc
 POSTHOOK: query: -- hadoop-1 does not honor number of reducers in local mode. There is always only 1 reducer irrespective of the number of buckets.
 -- Hence all records go to one bucket and all other buckets will be empty. Similar to HIVE-6867. However, hadoop-2 honors number
 -- of reducers and records are spread across all reducers. To avoid this inconsistency we will make number of buckets to 1 for this test.
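
Note on dynpart_sort_opt_vectorization.q.out: besides the extra table-creation write entities, this file gains "Execution mode: vectorized" on several map vertices, i.e. the map side of these dynamic-partition insert plans now runs vectorized over the ORC source. The .q file's settings are not part of these hunks, so the following is only an assumed sketch of the kind of session this test runs in:

-- assumed settings; the actual test script is not shown in this diff
set hive.vectorized.execution.enabled=true;      -- produces the "Execution mode: vectorized" plan lines
set hive.optimize.sort.dynamic.partition=true;   -- the sorted dynamic-partition optimization (HIVE-6883) referenced above
set hive.exec.dynamic.partition.mode=nonstrict;

-- illustrative insert in the shape of the plans above; column names si, i, b, f
-- are assumed from the create table hunks, and the static ds value is arbitrary
insert overwrite table over1k_part_orc partition (ds = 'today', t)
select si, i, b, f, t from over1k_orc;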

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/insert1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/insert1.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/insert1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/insert1.q.out Sat Aug 30 06:44:46 2014
@@ -143,12 +143,11 @@ POSTHOOK: Output: database:x
 PREHOOK: query: create table x.insert1(key int, value string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:x
-PREHOOK: Output: x@x.insert1
+PREHOOK: Output: x@insert1
 POSTHOOK: query: create table x.insert1(key int, value string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:x
 POSTHOOK: Output: x@insert1
-POSTHOOK: Output: x@x.insert1
 PREHOOK: query: explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
 PREHOOK: type: QUERY
 POSTHOOK: query: explain insert into table x.INSERT1 select a.key, a.value from insert2 a WHERE (a.key=-1)
@@ -406,11 +405,10 @@ POSTHOOK: Output: database:db1
 PREHOOK: query: CREATE TABLE db1.result(col1 STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:db1
-PREHOOK: Output: db1@db1.result
+PREHOOK: Output: db1@result
 POSTHOOK: query: CREATE TABLE db1.result(col1 STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:db1
-POSTHOOK: Output: db1@db1.result
 POSTHOOK: Output: db1@result
 PREHOOK: query: INSERT OVERWRITE TABLE db1.result SELECT 'db1_insert1' FROM src LIMIT 1
 PREHOOK: type: QUERY
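
Note on insert1.q.out: this change is a correction rather than an addition. Creating a table through a database-qualified name used to be reported with a malformed second entity (x@x.insert1, db1@db1.result); after this change only the properly scoped entity remains. Sketch of the statement and the corrected entities, taken directly from the hunk above (the create database for x is assumed to happen earlier in the test):

create database if not exists x;                    -- assumed earlier setup
create table x.insert1(key int, value string) stored as textfile;
--   PREHOOK: Output: database:x
--   PREHOOK: Output: x@insert1     -- the bogus x@x.insert1 entity is gone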

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_analyze.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_analyze.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_analyze.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_analyze.q.out Sat Aug 30 06:44:46 2014
@@ -105,7 +105,7 @@ Table Parameters:	 	 
 	numFiles            	1                   
 	numRows             	100                 
 	rawDataSize         	52600               
-	totalSize           	3042                
+	totalSize           	3098                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -195,7 +195,7 @@ Table Parameters:	 	 
 	numFiles            	1                   
 	numRows             	100                 
 	rawDataSize         	52600               
-	totalSize           	3042                
+	totalSize           	3098                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -309,7 +309,7 @@ Partition Parameters:	 	 
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	1962                
+	totalSize           	2016                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -354,7 +354,7 @@ Partition Parameters:	 	 
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	22050               
-	totalSize           	1981                
+	totalSize           	2036                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -456,7 +456,7 @@ Partition Parameters:	 	 
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	1962                
+	totalSize           	2016                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -501,7 +501,7 @@ Partition Parameters:	 	 
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	22050               
-	totalSize           	1981                
+	totalSize           	2036                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -621,7 +621,7 @@ Partition Parameters:	 	 
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	21980               
-	totalSize           	4746                
+	totalSize           	4955                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -666,7 +666,7 @@ Partition Parameters:	 	 
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	22048               
-	totalSize           	4829                
+	totalSize           	5046                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -774,7 +774,7 @@ Partition Parameters:	 	 
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	21980               
-	totalSize           	4746                
+	totalSize           	4955                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -819,7 +819,7 @@ Partition Parameters:	 	 
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	22048               
-	totalSize           	4829                
+	totalSize           	5046                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -984,7 +984,7 @@ Partition Parameters:	 	 
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	1962                
+	totalSize           	2016                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
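
Note on orc_analyze.q.out: every hunk touches only the totalSize parameter (for example 3042 to 3098 at the table level and 4746 to 4955 for the bucketed partitions), while numFiles, numRows and rawDataSize are unchanged. That pattern is consistent with the ORC files simply being written slightly larger, presumably by an updated ORC writer, rather than with any change in how statistics are gathered. For reference, a sketch of how these parameters are produced and displayed, using a hypothetical ORC table named orc_people:

-- totalSize is the physical size of the ORC files and moves whenever the
-- writer's output changes; numRows and rawDataSize come from the row data.
analyze table orc_people compute statistics;
describe formatted orc_people;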

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out Sat Aug 30 06:44:46 2014
@@ -772,6 +772,8 @@ from part 
 group by p_mfgr, p_brand
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@part
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mfgr_price_view
 POSTHOOK: query: -- 16. testViewAsTableInputToPTF
 create view IF NOT EXISTS mfgr_price_view as 
 select p_mfgr, p_brand, 
@@ -780,6 +782,7 @@ from part 
 group by p_mfgr, p_brand
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@part
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mfgr_price_view
 PREHOOK: query: select p_mfgr, p_brand, s, 
 sum(s) over w1  as s1
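
Note on ptf.q.out (and subquery_exists.q.out below): the same entity reporting is extended to CREATE VIEW, so the view and its database now appear as outputs next to the existing table inputs. A sketch of the view from the hunk above; its select list is partly elided in the diff, so the aggregate used here is an assumption:

create view if not exists mfgr_price_view as
select p_mfgr, p_brand,
       sum(p_retailprice) as s    -- assumed aggregate; only the alias s is visible in the hunks
from part
group by p_mfgr, p_brand;
--   PREHOOK: Output: database:default
--   PREHOOK: Output: default@mfgr_price_view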

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/stats_counter.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/stats_counter.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/stats_counter.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/stats_counter.q.out Sat Aug 30 06:44:46 2014
@@ -2,10 +2,13 @@ PREHOOK: query: -- by analyze
 create table dummy1 as select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dummy1
 POSTHOOK: query: -- by analyze
 create table dummy1 as select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dummy1
 PREHOOK: query: analyze table dummy1 compute statistics
 PREHOOK: type: QUERY
@@ -55,10 +58,13 @@ PREHOOK: query: -- by autogather
 create table dummy2 as select * from src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dummy2
 POSTHOOK: query: -- by autogather
 create table dummy2 as select * from src
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dummy2
 PREHOOK: query: desc formatted dummy2
 PREHOOK: type: DESCTABLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_exists.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_exists.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_exists.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_exists.q.out Sat Aug 30 06:44:46 2014
@@ -133,6 +133,8 @@ where exists
   where b.value = a.value  and a.key = b.key and a.value > 'val_9')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@cv1
 POSTHOOK: query: -- view test
 create view cv1 as 
 select * 
@@ -143,6 +145,7 @@ where exists
   where b.value = a.value  and a.key = b.key and a.value > 'val_9')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cv1
 PREHOOK: query: select * from cv1
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out Sat Aug 30 06:44:46 2014
@@ -46,7 +46,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: foo
+          name: default.foo
           isTemporary: true
 
   Stage: Stage-3
@@ -61,9 +61,12 @@ STAGE PLANS:
 PREHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@foo
 POSTHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@foo
 PREHOOK: query: EXPLAIN CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
 PREHOOK: type: CREATETABLE_AS_SELECT
@@ -113,7 +116,7 @@ STAGE PLANS:
 #### A masked pattern was here ####
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: bar
+          name: default.bar
           isTemporary: true
 
   Stage: Stage-3
@@ -128,9 +131,12 @@ STAGE PLANS:
 PREHOOK: query: CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bar
 POSTHOOK: query: CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@bar
 PREHOOK: query: DESCRIBE foo
 PREHOOK: type: DESCTABLE
@@ -378,9 +384,12 @@ POSTHOOK: type: SHOWTABLES
 PREHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM default.foo
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@foo
+PREHOOK: Output: database:two
+PREHOOK: Output: two@foo
 POSTHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM default.foo
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@foo
+POSTHOOK: Output: database:two
 POSTHOOK: Output: two@foo
 PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
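
Note on temp_table.q.out: it picks up the same write entities for CREATE TEMPORARY TABLE ... AS SELECT, including database-qualified plan names (default.foo, default.bar) and, in the last hunk, a temporary table created while a different database is current: the new foo lands in database two even though it reads default.foo. Sketch of that last case; the database switch is assumed, since the USE statement falls outside the hunks shown:

use two;                                   -- assumed: current database at this point in the test
create temporary table foo as select * from default.foo;
--   PREHOOK: Output: database:two
--   PREHOOK: Output: two@foo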

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_dml.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_dml.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_dml.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_dml.q.out Sat Aug 30 06:44:46 2014
@@ -82,7 +82,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          name: tmp_src
+          name: default.tmp_src
 
   Stage: Stage-3
     Stats-Aggr Operator
@@ -96,9 +96,12 @@ STAGE PLANS:
 PREHOOK: query: CREATE TABLE tmp_src AS SELECT * FROM (SELECT value, count(value) AS cnt FROM src GROUP BY value) f1 ORDER BY cnt
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tmp_src
 POSTHOOK: query: CREATE TABLE tmp_src AS SELECT * FROM (SELECT value, count(value) AS cnt FROM src GROUP BY value) f1 ORDER BY cnt
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tmp_src
 PREHOOK: query: SELECT * FROM tmp_src
 PREHOOK: type: QUERY
@@ -1478,10 +1481,13 @@ PREHOOK: query: -- create empty table
 CREATE TABLE empty STORED AS orc AS SELECT * FROM tmp_src_part WHERE d = -1000
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@tmp_src_part
+PREHOOK: Output: database:default
+PREHOOK: Output: default@empty
 POSTHOOK: query: -- create empty table
 CREATE TABLE empty STORED AS orc AS SELECT * FROM tmp_src_part WHERE d = -1000
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@tmp_src_part
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@empty
 PREHOOK: query: SELECT * FROM empty
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/tez_union.q.out Sat Aug 30 06:44:46 2014
@@ -92,12 +92,15 @@ UNION  ALL  
 select s2.key as key, s2.value as value from src s2
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ut
 POSTHOOK: query: create table ut as
 select s1.key as key, s1.value as value from src s1 join src s3 on s1.key=s3.key
 UNION  ALL  
 select s2.key as key, s2.value as value from src s2
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ut
 PREHOOK: query: select * from ut order by key, value limit 20
 PREHOOK: type: QUERY
@@ -268,12 +271,15 @@ select count(*) as cnt from (select u1.k
 u as u1 join u as u2 on (u1.key = u2.key)) a
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ut
 POSTHOOK: query: create table ut as
 with u as (select * from src union all select * from src)
 select count(*) as cnt from (select u1.key as k1, u2.key as k2 from
 u as u1 join u as u2 on (u1.key = u2.key)) a
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ut
 PREHOOK: query: select * from ut order by cnt limit 20
 PREHOOK: type: QUERY
@@ -401,12 +407,15 @@ src s1
 join (select * from src union all select * from src) u1 on s1.key = u1.key
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ut
 POSTHOOK: query: create table ut as
 select s1.key as skey, u1.key as ukey from
 src s1
 join (select * from src union all select * from src) u1 on s1.key = u1.key
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ut
 PREHOOK: query: select * from ut order by skey, ukey limit 20
 PREHOOK: type: QUERY
@@ -708,6 +717,8 @@ join src s8 on (u1.key = s8.key)
 order by lkey
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ut
 POSTHOOK: query: create table ut as
 select s1.key as skey, u1.key as ukey, s8.key as lkey from 
 src s1
@@ -718,6 +729,7 @@ join src s8 on (u1.key = s8.key)
 order by lkey
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ut
 PREHOOK: query: select * from ut order by skey, ukey, lkey limit 100
 PREHOOK: type: QUERY
@@ -951,11 +963,14 @@ select s2.key as key from src s2 join sr
 union all select s4.key from src s4 join src s5 on s4.key = s5.key
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ut
 POSTHOOK: query: create table ut as
 select s2.key as key from src s2 join src s3 on s2.key = s3.key
 union all select s4.key from src s4 join src s5 on s4.key = s5.key
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ut
 PREHOOK: query: select * from ut order by key limit 30
 PREHOOK: type: QUERY
@@ -1190,12 +1205,15 @@ select u.key as ukey, s.key as skey from
 right outer join src s on u.key = s.key
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ut
 POSTHOOK: query: create table ut as
 select u.key as ukey, s.key as skey from
 (select * from src union all select * from src) u
 right outer join src s on u.key = s.key
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@ut
 PREHOOK: query: select * from ut order by ukey, skey limit 20
 PREHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out Sat Aug 30 06:44:46 2014
@@ -5,6 +5,8 @@ PREHOOK: query: CREATE TABLE decimal_vgb
 	FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: database:default
+PREHOOK: Output: default@decimal_vgby
 POSTHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
 	SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
 	CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
@@ -12,6 +14,7 @@ POSTHOOK: query: CREATE TABLE decimal_vg
 	FROM alltypesorc
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@decimal_vgby
 PREHOOK: query: EXPLAIN SELECT cint,
 	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_part_project.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_part_project.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_part_project.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_part_project.q.out Sat Aug 30 06:44:46 2014
@@ -65,28 +65,28 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesorc_part
-                  Statistics: Num rows: 200 Data size: 4068 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 200 Data size: 41576 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: (cdouble + 2) (type: double)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 200 Data size: 4068 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 200 Data size: 41576 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: double)
                       sort order: +
-                      Statistics: Num rows: 200 Data size: 4068 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 200 Data size: 41576 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: double)
                 outputColumnNames: _col0
-                Statistics: Num rows: 200 Data size: 4068 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 200 Data size: 41576 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 10 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
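
Note on vectorization_part_project.q.out: these hunks change only the optimizer's size estimates. The estimated Data size for the 200-row scan grows from 4068 to 41576 bytes and the post-limit estimate from 200 to 2070, while row counts and the plan shape are untouched, so this looks like a revised per-row size estimate for the partitioned ORC source rather than a behavioural change. The figures come from the Statistics annotations that EXPLAIN prints for the test query, which from the operators above is approximately:

-- reconstructed from the Select/Limit operators in the plan; column and
-- table names are the ones shown there
explain
select (cdouble + 2) as c1
from alltypesorc_part
order by c1
limit 10;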

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out?rev=1621416&r1=1621415&r2=1621416&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out Sat Aug 30 06:44:46 2014 differ