Posted to commits@hive.apache.org by px...@apache.org on 2017/01/19 23:09:35 UTC

[2/2] hive git commit: HIVE-15297: Hive should not split semicolon within quoted string literals (Pengcheng Xiong, reviewed by Ashutosh Chauhan) (addendum II)

HIVE-15297: Hive should not split semicolon within quoted string literals (Pengcheng Xiong, reviewed by Ashutosh Chauhan) (addendum II)
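
A quick illustration of the behavior the fix targets (a sketch only; the SELECT below is hypothetical and not part of this commit, though src is the standard test table already used by these .q files):

    -- With the fix, the embedded ';' inside the string literal stays part of
    -- the literal and the whole line is parsed as one statement; the client
    -- no longer splits it into two fragments at that semicolon.
    SELECT 'a;b' AS value_with_semicolon FROM src LIMIT 1;

As the diff below shows, this addendum mostly adjusts existing query tests to match the stricter statement splitting: quotes embedded inside string literals are escaped (for example "p\'1" and '(\"|\\[|\\])'), trailing "-- ..." comments are moved onto their own lines, and the affected golden .q.out files are regenerated.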


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/230ed787
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/230ed787
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/230ed787

Branch: refs/heads/master
Commit: 230ed7876fbed54e6a3d5fa73f692a9861148aa5
Parents: 355d6b1
Author: Pengcheng Xiong <px...@apache.org>
Authored: Thu Jan 19 15:09:19 2017 -0800
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Thu Jan 19 15:09:19 2017 -0800

----------------------------------------------------------------------
 .../queries/clientnegative/fs_default_name1.q   |  2 +-
 .../queries/clientnegative/fs_default_name2.q   |  2 +-
 .../queries/clientpositive/analyze_tbl_part.q   |  4 +-
 ql/src/test/queries/clientpositive/input19.q    |  2 +-
 .../clientpositive/insert_overwrite_directory.q |  6 +-
 ql/src/test/queries/clientpositive/sample5.q    |  6 +-
 .../test/queries/clientpositive/serde_opencsv.q |  6 +-
 .../clientpositive/vectorized_math_funcs.q      | 16 ++--
 .../clientnegative/fs_default_name1.q.out       |  2 +-
 .../clientnegative/fs_default_name2.q.out       |  2 +-
 .../clientpositive/acid_table_stats.q.out       | 14 +--
 .../clientpositive/analyze_tbl_part.q.out       |  8 +-
 .../clientpositive/index_bitmap_auto.q.out      | 24 ++---
 .../infer_bucket_sort_reducers_power_two.q.out  | 50 +++-------
 .../test/results/clientpositive/input19.q.out   |  4 +-
 .../insert_overwrite_directory.q.out            |  8 +-
 .../clientpositive/llap/orc_llap_counters.q.out |  2 +-
 .../llap/vector_char_simple.q.out               | 18 ++--
 .../llap/vector_decimal_expressions.q.out       |  8 +-
 .../llap/vectorized_math_funcs.q.out            | 56 ++++++------
 .../results/clientpositive/perf/query33.q.out   |  6 +-
 .../results/clientpositive/perf/query60.q.out   |  6 +-
 .../results/clientpositive/perf/query83.q.out   |  6 +-
 .../test/results/clientpositive/sample5.q.out   | 18 ++--
 .../results/clientpositive/serde_opencsv.q.out  |  8 +-
 .../results/clientpositive/spark/sample5.q.out  | 18 ++--
 .../spark/vectorized_math_funcs.q.out           | 56 ++++++------
 .../clientpositive/tez/explainanalyze_4.q.out   | 12 +--
 .../clientpositive/tez/explainanalyze_5.q.out   |  8 +-
 .../tez/hybridgrace_hashjoin_1.q.out            | 96 ++++++--------------
 .../tez/hybridgrace_hashjoin_2.q.out            | 32 ++-----
 .../tez/orc_vectorization_ppd.q.out             | 80 +++++-----------
 .../clientpositive/tez/tez_union_with_udf.q.out |  6 +-
 .../clientpositive/tez/unionDistinct_2.q.out    |  8 +-
 .../clientpositive/tez/update_orig_table.q.out  |  8 +-
 .../clientpositive/tez/vectorization_div0.q.out | 24 ++---
 .../tez/vectorization_limit.q.out               | 32 ++-----
 .../clientpositive/vectorized_math_funcs.q.out  | 56 ++++++------
 38 files changed, 263 insertions(+), 457 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientnegative/fs_default_name1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/fs_default_name1.q b/ql/src/test/queries/clientnegative/fs_default_name1.q
index f50369b..a0a406b 100644
--- a/ql/src/test/queries/clientnegative/fs_default_name1.q
+++ b/ql/src/test/queries/clientnegative/fs_default_name1.q
@@ -1,2 +1,2 @@
-set fs.default.name='http://www.example.com;
+set fs.default.name='http://www.example.com';
 show tables;

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientnegative/fs_default_name2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/fs_default_name2.q b/ql/src/test/queries/clientnegative/fs_default_name2.q
index 485c3db..4b9efda 100644
--- a/ql/src/test/queries/clientnegative/fs_default_name2.q
+++ b/ql/src/test/queries/clientnegative/fs_default_name2.q
@@ -1,2 +1,2 @@
-set fs.default.name='http://www.example.com;
+set fs.default.name='http://www.example.com';
 SELECT * FROM src;

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientpositive/analyze_tbl_part.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/analyze_tbl_part.q b/ql/src/test/queries/clientpositive/analyze_tbl_part.q
index bf7152b..54e816a 100644
--- a/ql/src/test/queries/clientpositive/analyze_tbl_part.q
+++ b/ql/src/test/queries/clientpositive/analyze_tbl_part.q
@@ -19,12 +19,12 @@ describe formatted src_stat_part PARTITION(partitionId=2) value;
 
 create table src_stat_string_part(key string, value string) partitioned by (partitionName string);
 
-insert overwrite table src_stat_string_part partition (partitionName="p'1")
+insert overwrite table src_stat_string_part partition (partitionName="p\'1")
 select * from src1;
 
 insert overwrite table src_stat_string_part partition (partitionName="p\"1")
 select * from src1;
 
-ANALYZE TABLE src_stat_string_part partition (partitionName="p'1") COMPUTE STATISTICS for columns key, value;
+ANALYZE TABLE src_stat_string_part partition (partitionName="p\'1") COMPUTE STATISTICS for columns key, value;
 
 ANALYZE TABLE src_stat_string_part partition (partitionName="p\"1") COMPUTE STATISTICS for columns key, value;

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientpositive/input19.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/input19.q b/ql/src/test/queries/clientpositive/input19.q
index 3dc7fec..33ff8b7 100644
--- a/ql/src/test/queries/clientpositive/input19.q
+++ b/ql/src/test/queries/clientpositive/input19.q
@@ -1,5 +1,5 @@
 
-create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '("|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE;
+create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '(\"|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../../data/files/apache.access.log' INTO TABLE apachelog;
 SELECT a.* FROM apachelog a;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientpositive/insert_overwrite_directory.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/insert_overwrite_directory.q b/ql/src/test/queries/clientpositive/insert_overwrite_directory.q
index 2775314..4431f45 100644
--- a/ql/src/test/queries/clientpositive/insert_overwrite_directory.q
+++ b/ql/src/test/queries/clientpositive/insert_overwrite_directory.q
@@ -80,7 +80,7 @@ insert overwrite directory '../../data/files/array_table_4'
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
 WITH SERDEPROPERTIES (
 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
-'quote.delim'= '("|\\[|\\])',  'field.delim'=',',
+'quote.delim'= '(\"|\\[|\\])',  'field.delim'=',',
 'serialization.null.format'='-NA-', 'colelction.delim'='#') STORED AS TEXTFILE
 select a, null, b from array_table;
 
@@ -97,7 +97,7 @@ insert overwrite directory '../../data/files/map_table_4'
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
 WITH SERDEPROPERTIES (
 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
-'quote.delim'= '("|\\[|\\])',  'field.delim'=':',
+'quote.delim'= '(\"|\\[|\\])',  'field.delim'=':',
 'serialization.null.format'='-NA-', 'colelction.delim'='#', 'mapkey.delim'='%') STORED AS TEXTFILE
 select foo, null, bar from map_table;
 
@@ -138,4 +138,4 @@ dfs -rmr ../../data/files/map_table_4;
 dfs -rmr ../../data/files/rctable;
 dfs -rmr ../../data/files/rctable_out;
 dfs -rmr ../../data/files/src_table_1;
-dfs -rmr ../../data/files/src_table_2;
\ No newline at end of file
+dfs -rmr ../../data/files/src_table_2;

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientpositive/sample5.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/sample5.q b/ql/src/test/queries/clientpositive/sample5.q
index d0aab34..ddbeac0 100644
--- a/ql/src/test/queries/clientpositive/sample5.q
+++ b/ql/src/test/queries/clientpositive/sample5.q
@@ -4,10 +4,12 @@ CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
 
 -- no input pruning, sample filter
 EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+INSERT OVERWRITE TABLE dest1 SELECT s.* 
+-- here's another test
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s;
 
-INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+INSERT OVERWRITE TABLE dest1 SELECT s.* 
+-- here's another test
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s;
 
 SELECT dest1.* FROM dest1 SORT BY key, value;

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientpositive/serde_opencsv.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/serde_opencsv.q b/ql/src/test/queries/clientpositive/serde_opencsv.q
index a5ef8da..26d79a6 100644
--- a/ql/src/test/queries/clientpositive/serde_opencsv.q
+++ b/ql/src/test/queries/clientpositive/serde_opencsv.q
@@ -11,7 +11,7 @@ CREATE TABLE serde_opencsv(
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
 WITH SERDEPROPERTIES(
   "separatorChar" = ",",
-  "quoteChar"     = "'",
+  "quoteChar"     = "\'",
   "escapeChar"    = "\\"
 ) stored as textfile;
 
@@ -27,10 +27,10 @@ CREATE TABLE serde_opencsv(
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
 WITH SERDEPROPERTIES(
   "separatorChar" = ",",
-  "quoteChar"     = "'",
+  "quoteChar"     = "\'",
   "escapeChar"    = "\\"
 ) stored as textfile;
 
 LOAD DATA LOCAL INPATH "../../data/files/opencsv-data.txt" INTO TABLE serde_opencsv;
 
-SELECT count(*) FROM serde_opencsv;
\ No newline at end of file
+SELECT count(*) FROM serde_opencsv;

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/queries/clientpositive/vectorized_math_funcs.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorized_math_funcs.q b/ql/src/test/queries/clientpositive/vectorized_math_funcs.q
index d79fcce..b01c468 100644
--- a/ql/src/test/queries/clientpositive/vectorized_math_funcs.q
+++ b/ql/src/test/queries/clientpositive/vectorized_math_funcs.q
@@ -15,9 +15,9 @@ select
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+-- Use log2 as a representative function to test all input types.
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+-- Use 15601.0 to test zero handling, as there are no zeroes in the table
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -47,7 +47,7 @@ select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+-- Test nesting
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
 -- limit output to a reasonably small number of rows
@@ -60,16 +60,16 @@ select
   ,Round(cdouble, 2)
   ,Floor(cdouble)
   ,Ceil(cdouble)
-  -- Omit rand() from runtime test because it's nondeterministic.
-  -- ,Rand()
+-- Omit rand() from runtime test because it's nondeterministic.
+-- ,Rand()
   ,Rand(98007)
   ,Exp(ln(cdouble))
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+-- Use log2 as a representative function to test all input types.
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+-- Use 15601.0 to test zero handling, as there are no zeroes in the table
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -99,7 +99,7 @@ select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+-- Test nesting
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
 -- limit output to a reasonably small number of rows

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientnegative/fs_default_name1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/fs_default_name1.q.out b/ql/src/test/results/clientnegative/fs_default_name1.q.out
index 97477ee..f5cdbd1 100644
--- a/ql/src/test/results/clientnegative/fs_default_name1.q.out
+++ b/ql/src/test/results/clientnegative/fs_default_name1.q.out
@@ -1 +1 @@
-FAILED: IllegalArgumentException Illegal character in scheme name at index 0: 'http://www.example.com
+FAILED: IllegalArgumentException Illegal character in scheme name at index 0: 'http://www.example.com'

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientnegative/fs_default_name2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/fs_default_name2.q.out b/ql/src/test/results/clientnegative/fs_default_name2.q.out
index 97477ee..f5cdbd1 100644
--- a/ql/src/test/results/clientnegative/fs_default_name2.q.out
+++ b/ql/src/test/results/clientnegative/fs_default_name2.q.out
@@ -1 +1 @@
-FAILED: IllegalArgumentException Illegal character in scheme name at index 0: 'http://www.example.com
+FAILED: IllegalArgumentException Illegal character in scheme name at index 0: 'http://www.example.com'

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/acid_table_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/acid_table_stats.q.out b/ql/src/test/results/clientpositive/acid_table_stats.q.out
index 08cd00a..5d8a000 100644
--- a/ql/src/test/results/clientpositive/acid_table_stats.q.out
+++ b/ql/src/test/results/clientpositive/acid_table_stats.q.out
@@ -92,7 +92,7 @@ Partition Parameters:
 	numFiles            	2                   
 	numRows             	0                   
 	rawDataSize         	0                   
-	totalSize           	3852                
+	totalSize           	3837                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -130,9 +130,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: acid
-            Statistics: Num rows: 1 Data size: 3852 Basic stats: PARTIAL Column stats: NONE
+            Statistics: Num rows: 1 Data size: 3837 Basic stats: PARTIAL Column stats: NONE
             Select Operator
-              Statistics: Num rows: 1 Data size: 3852 Basic stats: PARTIAL Column stats: NONE
+              Statistics: Num rows: 1 Data size: 3837 Basic stats: PARTIAL Column stats: NONE
               Group By Operator
                 aggregations: count()
                 mode: hash
@@ -209,7 +209,7 @@ Partition Parameters:
 	numFiles            	2                   
 	numRows             	1000                
 	rawDataSize         	208000              
-	totalSize           	3852                
+	totalSize           	3837                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -258,7 +258,7 @@ Partition Parameters:
 	numFiles            	2                   
 	numRows             	1000                
 	rawDataSize         	208000              
-	totalSize           	3852                
+	totalSize           	3837                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -385,7 +385,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	1000                
 	rawDataSize         	208000              
-	totalSize           	7704                
+	totalSize           	7689                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -434,7 +434,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	2000                
 	rawDataSize         	416000              
-	totalSize           	7704                
+	totalSize           	7689                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/analyze_tbl_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/analyze_tbl_part.q.out b/ql/src/test/results/clientpositive/analyze_tbl_part.q.out
index ee65b0a..ed90b6f 100644
--- a/ql/src/test/results/clientpositive/analyze_tbl_part.q.out
+++ b/ql/src/test/results/clientpositive/analyze_tbl_part.q.out
@@ -89,12 +89,12 @@ POSTHOOK: query: create table src_stat_string_part(key string, value string) par
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@src_stat_string_part
-PREHOOK: query: insert overwrite table src_stat_string_part partition (partitionName="p'1")
+PREHOOK: query: insert overwrite table src_stat_string_part partition (partitionName="p\'1")
 select * from src1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@src_stat_string_part@partitionname=p%271
-POSTHOOK: query: insert overwrite table src_stat_string_part partition (partitionName="p'1")
+POSTHOOK: query: insert overwrite table src_stat_string_part partition (partitionName="p\'1")
 select * from src1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
@@ -113,12 +113,12 @@ POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@src_stat_string_part@partitionname=p%221
 POSTHOOK: Lineage: src_stat_string_part PARTITION(partitionname=p"1).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: src_stat_string_part PARTITION(partitionname=p"1).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: ANALYZE TABLE src_stat_string_part partition (partitionName="p'1") COMPUTE STATISTICS for columns key, value
+PREHOOK: query: ANALYZE TABLE src_stat_string_part partition (partitionName="p\'1") COMPUTE STATISTICS for columns key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_stat_string_part
 PREHOOK: Input: default@src_stat_string_part@partitionname=p%271
 #### A masked pattern was here ####
-POSTHOOK: query: ANALYZE TABLE src_stat_string_part partition (partitionName="p'1") COMPUTE STATISTICS for columns key, value
+POSTHOOK: query: ANALYZE TABLE src_stat_string_part partition (partitionName="p\'1") COMPUTE STATISTICS for columns key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_stat_string_part
 POSTHOOK: Input: default@src_stat_string_part@partitionname=p%271

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/index_bitmap_auto.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/index_bitmap_auto.q.out b/ql/src/test/results/clientpositive/index_bitmap_auto.q.out
index 7546dd3..295e687 100644
--- a/ql/src/test/results/clientpositive/index_bitmap_auto.q.out
+++ b/ql/src/test/results/clientpositive/index_bitmap_auto.q.out
@@ -1,28 +1,18 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
--- try the query without indexing, with manual indexing, and with automatic indexing
--- without indexing
-SELECT key, value FROM src WHERE key=0 AND value = "val_0"
+PREHOOK: query: SELECT key, value FROM src WHERE key=0 AND value = "val_0"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
--- try the query without indexing, with manual indexing, and with automatic indexing
--- without indexing
-SELECT key, value FROM src WHERE key=0 AND value = "val_0"
+POSTHOOK: query: SELECT key, value FROM src WHERE key=0 AND value = "val_0"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 0	val_0
 0	val_0
 0	val_0
-PREHOOK: query: -- create indices
-EXPLAIN
+PREHOOK: query: EXPLAIN
 CREATE INDEX src1_index ON TABLE src(key) as 'BITMAP' WITH DEFERRED REBUILD
 PREHOOK: type: CREATEINDEX
-POSTHOOK: query: -- create indices
-EXPLAIN
+POSTHOOK: query: EXPLAIN
 CREATE INDEX src1_index ON TABLE src(key) as 'BITMAP' WITH DEFERRED REBUILD
 POSTHOOK: type: CREATEINDEX
 STAGE DEPENDENCIES:
@@ -97,8 +87,7 @@ POSTHOOK: query: SELECT * FROM default__src_src2_index__
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@default__src_src2_index__
 #### A masked pattern was here ####
-PREHOOK: query: -- manual indexing
-EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT a.bucketname AS `_bucketname`, COLLECT_SET(a.offset) as `_offsets`
 FROM (SELECT `_bucketname` AS bucketname, `_offset` AS offset, `_bitmaps` AS bitmaps FROM default__src_src1_index__
        WHERE key = 0) a
@@ -109,8 +98,7 @@ FROM (SELECT `_bucketname` AS bucketname, `_offset` AS offset, `_bitmaps` AS bit
    a.bucketname = b.bucketname AND a.offset = b.offset WHERE NOT
 EWAH_BITMAP_EMPTY(EWAH_BITMAP_AND(a.bitmaps, b.bitmaps)) GROUP BY a.bucketname
 PREHOOK: type: QUERY
-POSTHOOK: query: -- manual indexing
-EXPLAIN
+POSTHOOK: query: EXPLAIN
 SELECT a.bucketname AS `_bucketname`, COLLECT_SET(a.offset) as `_offsets`
 FROM (SELECT `_bucketname` AS bucketname, `_offset` AS offset, `_bitmaps` AS bitmaps FROM default__src_src1_index__
        WHERE key = 0) a

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out
index 5fc5e91..3fddc65 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out
@@ -1,27 +1,17 @@
-PREHOOK: query: -- This tests inferring how data is bucketed/sorted from the operators in the reducer
--- and populating that information in partitions' metadata, it also verifies that the
--- number of reducers chosen will be a power of two
-
-CREATE TABLE test_table (key STRING, value STRING) PARTITIONED BY (part STRING)
+PREHOOK: query: CREATE TABLE test_table (key STRING, value STRING) PARTITIONED BY (part STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@test_table
-POSTHOOK: query: -- This tests inferring how data is bucketed/sorted from the operators in the reducer
--- and populating that information in partitions' metadata, it also verifies that the
--- number of reducers chosen will be a power of two
-
-CREATE TABLE test_table (key STRING, value STRING) PARTITIONED BY (part STRING)
+POSTHOOK: query: CREATE TABLE test_table (key STRING, value STRING) PARTITIONED BY (part STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@test_table
-PREHOOK: query: -- Test group by, should be bucketed and sorted by group by key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+PREHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT key, count(*) FROM src GROUP BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table@part=1
-POSTHOOK: query: -- Test group by, should be bucketed and sorted by group by key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT key, count(*) FROM src GROUP BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -67,14 +57,12 @@ Bucket Columns:     	[key]
 Sort Columns:       	[Order(col:key, order:1)]	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: -- Test join, should be bucketed and sorted by join key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+PREHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, a.value FROM src a JOIN src b ON a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table@part=1
-POSTHOOK: query: -- Test join, should be bucketed and sorted by join key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, a.value FROM src a JOIN src b ON a.key = b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -120,14 +108,12 @@ Bucket Columns:     	[key]
 Sort Columns:       	[Order(col:key, order:1)]	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: -- Test join with two keys, should be bucketed and sorted by join keys
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+PREHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, a.value FROM src a JOIN src b ON a.key = b.key AND a.value = b.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table@part=1
-POSTHOOK: query: -- Test join with two keys, should be bucketed and sorted by join keys
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, a.value FROM src a JOIN src b ON a.key = b.key AND a.value = b.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -173,14 +159,12 @@ Bucket Columns:     	[key, value]
 Sort Columns:       	[Order(col:key, order:1), Order(col:value, order:1)]	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: -- Test join on three tables on same key, should be bucketed and sorted by join key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+PREHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, c.value FROM src a JOIN src b ON (a.key = b.key) JOIN src c ON (b.key = c.key)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table@part=1
-POSTHOOK: query: -- Test join on three tables on same key, should be bucketed and sorted by join key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, c.value FROM src a JOIN src b ON (a.key = b.key) JOIN src c ON (b.key = c.key)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -226,14 +210,12 @@ Bucket Columns:     	[key]
 Sort Columns:       	[Order(col:key, order:1)]	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: -- Test join on three tables on different keys, should be bucketed and sorted by latter key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+PREHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, c.value FROM src a JOIN src b ON (a.key = b.key) JOIN src c ON (b.value = c.value)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table@part=1
-POSTHOOK: query: -- Test join on three tables on different keys, should be bucketed and sorted by latter key
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1') 
 SELECT a.key, c.value FROM src a JOIN src b ON (a.key = b.key) JOIN src c ON (b.value = c.value)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -279,16 +261,12 @@ Bucket Columns:     	[value]
 Sort Columns:       	[Order(col:value, order:1)]	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: -- Test group by in subquery with another group by outside, should be bucketed and sorted by the
--- key of the outer group by
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1')
+PREHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1')
 SELECT count(1), value FROM (SELECT key, count(1) as value FROM src group by key) a group by value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table@part=1
-POSTHOOK: query: -- Test group by in subquery with another group by outside, should be bucketed and sorted by the
--- key of the outer group by
-INSERT OVERWRITE TABLE test_table PARTITION (part = '1')
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table PARTITION (part = '1')
 SELECT count(1), value FROM (SELECT key, count(1) as value FROM src group by key) a group by value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/input19.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input19.q.out b/ql/src/test/results/clientpositive/input19.q.out
index f8baf10..754c891 100644
--- a/ql/src/test/results/clientpositive/input19.q.out
+++ b/ql/src/test/results/clientpositive/input19.q.out
@@ -1,8 +1,8 @@
-PREHOOK: query: create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '("|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE
+PREHOOK: query: create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '(\"|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@apachelog
-POSTHOOK: query: create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '("|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE
+POSTHOOK: query: create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '(\"|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@apachelog

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/insert_overwrite_directory.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/insert_overwrite_directory.q.out b/ql/src/test/results/clientpositive/insert_overwrite_directory.q.out
index a459839..0519371 100644
--- a/ql/src/test/results/clientpositive/insert_overwrite_directory.q.out
+++ b/ql/src/test/results/clientpositive/insert_overwrite_directory.q.out
@@ -1190,7 +1190,7 @@ PREHOOK: query: insert overwrite directory '../../data/files/array_table_4'
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
 WITH SERDEPROPERTIES (
 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
-'quote.delim'= '("|\\[|\\])',  'field.delim'=',',
+'quote.delim'= '(\"|\\[|\\])',  'field.delim'=',',
 'serialization.null.format'='-NA-', 'colelction.delim'='#') STORED AS TEXTFILE
 select a, null, b from array_table
 PREHOOK: type: QUERY
@@ -1200,7 +1200,7 @@ POSTHOOK: query: insert overwrite directory '../../data/files/array_table_4'
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
 WITH SERDEPROPERTIES (
 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
-'quote.delim'= '("|\\[|\\])',  'field.delim'=',',
+'quote.delim'= '(\"|\\[|\\])',  'field.delim'=',',
 'serialization.null.format'='-NA-', 'colelction.delim'='#') STORED AS TEXTFILE
 select a, null, b from array_table
 POSTHOOK: type: QUERY
@@ -1228,7 +1228,7 @@ PREHOOK: query: insert overwrite directory '../../data/files/map_table_4'
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
 WITH SERDEPROPERTIES (
 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
-'quote.delim'= '("|\\[|\\])',  'field.delim'=':',
+'quote.delim'= '(\"|\\[|\\])',  'field.delim'=':',
 'serialization.null.format'='-NA-', 'colelction.delim'='#', 'mapkey.delim'='%') STORED AS TEXTFILE
 select foo, null, bar from map_table
 PREHOOK: type: QUERY
@@ -1238,7 +1238,7 @@ POSTHOOK: query: insert overwrite directory '../../data/files/map_table_4'
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
 WITH SERDEPROPERTIES (
 'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
-'quote.delim'= '("|\\[|\\])',  'field.delim'=':',
+'quote.delim'= '(\"|\\[|\\])',  'field.delim'=':',
 'serialization.null.format'='-NA-', 'colelction.delim'='#', 'mapkey.delim'='%') STORED AS TEXTFILE
 select foo, null, bar from map_table
 POSTHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
index ba18fa6..e1c1dd8 100644
--- a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
@@ -1098,7 +1098,7 @@ Stage-1 HIVE COUNTERS:
    DESERIALIZE_ERRORS: 0
    RECORDS_IN_Map_1: 0
    RECORDS_OUT_0: 1
-   RECORDS_OUT_INTERMEDIATE_Map_1: 1
+   RECORDS_OUT_INTERMEDIATE_Map_1: 0
 Stage-1 LLAP IO COUNTERS:
    METADATA_CACHE_HIT: 2
    SELECTED_ROWGROUPS: 0

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out b/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out
index 063170d..9e4b7f7 100644
--- a/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_char_simple.q.out
@@ -108,16 +108,14 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: -- should match the query from src
-select key, value
+PREHOOK: query: select key, value
 from char_2
 order by key asc
 limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@char_2
 #### A masked pattern was here ####
-POSTHOOK: query: -- should match the query from src
-select key, value
+POSTHOOK: query: select key, value
 from char_2
 order by key asc
 limit 5
@@ -211,16 +209,14 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: -- should match the query from src
-select key, value
+PREHOOK: query: select key, value
 from char_2
 order by key desc
 limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@char_2
 #### A masked pattern was here ####
-POSTHOOK: query: -- should match the query from src
-select key, value
+POSTHOOK: query: select key, value
 from char_2
 order by key desc
 limit 5
@@ -240,15 +236,13 @@ POSTHOOK: query: drop table char_2
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@char_2
 POSTHOOK: Output: default@char_2
-PREHOOK: query: -- Implicit conversion.  Occurs in reduce-side under Tez.
-create table char_3 (
+PREHOOK: query: create table char_3 (
   field char(12)
 ) stored as orc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@char_3
-POSTHOOK: query: -- Implicit conversion.  Occurs in reduce-side under Tez.
-create table char_3 (
+POSTHOOK: query: create table char_3 (
   field char(12)
 ) stored as orc
 POSTHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
index b37e30b..67b58c7 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
@@ -1,13 +1,9 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc
+PREHOOK: query: CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@decimal_test
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc
+POSTHOOK: query: CREATE TABLE decimal_test STORED AS ORC AS SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2 FROM alltypesorc
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@alltypesorc
 POSTHOOK: Output: database:default

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
index 0a81f62..da862b9 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
@@ -1,6 +1,4 @@
-PREHOOK: query: -- Test math functions in vectorized mode to verify they run correctly end-to-end.
-
-explain 
+PREHOOK: query: explain 
 select
    cdouble
   ,Round(cdouble, 2)
@@ -12,9 +10,9 @@ select
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -44,17 +42,15 @@ select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Test math functions in vectorized mode to verify they run correctly end-to-end.
-
-explain 
+POSTHOOK: query: explain 
 select
    cdouble
   ,Round(cdouble, 2)
@@ -66,9 +62,9 @@ select
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -98,12 +94,12 @@ select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -128,16 +124,16 @@ PREHOOK: query: select
   ,Round(cdouble, 2)
   ,Floor(cdouble)
   ,Ceil(cdouble)
-  -- Omit rand() from runtime test because it's nondeterministic.
-  -- ,Rand()
+
+
   ,Rand(98007)
   ,Exp(ln(cdouble))
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -167,12 +163,12 @@ PREHOOK: query: select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
@@ -182,16 +178,16 @@ POSTHOOK: query: select
   ,Round(cdouble, 2)
   ,Floor(cdouble)
   ,Ceil(cdouble)
-  -- Omit rand() from runtime test because it's nondeterministic.
-  -- ,Rand()
+
+
   ,Rand(98007)
   ,Exp(ln(cdouble))
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -221,12 +217,12 @@ POSTHOOK: query: select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/perf/query33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query33.q.out b/ql/src/test/results/clientpositive/perf/query33.q.out
index e42c685..89660e3 100644
--- a/ql/src/test/results/clientpositive/perf/query33.q.out
+++ b/ql/src/test/results/clientpositive/perf/query33.q.out
@@ -1,5 +1,4 @@
-PREHOOK: query: -- start query 1 in stream 0 using template query33.tpl and seed 1930872976
-explain with ss as (
+PREHOOK: query: explain with ss as (
  select
           i_manufact_id,sum(ss_ext_sales_price) total_sales
  from
@@ -72,8 +71,7 @@ where i_category in ('Books'))
  order by total_sales
 limit 100
 PREHOOK: type: QUERY
-POSTHOOK: query: -- start query 1 in stream 0 using template query33.tpl and seed 1930872976
-explain with ss as (
+POSTHOOK: query: explain with ss as (
  select
           i_manufact_id,sum(ss_ext_sales_price) total_sales
  from

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/perf/query60.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query60.q.out b/ql/src/test/results/clientpositive/perf/query60.q.out
index a7c8d03..3b68c44 100644
--- a/ql/src/test/results/clientpositive/perf/query60.q.out
+++ b/ql/src/test/results/clientpositive/perf/query60.q.out
@@ -1,5 +1,4 @@
-PREHOOK: query: -- start query 1 in stream 0 using template query60.tpl and seed 1930872976
-explain with ss as (
+PREHOOK: query: explain with ss as (
  select
           i_item_id,sum(ss_ext_sales_price) total_sales
  from
@@ -75,8 +74,7 @@ where i_category in ('Children'))
       ,total_sales
  limit 100
 PREHOOK: type: QUERY
-POSTHOOK: query: -- start query 1 in stream 0 using template query60.tpl and seed 1930872976
-explain with ss as (
+POSTHOOK: query: explain with ss as (
  select
           i_item_id,sum(ss_ext_sales_price) total_sales
  from

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/perf/query83.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query83.q.out b/ql/src/test/results/clientpositive/perf/query83.q.out
index 2789643..004dc41 100644
--- a/ql/src/test/results/clientpositive/perf/query83.q.out
+++ b/ql/src/test/results/clientpositive/perf/query83.q.out
@@ -1,5 +1,4 @@
-PREHOOK: query: -- start query 1 in stream 0 using template query83.tpl and seed 1930872976
-explain with sr_items as
+PREHOOK: query: explain with sr_items as
  (select i_item_id item_id,
         sum(sr_return_quantity) sr_item_qty
  from store_returns,
@@ -64,8 +63,7 @@ explain with sr_items as
          ,sr_item_qty
  limit 100
 PREHOOK: type: QUERY
-POSTHOOK: query: -- start query 1 in stream 0 using template query83.tpl and seed 1930872976
-explain with sr_items as
+POSTHOOK: query: explain with sr_items as
  (select i_item_id item_id,
         sum(sr_return_quantity) sr_item_qty
  from store_returns,

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/sample5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample5.q.out b/ql/src/test/results/clientpositive/sample5.q.out
index 147a567..a2b4f9f 100644
--- a/ql/src/test/results/clientpositive/sample5.q.out
+++ b/ql/src/test/results/clientpositive/sample5.q.out
@@ -6,18 +6,14 @@ POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dest1
-PREHOOK: query: -- SORT_QUERY_RESULTS
+PREHOOK: query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.* 
 
--- no input pruning, sample filter
-EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
+POSTHOOK: query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.* 
 
--- no input pruning, sample filter
-EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -343,12 +339,14 @@ STAGE PLANS:
           hdfs directory: true
 #### A masked pattern was here ####
 
-PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* 
+
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcbucket
 PREHOOK: Output: default@dest1
-POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* 
+
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/serde_opencsv.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/serde_opencsv.q.out b/ql/src/test/results/clientpositive/serde_opencsv.q.out
index 230c475..1ffc229 100644
--- a/ql/src/test/results/clientpositive/serde_opencsv.q.out
+++ b/ql/src/test/results/clientpositive/serde_opencsv.q.out
@@ -11,7 +11,7 @@ CREATE TABLE serde_opencsv(
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
 WITH SERDEPROPERTIES(
   "separatorChar" = ",",
-  "quoteChar"     = "'",
+  "quoteChar"     = "\'",
   "escapeChar"    = "\\"
 ) stored as textfile
 PREHOOK: type: CREATETABLE
@@ -28,7 +28,7 @@ CREATE TABLE serde_opencsv(
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
 WITH SERDEPROPERTIES(
   "separatorChar" = ",",
-  "quoteChar"     = "'",
+  "quoteChar"     = "\'",
   "escapeChar"    = "\\"
 ) stored as textfile
 POSTHOOK: type: CREATETABLE
@@ -61,7 +61,7 @@ PREHOOK: query: CREATE TABLE serde_opencsv(
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
 WITH SERDEPROPERTIES(
   "separatorChar" = ",",
-  "quoteChar"     = "'",
+  "quoteChar"     = "\'",
   "escapeChar"    = "\\"
 ) stored as textfile
 PREHOOK: type: CREATETABLE
@@ -79,7 +79,7 @@ POSTHOOK: query: CREATE TABLE serde_opencsv(
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
 WITH SERDEPROPERTIES(
   "separatorChar" = ",",
-  "quoteChar"     = "'",
+  "quoteChar"     = "\'",
   "escapeChar"    = "\\"
 ) stored as textfile
 POSTHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/spark/sample5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample5.q.out b/ql/src/test/results/clientpositive/spark/sample5.q.out
index 77477ba..07cd6b8 100644
--- a/ql/src/test/results/clientpositive/spark/sample5.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample5.q.out
@@ -6,18 +6,14 @@ POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dest1
-PREHOOK: query: -- SORT_QUERY_RESULTS
+PREHOOK: query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.* 
 
--- no input pruning, sample filter
-EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
+POSTHOOK: query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest1 SELECT s.* 
 
--- no input pruning, sample filter
-EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -158,12 +154,14 @@ STAGE PLANS:
     Stats-Aggr Operator
 #### A masked pattern was here ####
 
-PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* 
+
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcbucket
 PREHOOK: Output: default@dest1
-POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.* 
+
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
index 0a81f62..da862b9 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_math_funcs.q.out
@@ -1,6 +1,4 @@
-PREHOOK: query: -- Test math functions in vectorized mode to verify they run correctly end-to-end.
-
-explain 
+PREHOOK: query: explain 
 select
    cdouble
   ,Round(cdouble, 2)
@@ -12,9 +10,9 @@ select
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -44,17 +42,15 @@ select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Test math functions in vectorized mode to verify they run correctly end-to-end.
-
-explain 
+POSTHOOK: query: explain 
 select
    cdouble
   ,Round(cdouble, 2)
@@ -66,9 +62,9 @@ select
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -98,12 +94,12 @@ select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -128,16 +124,16 @@ PREHOOK: query: select
   ,Round(cdouble, 2)
   ,Floor(cdouble)
   ,Ceil(cdouble)
-  -- Omit rand() from runtime test because it's nondeterministic.
-  -- ,Rand()
+
+
   ,Rand(98007)
   ,Exp(ln(cdouble))
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -167,12 +163,12 @@ PREHOOK: query: select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
@@ -182,16 +178,16 @@ POSTHOOK: query: select
   ,Round(cdouble, 2)
   ,Floor(cdouble)
   ,Ceil(cdouble)
-  -- Omit rand() from runtime test because it's nondeterministic.
-  -- ,Rand()
+
+
   ,Rand(98007)
   ,Exp(ln(cdouble))
   ,Ln(cdouble)  
   ,Ln(cfloat)
   ,Log10(cdouble)
-  -- Use log2 as a representative function to test all input types.
+
   ,Log2(cdouble)
-  -- Use 15601.0 to test zero handling, as there are no zeroes in the table
+
   ,Log2(cdouble - 15601.0)
   ,Log2(cfloat)
   ,Log2(cbigint)
@@ -221,12 +217,12 @@ POSTHOOK: query: select
   ,Negative(cdouble)
   ,Sign(cdouble)
   ,Sign(cbigint)
-  -- Test nesting
+
   ,cos(-sin(log(cdouble)) + 3.14159)
 from alltypesorc
--- limit output to a reasonably small number of rows
+
 where cbigint % 500 = 0
--- test use of a math function in the WHERE clause
+
 and sin(cfloat) >= -1.0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out
index 27c1bbe..21832c3 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out
@@ -16,8 +16,7 @@ order by a.cint
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-PREHOOK: query: -- First try with regular mergejoin
-explain analyze
+PREHOOK: query: explain analyze
 select
   *
 from alltypesorc a join alltypesorc b on a.cint = b.cint
@@ -25,8 +24,7 @@ where
   a.cint between 1000000 and 3000000 and b.cbigint is not null
 order by a.cint
 PREHOOK: type: QUERY
-POSTHOOK: query: -- First try with regular mergejoin
-explain analyze
+POSTHOOK: query: explain analyze
 select
   *
 from alltypesorc a join alltypesorc b on a.cint = b.cint
@@ -307,8 +305,7 @@ order by a.cint
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-PREHOOK: query: -- Try with dynamically partitioned hashjoin
-explain analyze
+PREHOOK: query: explain analyze
 select
   *
 from alltypesorc a join alltypesorc b on a.cint = b.cint
@@ -316,8 +313,7 @@ where
   a.cint between 1000000 and 3000000 and b.cbigint is not null
 order by a.cint
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Try with dynamically partitioned hashjoin
-explain analyze
+POSTHOOK: query: explain analyze
 select
   *
 from alltypesorc a join alltypesorc b on a.cint = b.cint

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out
index b9d69fd..85ff921 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_5.q.out
@@ -216,17 +216,13 @@ Bucket Columns:     	[]
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-create table acid_uami(i int,
+PREHOOK: query: create table acid_uami(i int,
                  de decimal(5,2),
                  vc varchar(128)) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@acid_uami
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-create table acid_uami(i int,
+POSTHOOK: query: create table acid_uami(i int,
                  de decimal(5,2),
                  vc varchar(128)) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out b/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
index 63d8546..d6f84cb 100644
--- a/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
@@ -1,28 +1,13 @@
-PREHOOK: query: -- Hybrid Grace Hash Join
--- Test basic functionalities:
--- 1. Various cases when hash partitions spill
--- 2. Partitioned table spilling
--- 3. Vectorization
-
-
-SELECT 1
+PREHOOK: query: SELECT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-POSTHOOK: query: -- Hybrid Grace Hash Join
--- Test basic functionalities:
--- 1. Various cases when hash partitions spill
--- 2. Partitioned table spilling
--- 3. Vectorization
-
-
-SELECT 1
+POSTHOOK: query: SELECT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 1
-PREHOOK: query: -- Base result for inner join
-explain
+PREHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -30,8 +15,7 @@ select count(*) from
  on cd.cint = c.cint
  where c.cint < 2000000000) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Base result for inner join
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -139,9 +123,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
 3152013
-PREHOOK: query: -- Two partitions are created. One in memory, one on disk on creation.
--- The one in memory will eventually exceed memory limit, but won't spill.
-explain
+PREHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -149,9 +131,7 @@ select count(*) from
  on cd.cint = c.cint
  where c.cint < 2000000000) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Two partitions are created. One in memory, one on disk on creation.
--- The one in memory will eventually exceed memory limit, but won't spill.
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -260,16 +240,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
 3152013
-PREHOOK: query: -- Base result for inner join
-explain
+PREHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
  inner join alltypesorc cd
  on cd.cint = c.cint) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Base result for inner join
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -374,18 +352,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
 3152013
-PREHOOK: query: -- 16 partitions are created: 3 in memory, 13 on disk on creation.
--- 1 partition is spilled during first round processing, which ends up having 2 in memory, 14 on disk
-explain
+PREHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
  inner join alltypesorc cd
  on cd.cint = c.cint) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- 16 partitions are created: 3 in memory, 13 on disk on creation.
--- 1 partition is spilled during first round processing, which ends up having 2 in memory, 14 on disk
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -491,16 +465,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
 3152013
-PREHOOK: query: -- Base result for outer join
-explain
+PREHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
  left outer join alltypesorc cd
  on cd.cint = c.cint) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Base result for outer join
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -599,16 +571,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
 3155128
-PREHOOK: query: -- 32 partitions are created. 3 in memory, 29 on disk on creation.
-explain
+PREHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
  left outer join alltypesorc cd
  on cd.cint = c.cint) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- 32 partitions are created. 3 in memory, 29 on disk on creation.
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select c.ctinyint
  from alltypesorc c
@@ -708,13 +678,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
 3155128
-PREHOOK: query: -- Partitioned table
-create table parttbl (key string, value char(20)) partitioned by (dt char(10))
+PREHOOK: query: create table parttbl (key string, value char(20)) partitioned by (dt char(10))
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@parttbl
-POSTHOOK: query: -- Partitioned table
-create table parttbl (key string, value char(20)) partitioned by (dt char(10))
+POSTHOOK: query: create table parttbl (key string, value char(20)) partitioned by (dt char(10))
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@parttbl
@@ -742,16 +710,14 @@ POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@parttbl@dt=2000-01-02
 POSTHOOK: Lineage: parttbl PARTITION(dt=2000-01-02).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: parttbl PARTITION(dt=2000-01-02).value EXPRESSION [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- No spill, base result
-explain
+PREHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
  inner join parttbl p2
  on p1.key = p2.key) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- No spill, base result
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
@@ -860,16 +826,14 @@ POSTHOOK: Input: default@parttbl@dt=2000-01-01
 POSTHOOK: Input: default@parttbl@dt=2000-01-02
 #### A masked pattern was here ####
 1217
-PREHOOK: query: -- No spill, 2 partitions created in memory
-explain
+PREHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
  inner join parttbl p2
  on p1.key = p2.key) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- No spill, 2 partitions created in memory
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
@@ -979,16 +943,14 @@ POSTHOOK: Input: default@parttbl@dt=2000-01-01
 POSTHOOK: Input: default@parttbl@dt=2000-01-02
 #### A masked pattern was here ####
 1217
-PREHOOK: query: -- Spill case base result
-explain
+PREHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
  inner join parttbl p2
  on p1.key = p2.key) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Spill case base result
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
@@ -1097,16 +1059,14 @@ POSTHOOK: Input: default@parttbl@dt=2000-01-01
 POSTHOOK: Input: default@parttbl@dt=2000-01-02
 #### A masked pattern was here ####
 1217
-PREHOOK: query: -- Spill case, one partition in memory, one spilled on creation
-explain
+PREHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
  inner join parttbl p2
  on p1.key = p2.key) t1
 PREHOOK: type: QUERY
-POSTHOOK: query: -- Spill case, one partition in memory, one spilled on creation
-explain
+POSTHOOK: query: explain
 select count(*) from
 (select p1.value
  from parttbl p1
@@ -1224,9 +1184,7 @@ POSTHOOK: query: drop table parttbl
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@parttbl
 POSTHOOK: Output: default@parttbl
-PREHOOK: query: -- Test vectorization
--- Test case borrowed from vector_decimal_mapjoin.q
-CREATE TABLE decimal_mapjoin STORED AS ORC AS
+PREHOOK: query: CREATE TABLE decimal_mapjoin STORED AS ORC AS
   SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1,
   CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
   cint
@@ -1235,9 +1193,7 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@decimal_mapjoin
-POSTHOOK: query: -- Test vectorization
--- Test case borrowed from vector_decimal_mapjoin.q
-CREATE TABLE decimal_mapjoin STORED AS ORC AS
+POSTHOOK: query: CREATE TABLE decimal_mapjoin STORED AS ORC AS
   SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1,
   CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
   cint

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_2.q.out b/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_2.q.out
index 473daca..aac3e97 100644
--- a/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_2.q.out
+++ b/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_2.q.out
@@ -1,23 +1,17 @@
-PREHOOK: query: -- Hybrid Grace Hash Join
--- Test n-way join
-SELECT 1
+PREHOOK: query: SELECT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-POSTHOOK: query: -- Hybrid Grace Hash Join
--- Test n-way join
-SELECT 1
+POSTHOOK: query: SELECT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 1
-PREHOOK: query: -- 3-way mapjoin (1 big table, 2 small tables)
-SELECT 1
+PREHOOK: query: SELECT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-POSTHOOK: query: -- 3-way mapjoin (1 big table, 2 small tables)
-SELECT 1
+POSTHOOK: query: SELECT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
@@ -271,13 +265,11 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 428
-PREHOOK: query: -- 4-way mapjoin (1 big table, 3 small tables)
-SELECT 1
+PREHOOK: query: SELECT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-POSTHOOK: query: -- 4-way mapjoin (1 big table, 3 small tables)
-SELECT 1
+POSTHOOK: query: SELECT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
@@ -571,13 +563,11 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 5680
-PREHOOK: query: -- 2 sets of 3-way mapjoin under 2 different tasks
-SELECT 1
+PREHOOK: query: SELECT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-POSTHOOK: query: -- 2 sets of 3-way mapjoin under 2 different tasks
-SELECT 1
+POSTHOOK: query: SELECT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
@@ -1054,13 +1044,11 @@ POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 428
 452
-PREHOOK: query: -- A chain of 2 sets of 3-way mapjoin under the same task
-SELECT 1
+PREHOOK: query: SELECT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-POSTHOOK: query: -- A chain of 2 sets of 3-way mapjoin under the same task
-SELECT 1
+POSTHOOK: query: SELECT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/orc_vectorization_ppd.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/orc_vectorization_ppd.q.out b/ql/src/test/results/clientpositive/tez/orc_vectorization_ppd.q.out
index 738abc4..35b204b 100644
--- a/ql/src/test/results/clientpositive/tez/orc_vectorization_ppd.q.out
+++ b/ql/src/test/results/clientpositive/tez/orc_vectorization_ppd.q.out
@@ -1,10 +1,8 @@
-PREHOOK: query: -- create table with 1000 rows
-create table srcorc(key string, value string) stored as textfile
+PREHOOK: query: create table srcorc(key string, value string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@srcorc
-POSTHOOK: query: -- create table with 1000 rows
-create table srcorc(key string, value string) stored as textfile
+POSTHOOK: query: create table srcorc(key string, value string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@srcorc
@@ -28,8 +26,7 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@srcorc
 POSTHOOK: Lineage: srcorc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: srcorc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- load table with each row group having 1000 rows and stripe 1 & 2 having 5000 & 2000 rows respectively
-create table if not exists vectororc
+PREHOOK: query: create table if not exists vectororc
 (s1 string,
 s2 string,
 d double,
@@ -38,8 +35,7 @@ stored as ORC tblproperties("orc.row.index.stride"="1000", "orc.stripe.size"="10
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@vectororc
-POSTHOOK: query: -- load table with each row group having 1000 rows and stripe 1 & 2 having 5000 & 2000 rows respectively
-create table if not exists vectororc
+POSTHOOK: query: create table if not exists vectororc
 (s1 string,
 s2 string,
 d double,
@@ -48,13 +44,11 @@ stored as ORC tblproperties("orc.row.index.stride"="1000", "orc.stripe.size"="10
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@vectororc
-PREHOOK: query: -- insert creates separate orc files
-insert overwrite table vectororc select "apple", "a", rand(1), "zoo" from srcorc
+PREHOOK: query: insert overwrite table vectororc select "apple", "a", rand(1), "zoo" from srcorc
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcorc
 PREHOOK: Output: default@vectororc
-POSTHOOK: query: -- insert creates separate orc files
-insert overwrite table vectororc select "apple", "a", rand(1), "zoo" from srcorc
+POSTHOOK: query: insert overwrite table vectororc select "apple", "a", rand(1), "zoo" from srcorc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcorc
 POSTHOOK: Output: default@vectororc
@@ -134,8 +128,7 @@ POSTHOOK: Lineage: vectororc.d EXPRESSION []
 POSTHOOK: Lineage: vectororc.s1 EXPRESSION []
 POSTHOOK: Lineage: vectororc.s2 SIMPLE []
 POSTHOOK: Lineage: vectororc.s3 SIMPLE []
-PREHOOK: query: -- since vectororc table has multiple orc file we will load them into a single file using another table
-create table if not exists testorc
+PREHOOK: query: create table if not exists testorc
 (s1 string,
 s2 string,
 d double,
@@ -144,8 +137,7 @@ stored as ORC tblproperties("orc.row.index.stride"="1000", "orc.stripe.size"="10
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@testorc
-POSTHOOK: query: -- since vectororc table has multiple orc file we will load them into a single file using another table
-create table if not exists testorc
+POSTHOOK: query: create table if not exists testorc
 (s1 string,
 s2 string,
 d double,
@@ -166,98 +158,74 @@ POSTHOOK: Lineage: testorc.d SIMPLE [(vectororc)vectororc.FieldSchema(name:d, ty
 POSTHOOK: Lineage: testorc.s1 SIMPLE [(vectororc)vectororc.FieldSchema(name:s1, type:string, comment:null), ]
 POSTHOOK: Lineage: testorc.s2 SIMPLE [(vectororc)vectororc.FieldSchema(name:s2, type:string, comment:null), ]
 POSTHOOK: Lineage: testorc.s3 SIMPLE [(vectororc)vectororc.FieldSchema(name:s3, type:string, comment:null), ]
-PREHOOK: query: -- row group (1,4) from stripe 1 and row group (1) from stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s1 is not null
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s1 is not null
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- row group (1,4) from stripe 1 and row group (1) from stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s1 is not null
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s1 is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 3000	1505
-PREHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s1 is not null
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s1 is not null
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s1 is not null
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s1 is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 3000	1505
-PREHOOK: query: -- row group (2,3,5) from stripe 1 and row group (2) from stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- row group (2,3,5) from stripe 1 and row group (2) from stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 4000	2006
-PREHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s2 in ("b", "c", "e", "g")
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 4000	2006
-PREHOOK: query: -- last row group of stripe 1 and first row group of stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s3="z"
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s3="z"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- last row group of stripe 1 and first row group of stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s3="z"
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s3="z"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 2000	1011
-PREHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s3="z"
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s3="z"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s3="z"
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s3="z"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 2000	1011
-PREHOOK: query: -- first row group of stripe 1 and last row group of stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- first row group of stripe 1 and last row group of stripe 2
--- PPD ONLY
-select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####
 2000	1006
-PREHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
+PREHOOK: query: select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testorc
 #### A masked pattern was here ####
-POSTHOOK: query: -- VECTORIZATION + PPD
-select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
+POSTHOOK: query: select count(*),int(sum(d)) from testorc where s2="a" or s2="g"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testorc
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out b/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out
index 923e098..0c8546d 100644
--- a/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out
+++ b/ql/src/test/results/clientpositive/tez/tez_union_with_udf.q.out
@@ -18,15 +18,13 @@ PREHOOK: Output: example_add
 POSTHOOK: query: create temporary function example_add as 'org.apache.hadoop.hive.udf.example.GenericUDFExampleAdd'
 POSTHOOK: type: CREATEFUNCTION
 POSTHOOK: Output: example_add
-PREHOOK: query: -- Now try the query with the UDF
-select example_add(key, key)from (select key from src limit 1) a
+PREHOOK: query: select example_add(key, key)from (select key from src limit 1) a
 union all
 select example_add(key, key)from (select key from src limit 1) b
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: -- Now try the query with the UDF
-select example_add(key, key)from (select key from src limit 1) a
+POSTHOOK: query: select example_add(key, key)from (select key from src limit 1) a
 union all
 select example_add(key, key)from (select key from src limit 1) b
 POSTHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/unionDistinct_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/unionDistinct_2.q.out b/ql/src/test/results/clientpositive/tez/unionDistinct_2.q.out
index 10609d9..7cc6324 100644
--- a/ql/src/test/results/clientpositive/tez/unionDistinct_2.q.out
+++ b/ql/src/test/results/clientpositive/tez/unionDistinct_2.q.out
@@ -1,13 +1,9 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE u1 as select key, value from src order by key limit 5
+PREHOOK: query: CREATE TABLE u1 as select key, value from src order by key limit 5
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
 PREHOOK: Output: default@u1
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE u1 as select key, value from src order by key limit 5
+POSTHOOK: query: CREATE TABLE u1 as select key, value from src order by key limit 5
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:default

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/update_orig_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/update_orig_table.q.out b/ql/src/test/results/clientpositive/tez/update_orig_table.q.out
index 66533e1..5b22fad 100644
--- a/ql/src/test/results/clientpositive/tez/update_orig_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/update_orig_table.q.out
@@ -1,6 +1,4 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-create table acid_uot(
+PREHOOK: query: create table acid_uot(
     ctinyint TINYINT,
     csmallint SMALLINT,
     cint INT,
@@ -17,9 +15,7 @@ PREHOOK: type: CREATETABLE
 #### A masked pattern was here ####
 PREHOOK: Output: database:default
 PREHOOK: Output: default@acid_uot
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-create table acid_uot(
+POSTHOOK: query: create table acid_uot(
     ctinyint TINYINT,
     csmallint SMALLINT,
     cint INT,

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out b/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
index 12b90a4..3c017e6 100644
--- a/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorization_div0.q.out
@@ -1,9 +1,7 @@
-PREHOOK: query: -- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants
-explain
+PREHOOK: query: explain
 select cdouble / 0.0 from alltypesorc limit 100
 PREHOOK: type: QUERY
-POSTHOOK: query: -- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants
-explain
+POSTHOOK: query: explain
 select cdouble / 0.0 from alltypesorc limit 100
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -131,17 +129,11 @@ NULL
 NULL
 NULL
 NULL
-PREHOOK: query: -- There are no zeros in the table, but there is 988888, so use it as zero
-
--- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants as numerators
-explain
+PREHOOK: query: explain
 select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L) 
 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100
 PREHOOK: type: QUERY
-POSTHOOK: query: -- There are no zeros in the table, but there is 988888, so use it as zero
-
--- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants as numerators
-explain
+POSTHOOK: query: explain
 select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L) 
 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100
 POSTHOOK: type: QUERY
@@ -310,15 +302,11 @@ POSTHOOK: Input: default@alltypesorc
 59347745	NULL	0.000000020219807846111
 60229567	NULL	0.000000019923769334088
 60330397	NULL	0.000000019890470801974
-PREHOOK: query: -- There are no zeros in the table, but there is -200.0, so use it as zero
-
-explain
+PREHOOK: query: explain
 select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 3 / (cdouble + 200.0), 1.2 / (cdouble + 200.0) 
 from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100
 PREHOOK: type: QUERY
-POSTHOOK: query: -- There are no zeros in the table, but there is -200.0, so use it as zero
-
-explain
+POSTHOOK: query: explain
 select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 3 / (cdouble + 200.0), 1.2 / (cdouble + 200.0) 
 from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100
 POSTHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/230ed787/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out b/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
index 71e470b..dfb0102 100644
--- a/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
@@ -38,14 +38,10 @@ POSTHOOK: Input: default@alltypesorc
 -1887561756	-8881.0
 -1887561756	-2281.0
 -1887561756	9531.0
-PREHOOK: query: -- HIVE-3562 Some limit can be pushed down to map stage - c/p parts from limit_pushdown
-
-explain
+PREHOOK: query: explain
 select ctinyint,cdouble,csmallint from alltypesorc where ctinyint is not null order by ctinyint,cdouble limit 20
 PREHOOK: type: QUERY
-POSTHOOK: query: -- HIVE-3562 Some limit can be pushed down to map stage - c/p parts from limit_pushdown
-
-explain
+POSTHOOK: query: explain
 select ctinyint,cdouble,csmallint from alltypesorc where ctinyint is not null order by ctinyint,cdouble limit 20
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -131,12 +127,10 @@ POSTHOOK: Input: default@alltypesorc
 -64	-2919.0	-2919
 -64	-1600.0	-1600
 -64	-200.0	-200
-PREHOOK: query: -- deduped RS
-explain
+PREHOOK: query: explain
 select ctinyint,avg(cdouble + 1) from alltypesorc group by ctinyint order by ctinyint limit 20
 PREHOOK: type: QUERY
-POSTHOOK: query: -- deduped RS
-explain
+POSTHOOK: query: explain
 select ctinyint,avg(cdouble + 1) from alltypesorc group by ctinyint order by ctinyint limit 20
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -227,12 +221,10 @@ NULL	9370.0945309795
 -48	1672.909090909091
 -47	-574.6428571428571
 -46	3033.55
-PREHOOK: query: -- distincts
-explain
+PREHOOK: query: explain
 select distinct(ctinyint) from alltypesorc limit 20
 PREHOOK: type: QUERY
-POSTHOOK: query: -- distincts
-explain
+POSTHOOK: query: explain
 select distinct(ctinyint) from alltypesorc limit 20
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -418,12 +410,10 @@ NULL	2932
 -48	29
 -47	22
 -46	24
-PREHOOK: query: -- limit zero
-explain
+PREHOOK: query: explain
 select ctinyint,cdouble from alltypesorc order by ctinyint limit 0
 PREHOOK: type: QUERY
-POSTHOOK: query: -- limit zero
-explain
+POSTHOOK: query: explain
 select ctinyint,cdouble from alltypesorc order by ctinyint limit 0
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -444,12 +434,10 @@ POSTHOOK: query: select ctinyint,cdouble from alltypesorc order by ctinyint limi
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-PREHOOK: query: -- 2MR (applied to last RS)
-explain
+PREHOOK: query: explain
 select cdouble, sum(ctinyint) as sum from alltypesorc where ctinyint is not null group by cdouble order by sum, cdouble limit 20
 PREHOOK: type: QUERY
-POSTHOOK: query: -- 2MR (applied to last RS)
-explain
+POSTHOOK: query: explain
 select cdouble, sum(ctinyint) as sum from alltypesorc where ctinyint is not null group by cdouble order by sum, cdouble limit 20
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES: