Posted to commits@hive.apache.org by px...@apache.org on 2015/08/21 22:51:28 UTC

hive git commit: backport HIVE-11493: Predicate with integer column equals double evaluates to false (Pengcheng Xiong, reviewed by Hari

Repository: hive
Updated Branches:
  refs/heads/branch-1.0 0f7605338 -> d687bfb81


backport HIVE-11493: Predicate with integer column equals double evaluates to false (Pengcheng Xiong, reviewed by Hari


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d687bfb8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d687bfb8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d687bfb8

Branch: refs/heads/branch-1.0
Commit: d687bfb817b5785e8bc871e89b1832f4540174e2
Parents: 0f76053
Author: Pengcheng Xiong <px...@apache.org>
Authored: Fri Aug 21 13:50:43 2015 -0700
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Fri Aug 21 13:50:43 2015 -0700

----------------------------------------------------------------------
 .../hive/ql/parse/TypeCheckProcFactory.java     |   2 +-
 .../clientpositive/cast_tinyint_to_double.q     |   7 +
 .../clientpositive/cast_tinyint_to_double.q.out |  38 ++
 .../clientpositive/infer_const_type.q.out       |   7 +-
 .../spark/metadata_only_queries.q.out           | 498 -------------------
 .../metadata_only_queries_with_filters.q.out    | 224 ---------
 .../clientpositive/tez/vectorization_0.q.out    |   2 +-
 .../tez/vectorization_short_regress.q.out       |  20 +-
 .../clientpositive/vectorization_0.q.out        |   2 +-
 .../vectorization_short_regress.q.out           |  20 +-
 10 files changed, 73 insertions(+), 747 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index dceaa8c..c9c33fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -915,7 +915,7 @@ public class TypeCheckProcFactory {
               // we'll try again to convert it to double
               // however, if we already tried this, or the column is NUMBER type and
               // the operator is EQUAL, return false due to the type mismatch
-              if (triedDouble ||
+              if (triedDouble &&
                   (genericUDF instanceof GenericUDFOPEqual
                   && !columnType.equals(serdeConstants.STRING_TYPE_NAME))) {
                 return new ExprNodeConstantDesc(false);
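
The one-character change above (from "||" to "&&") is the entire functional fix. As a rough standalone sketch, using hypothetical helper names rather than Hive's actual API, the decision it controls can be read as:

    // Illustrative restatement of the patched condition in TypeCheckProcFactory
    // (names here are hypothetical, not Hive's API). The predicate is folded to
    // constant FALSE only when the constant has already been retried as a double
    // AND the operator is an equality comparison against a non-string column.
    // With the old "||", either condition alone triggered the fold, which is what
    // made predicates such as "tinyint_col = 10.0" evaluate to constant false.
    class FoldDecisionSketch {
      static boolean foldToConstantFalse(boolean triedDouble,
                                         boolean isEqualityComparison,
                                         String columnTypeName) {
        return triedDouble
            && (isEqualityComparison && !"string".equals(columnTypeName));
      }
    }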

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/queries/clientpositive/cast_tinyint_to_double.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cast_tinyint_to_double.q b/ql/src/test/queries/clientpositive/cast_tinyint_to_double.q
new file mode 100644
index 0000000..59c5e89
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cast_tinyint_to_double.q
@@ -0,0 +1,7 @@
+drop table t;
+CREATE TABLE t(c tinyint);
+insert overwrite table t select 10 from src limit 1;
+
+select * from t where c = 10.0;
+
+select * from t where c = -10.0;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/cast_tinyint_to_double.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cast_tinyint_to_double.q.out b/ql/src/test/results/clientpositive/cast_tinyint_to_double.q.out
new file mode 100644
index 0000000..c29df65
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cast_tinyint_to_double.q.out
@@ -0,0 +1,38 @@
+PREHOOK: query: drop table t
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table t
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE t(c tinyint)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t
+POSTHOOK: query: CREATE TABLE t(c tinyint)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t
+PREHOOK: query: insert overwrite table t select 10 from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t
+POSTHOOK: query: insert overwrite table t select 10 from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t
+POSTHOOK: Lineage: t.c EXPRESSION []
+PREHOOK: query: select * from t where c = 10.0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t
+#### A masked pattern was here ####
+POSTHOOK: query: select * from t where c = 10.0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t
+#### A masked pattern was here ####
+10
+PREHOOK: query: select * from t where c = -10.0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t
+#### A masked pattern was here ####
+POSTHOOK: query: select * from t where c = -10.0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t
+#### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/infer_const_type.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_const_type.q.out b/ql/src/test/results/clientpositive/infer_const_type.q.out
index 4f4fe1c..be75489 100644
--- a/ql/src/test/results/clientpositive/infer_const_type.q.out
+++ b/ql/src/test/results/clientpositive/infer_const_type.q.out
@@ -102,6 +102,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@infertypes
 #### A masked pattern was here ####
 127	32767	12345	-12345	906.0	-307.0	1234
+WARNING: Comparing a bigint and a double may result in a loss of precision.
 PREHOOK: query: -- all should return false as all numbers exceeed the largest number 
 -- which could be represented by the corresponding type
 -- and string_col = long_const should return false
@@ -136,7 +137,7 @@ STAGE PLANS:
             alias: infertypes
             Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: false (type: boolean)
+              predicate: ((((ti = 128.0) or (si = 32768.0)) or (i = 2.147483648E9)) or (bi = 9.223372036854776E18)) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
               Select Operator
                 expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string)
@@ -156,6 +157,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
+WARNING: Comparing a bigint and a double may result in a loss of precision.
 PREHOOK: query: SELECT * FROM infertypes WHERE
   ti  = '128' OR
   si  = 32768 OR
@@ -200,7 +202,7 @@ STAGE PLANS:
             alias: infertypes
             Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: false (type: boolean)
+              predicate: (((ti = 127.0) or (si = 327.0)) or (i = -100.0)) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
               Select Operator
                 expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string)
@@ -234,6 +236,7 @@ POSTHOOK: query: SELECT * FROM infertypes WHERE
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@infertypes
 #### A masked pattern was here ####
+127	32767	12345	-12345	906.0	-307.0	1234
 PREHOOK: query: EXPLAIN SELECT * FROM infertypes WHERE
   ti < '127.0' AND
   i > '100.0' AND
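
The new "WARNING: Comparing a bigint and a double may result in a loss of precision." lines above accompany predicates that, after the fix, are evaluated as bigint-versus-double comparisons instead of being folded to constant false. A minimal plain-Java illustration (not Hive code) of the rounding behind that warning:

    // Demonstrates why comparing a bigint (long) with a double is lossy near
    // the top of the long range: the long is widened to double, which cannot
    // represent every 64-bit integer exactly.
    class BigintDoublePrecisionDemo {
      public static void main(String[] args) {
        long bi = Long.MAX_VALUE;            // 9223372036854775807
        double asDouble = (double) bi;       // rounds up to 2^63
        System.out.println(asDouble);        // 9.223372036854776E18, the same
                                             // literal seen in the rewritten
                                             // predicate above
        System.out.println(bi == asDouble);  // true: bi is widened to double for
                                             // the comparison, so the rounding is
                                             // invisible to ==
      }
    }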

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/spark/metadata_only_queries.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/metadata_only_queries.q.out b/ql/src/test/results/clientpositive/spark/metadata_only_queries.q.out
deleted file mode 100644
index 0ccae50..0000000
--- a/ql/src/test/results/clientpositive/spark/metadata_only_queries.q.out
+++ /dev/null
@@ -1,498 +0,0 @@
-PREHOOK: query: create table over10k(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp, 
-           dec decimal,  
-           bin binary)
-       row format delimited
-       fields terminated by '|'
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over10k
-POSTHOOK: query: create table over10k(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp, 
-           dec decimal,  
-           bin binary)
-       row format delimited
-       fields terminated by '|'
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over10k
-PREHOOK: query: create table stats_tbl(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal,  
-           bin binary)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_tbl
-POSTHOOK: query: create table stats_tbl(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal,  
-           bin binary)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_tbl
-PREHOOK: query: create table stats_tbl_part(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal,  
-           bin binary) partitioned by (dt string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_tbl_part
-POSTHOOK: query: create table stats_tbl_part(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal,  
-           bin binary) partitioned by (dt string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_tbl_part
-PREHOOK: query: insert overwrite table stats_tbl select * from over10k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k
-PREHOOK: Output: default@stats_tbl
-POSTHOOK: query: insert overwrite table stats_tbl select * from over10k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k
-POSTHOOK: Output: default@stats_tbl
-POSTHOOK: Lineage: stats_tbl.b SIMPLE [(over10k)over10k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.bin SIMPLE [(over10k)over10k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.bo SIMPLE [(over10k)over10k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.d SIMPLE [(over10k)over10k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.dec SIMPLE [(over10k)over10k.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
-POSTHOOK: Lineage: stats_tbl.f SIMPLE [(over10k)over10k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.i SIMPLE [(over10k)over10k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.s SIMPLE [(over10k)over10k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.si SIMPLE [(over10k)over10k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.t SIMPLE [(over10k)over10k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl.ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: insert into table stats_tbl_part partition (dt='2010') select * from over10k where t>0 and t<30
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k
-PREHOOK: Output: default@stats_tbl_part@dt=2010
-POSTHOOK: query: insert into table stats_tbl_part partition (dt='2010') select * from over10k where t>0 and t<30
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k
-POSTHOOK: Output: default@stats_tbl_part@dt=2010
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).b SIMPLE [(over10k)over10k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).bin SIMPLE [(over10k)over10k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).bo SIMPLE [(over10k)over10k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).d SIMPLE [(over10k)over10k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).dec SIMPLE [(over10k)over10k.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).f SIMPLE [(over10k)over10k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).i SIMPLE [(over10k)over10k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).s SIMPLE [(over10k)over10k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).si SIMPLE [(over10k)over10k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).t SIMPLE [(over10k)over10k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: insert into table stats_tbl_part partition (dt='2011') select * from over10k where t>30 and t<60
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k
-PREHOOK: Output: default@stats_tbl_part@dt=2011
-POSTHOOK: query: insert into table stats_tbl_part partition (dt='2011') select * from over10k where t>30 and t<60
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k
-POSTHOOK: Output: default@stats_tbl_part@dt=2011
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).b SIMPLE [(over10k)over10k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).bin SIMPLE [(over10k)over10k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).bo SIMPLE [(over10k)over10k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).d SIMPLE [(over10k)over10k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).dec SIMPLE [(over10k)over10k.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).f SIMPLE [(over10k)over10k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).i SIMPLE [(over10k)over10k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).s SIMPLE [(over10k)over10k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).si SIMPLE [(over10k)over10k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).t SIMPLE [(over10k)over10k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2011).ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: insert into table stats_tbl_part partition (dt='2012') select * from over10k where t>60
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k
-PREHOOK: Output: default@stats_tbl_part@dt=2012
-POSTHOOK: query: insert into table stats_tbl_part partition (dt='2012') select * from over10k where t>60
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k
-POSTHOOK: Output: default@stats_tbl_part@dt=2012
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).b SIMPLE [(over10k)over10k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).bin SIMPLE [(over10k)over10k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).bo SIMPLE [(over10k)over10k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).d SIMPLE [(over10k)over10k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).dec SIMPLE [(over10k)over10k.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).f SIMPLE [(over10k)over10k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).i SIMPLE [(over10k)over10k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).s SIMPLE [(over10k)over10k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).si SIMPLE [(over10k)over10k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).t SIMPLE [(over10k)over10k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2012).ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: explain 
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si), max(i), min(b) from stats_tbl
-PREHOOK: type: QUERY
-POSTHOOK: query: explain 
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si), max(i), min(b) from stats_tbl
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: stats_tbl
-                  Statistics: Num rows: 9999 Data size: 1030908 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: s (type: string), bo (type: boolean), bin (type: binary), si (type: smallint), i (type: int), b (type: bigint)
-                    outputColumnNames: s, bo, bin, si, i, b
-                    Statistics: Num rows: 9999 Data size: 1030908 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: count(), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si), max(i), min(b)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                      Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: bigint), _col8 (type: int), _col9 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), count(VALUE._col3), count(VALUE._col4), count(VALUE._col5), count(VALUE._col6), count(VALUE._col7), max(VALUE._col8), min(VALUE._col9)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: explain
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si), max(i), min(b) from stats_tbl_part
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si), max(i), min(b) from stats_tbl_part
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: stats_tbl_part
-                  Statistics: Num rows: 9489 Data size: 978785 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: s (type: string), bo (type: boolean), bin (type: binary), si (type: smallint), i (type: int), b (type: bigint)
-                    outputColumnNames: s, bo, bin, si, i, b
-                    Statistics: Num rows: 9489 Data size: 978785 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: count(), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si), max(i), min(b)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                      Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: bigint), _col4 (type: bigint), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: bigint), _col8 (type: int), _col9 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), count(VALUE._col3), count(VALUE._col4), count(VALUE._col5), count(VALUE._col6), count(VALUE._col7), max(VALUE._col8), min(VALUE._col9)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: analyze table stats_tbl compute statistics for columns t,si,i,b,f,d,bo,s,bin
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table stats_tbl compute statistics for columns t,si,i,b,f,d,bo,s,bin
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl
-#### A masked pattern was here ####
-PREHOOK: query: analyze table stats_tbl_part partition(dt='2010') compute statistics for columns t,si,i,b,f,d,bo,s,bin
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2010
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table stats_tbl_part partition(dt='2010') compute statistics for columns t,si,i,b,f,d,bo,s,bin
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2010
-#### A masked pattern was here ####
-PREHOOK: query: analyze table stats_tbl_part partition(dt='2011') compute statistics for columns t,si,i,b,f,d,bo,s,bin
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2011
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table stats_tbl_part partition(dt='2011') compute statistics for columns t,si,i,b,f,d,bo,s,bin
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2011
-#### A masked pattern was here ####
-PREHOOK: query: analyze table stats_tbl_part partition(dt='2012') compute statistics for columns t,si,i,b,f,d,bo,s,bin
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2012
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table stats_tbl_part partition(dt='2012') compute statistics for columns t,si,i,b,f,d,bo,s,bin
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2012
-#### A masked pattern was here ####
-PREHOOK: query: explain 
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl
-PREHOOK: type: QUERY
-POSTHOOK: query: explain 
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl
-#### A masked pattern was here ####
-9999	9999	1999.8000000000002	9999	9999	9999	9999	9999
-PREHOOK: query: explain
-select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl
-#### A masked pattern was here ####
-POSTHOOK: query: select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl
-#### A masked pattern was here ####
-65536	65791	4294967296	4294967551	0.01	99.98	0.01	50.0
-PREHOOK: query: explain 
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl_part
-PREHOOK: type: QUERY
-POSTHOOK: query: explain 
-select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl_part
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*), sum(1), sum(0.2), count(1), count(s), count(bo), count(bin), count(si) from stats_tbl_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-9489	9489	1897.8000000000002	9489	9489	9489	9489	9489
-PREHOOK: query: explain
-select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl_part
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl_part
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-POSTHOOK: query: select min(i), max(i), min(b), max(b), min(f), max(f), min(d), max(d) from stats_tbl_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-65536	65791	4294967296	4294967551	0.01	99.98	0.01	50.0
-PREHOOK: query: explain select count(ts) from stats_tbl_part
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select count(ts) from stats_tbl_part
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: stats_tbl_part
-                  Statistics: Num rows: 9489 Data size: 978785 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: ts (type: timestamp)
-                    outputColumnNames: ts
-                    Statistics: Num rows: 9489 Data size: 978785 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: count(ts)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: drop table stats_tbl
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_tbl
-PREHOOK: Output: default@stats_tbl
-POSTHOOK: query: drop table stats_tbl
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_tbl
-POSTHOOK: Output: default@stats_tbl
-PREHOOK: query: drop table stats_tbl_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Output: default@stats_tbl_part
-POSTHOOK: query: drop table stats_tbl_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Output: default@stats_tbl_part

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/spark/metadata_only_queries_with_filters.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/metadata_only_queries_with_filters.q.out b/ql/src/test/results/clientpositive/spark/metadata_only_queries_with_filters.q.out
deleted file mode 100644
index 6dea3e0..0000000
--- a/ql/src/test/results/clientpositive/spark/metadata_only_queries_with_filters.q.out
+++ /dev/null
@@ -1,224 +0,0 @@
-PREHOOK: query: create table over10k(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp, 
-           dec decimal,  
-           bin binary)
-       row format delimited
-       fields terminated by '|'
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over10k
-POSTHOOK: query: create table over10k(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp, 
-           dec decimal,  
-           bin binary)
-       row format delimited
-       fields terminated by '|'
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over10k
-PREHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over10k
-POSTHOOK: query: load data local inpath '../../data/files/over10k' into table over10k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over10k
-PREHOOK: query: create table stats_tbl_part(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp, 
-           dec decimal,  
-           bin binary) partitioned by (dt int)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_tbl_part
-POSTHOOK: query: create table stats_tbl_part(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp, 
-           dec decimal,  
-           bin binary) partitioned by (dt int)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_tbl_part
-PREHOOK: query: from over10k 
-insert overwrite table stats_tbl_part partition (dt=2010) select t,si,i,b,f,d,bo,s,ts,dec,bin where t>0 and t<30 
-insert overwrite table stats_tbl_part partition (dt=2014) select t,si,i,b,f,d,bo,s,ts,dec,bin where t > 30 and t<60
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k
-PREHOOK: Output: default@stats_tbl_part@dt=2010
-PREHOOK: Output: default@stats_tbl_part@dt=2014
-POSTHOOK: query: from over10k 
-insert overwrite table stats_tbl_part partition (dt=2010) select t,si,i,b,f,d,bo,s,ts,dec,bin where t>0 and t<30 
-insert overwrite table stats_tbl_part partition (dt=2014) select t,si,i,b,f,d,bo,s,ts,dec,bin where t > 30 and t<60
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k
-POSTHOOK: Output: default@stats_tbl_part@dt=2010
-POSTHOOK: Output: default@stats_tbl_part@dt=2014
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).b SIMPLE [(over10k)over10k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).bin SIMPLE [(over10k)over10k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).bo SIMPLE [(over10k)over10k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).d SIMPLE [(over10k)over10k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).dec SIMPLE [(over10k)over10k.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).f SIMPLE [(over10k)over10k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).i SIMPLE [(over10k)over10k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).s SIMPLE [(over10k)over10k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).si SIMPLE [(over10k)over10k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).t SIMPLE [(over10k)over10k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2010).ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).b SIMPLE [(over10k)over10k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).bin SIMPLE [(over10k)over10k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).bo SIMPLE [(over10k)over10k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).d SIMPLE [(over10k)over10k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).dec SIMPLE [(over10k)over10k.FieldSchema(name:dec, type:decimal(10,0), comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).f SIMPLE [(over10k)over10k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).i SIMPLE [(over10k)over10k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).s SIMPLE [(over10k)over10k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).si SIMPLE [(over10k)over10k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).t SIMPLE [(over10k)over10k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: stats_tbl_part PARTITION(dt=2014).ts SIMPLE [(over10k)over10k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: analyze table stats_tbl_part partition(dt) compute statistics
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2010
-PREHOOK: Input: default@stats_tbl_part@dt=2014
-PREHOOK: Output: default@stats_tbl_part
-PREHOOK: Output: default@stats_tbl_part@dt=2010
-PREHOOK: Output: default@stats_tbl_part@dt=2014
-POSTHOOK: query: analyze table stats_tbl_part partition(dt) compute statistics
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2010
-POSTHOOK: Input: default@stats_tbl_part@dt=2014
-POSTHOOK: Output: default@stats_tbl_part
-POSTHOOK: Output: default@stats_tbl_part@dt=2010
-POSTHOOK: Output: default@stats_tbl_part@dt=2014
-PREHOOK: query: analyze table stats_tbl_part partition(dt=2010) compute statistics for columns t,si,i,b,f,d,bo,s,bin
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2010
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table stats_tbl_part partition(dt=2010) compute statistics for columns t,si,i,b,f,d,bo,s,bin
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2010
-#### A masked pattern was here ####
-PREHOOK: query: analyze table stats_tbl_part partition(dt=2014) compute statistics for columns t,si,i,b,f,d,bo,s,bin
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2014
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table stats_tbl_part partition(dt=2014) compute statistics for columns t,si,i,b,f,d,bo,s,bin
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2014
-#### A masked pattern was here ####
-PREHOOK: query: explain 
-select count(*), count(1), sum(1), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt = 2010
-PREHOOK: type: QUERY
-POSTHOOK: query: explain 
-select count(*), count(1), sum(1), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt = 2010
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select count(*), count(1), sum(1), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt = 2010
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*), count(1), sum(1), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt = 2010
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-2322	2322	2322	2322	2322	2322	2322	65791	4294967296	99.98	0.03
-PREHOOK: query: explain 
-select count(*), count(1), sum(1), sum(2), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt > 2010
-PREHOOK: type: QUERY
-POSTHOOK: query: explain 
-select count(*), count(1), sum(1), sum(2), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt > 2010
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select count(*), count(1), sum(1), sum(2), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt > 2010
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*), count(1), sum(1), sum(2), count(s), count(bo), count(bin), count(si), max(i), min(b), max(f), min(d) from stats_tbl_part where dt > 2010
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-2219	2219	2219	4438	2219	2219	2219	2219	65791	4294967296	99.96	0.04
-PREHOOK: query: select count(*) from stats_tbl_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from stats_tbl_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-#### A masked pattern was here ####
-4541
-PREHOOK: query: select count(*)/2 from stats_tbl_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Input: default@stats_tbl_part@dt=2010
-PREHOOK: Input: default@stats_tbl_part@dt=2014
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*)/2 from stats_tbl_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Input: default@stats_tbl_part@dt=2010
-POSTHOOK: Input: default@stats_tbl_part@dt=2014
-#### A masked pattern was here ####
-2270.5
-PREHOOK: query: drop table stats_tbl_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_tbl_part
-PREHOOK: Output: default@stats_tbl_part
-POSTHOOK: query: drop table stats_tbl_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_tbl_part
-POSTHOOK: Output: default@stats_tbl_part

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/tez/vectorization_0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorization_0.q.out b/ql/src/test/results/clientpositive/tez/vectorization_0.q.out
index ac72ed3..6d00432 100644
--- a/ql/src/test/results/clientpositive/tez/vectorization_0.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorization_0.q.out
@@ -1026,7 +1026,7 @@ STAGE PLANS:
                   alias: alltypesorc
                   Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((cstring2 like '%b%') or ((79.553 <> cint) or (cbigint < cdouble))) (type: boolean)
+                    predicate: (((cstring2 like '%b%') or ((79.553 <> cint) or (cbigint < cdouble))) or ((ctinyint >= csmallint) and ((cboolean2 = 1) and (3569.0 = ctinyint)))) (type: boolean)
                     Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cbigint (type: bigint), cfloat (type: float), ctinyint (type: tinyint)

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
index 11683d4..1932817 100644
--- a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
@@ -2048,23 +2048,23 @@ STAGE PLANS:
                   alias: alltypesorc
                   Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((cdouble > 2563.58) and (((cbigint >= cint) and ((csmallint < cint) and (cfloat < -5638.15))) or ((cdouble <= cbigint) and (-5638.15 > cbigint)))) (type: boolean)
-                    Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                    predicate: ((cdouble > 2563.58) and ((((cbigint >= cint) and ((csmallint < cint) and (cfloat < -5638.15))) or (2563.58 = ctinyint)) or ((cdouble <= cbigint) and (-5638.15 > cbigint)))) (type: boolean)
+                    Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cdouble (type: double), cfloat (type: float)
                       outputColumnNames: cdouble, cfloat
-                      Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                       Group By Operator
                         aggregations: var_samp(cdouble), count(cfloat), sum(cfloat), var_pop(cdouble), stddev_pop(cdouble), sum(cdouble)
                         keys: cdouble (type: double)
                         mode: hash
                         outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                        Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                         Reduce Output Operator
                           key expressions: _col0 (type: double)
                           sort order: +
                           Map-reduce partition columns: _col0 (type: double)
-                          Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col1 (type: struct<count:bigint,sum:double,variance:double>), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct<count:bigint,sum:double,variance:double>), _col5 (type: struct<count:bigint,sum:double,variance:double>), _col6 (type: double)
             Execution mode: vectorized
         Reducer 2 
@@ -2074,25 +2074,25 @@ STAGE PLANS:
                 keys: KEY._col0 (type: double)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: double), _col1 (type: double), _col5 (type: double), (_col0 + _col1) (type: double), (_col0 * 762) (type: double), _col6 (type: double), (-863.257 % (_col0 * 762)) (type: double), (2563.58 * _col1) (type: double), (- _col1) (type: double), _col2 (type: bigint), ((2563.58 * _col1) + -5638.15) (type: double), ((- _col1) * ((2563.58 * _col1) + -5638.15)) (type: double), _col3 (type: double), _col4 (type: double), (_col0 - (- _col1)) (type: double)
                   outputColumnNames: _col0, _col1, _col10, _col11, _col12, _col13, _col14, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                  Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: double)
                     sort order: +
-                    Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: double), _col14 (type: double)
         Reducer 3 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: bigint), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double), VALUE._col7 (type: double), VALUE._col8 (type: double), VALUE._col9 (type: double), VALUE._col10 (type: double), VALUE._col11 (type: double), VALUE._col12 (type: double), VALUE._col13 (type: double), VALUE._col12 (type: double)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
-                Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/vectorization_0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorization_0.q.out b/ql/src/test/results/clientpositive/vectorization_0.q.out
index b7c0168..6369487 100644
--- a/ql/src/test/results/clientpositive/vectorization_0.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_0.q.out
@@ -1042,7 +1042,7 @@ STAGE PLANS:
             alias: alltypesorc
             Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((cstring2 like '%b%') or ((79.553 <> cint) or (cbigint < cdouble))) (type: boolean)
+              predicate: (((cstring2 like '%b%') or ((79.553 <> cint) or (cbigint < cdouble))) or ((ctinyint >= csmallint) and ((cboolean2 = 1) and (3569.0 = ctinyint)))) (type: boolean)
               Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cbigint (type: bigint), cfloat (type: float), ctinyint (type: tinyint)

http://git-wip-us.apache.org/repos/asf/hive/blob/d687bfb8/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
index ffb1401..2d56e7f 100644
--- a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
@@ -2002,23 +2002,23 @@ STAGE PLANS:
             alias: alltypesorc
             Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((cdouble > 2563.58) and (((cbigint >= cint) and ((csmallint < cint) and (cfloat < -5638.15))) or ((cdouble <= cbigint) and (-5638.15 > cbigint)))) (type: boolean)
-              Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+              predicate: ((cdouble > 2563.58) and ((((cbigint >= cint) and ((csmallint < cint) and (cfloat < -5638.15))) or (2563.58 = ctinyint)) or ((cdouble <= cbigint) and (-5638.15 > cbigint)))) (type: boolean)
+              Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cdouble (type: double), cfloat (type: float)
                 outputColumnNames: cdouble, cfloat
-                Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: var_samp(cdouble), count(cfloat), sum(cfloat), var_pop(cdouble), stddev_pop(cdouble), sum(cdouble)
                   keys: cdouble (type: double)
                   mode: hash
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                  Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: double)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: double)
-                    Statistics: Num rows: 606 Data size: 130292 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2654 Data size: 570619 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: struct<count:bigint,sum:double,variance:double>), _col2 (type: bigint), _col3 (type: double), _col4 (type: struct<count:bigint,sum:double,variance:double>), _col5 (type: struct<count:bigint,sum:double,variance:double>), _col6 (type: double)
       Execution mode: vectorized
       Reduce Operator Tree:
@@ -2027,11 +2027,11 @@ STAGE PLANS:
           keys: KEY._col0 (type: double)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: double), _col1 (type: double), _col5 (type: double), (_col0 + _col1) (type: double), (_col0 * 762) (type: double), _col6 (type: double), (-863.257 % (_col0 * 762)) (type: double), (2563.58 * _col1) (type: double), (- _col1) (type: double), _col2 (type: bigint), ((2563.58 * _col1) + -5638.15) (type: double), ((- _col1) * ((2563.58 * _col1) + -5638.15)) (type: double), _col3 (type: double), _col4 (type: double), (_col0 - (- _col1)) (type: double)
             outputColumnNames: _col0, _col1, _col10, _col11, _col12, _col13, _col14, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-            Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
               table:
@@ -2046,16 +2046,16 @@ STAGE PLANS:
             Reduce Output Operator
               key expressions: _col0 (type: double)
               sort order: +
-              Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: double), _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: double), _col14 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: double), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: bigint), VALUE._col4 (type: double), VALUE._col5 (type: double), VALUE._col6 (type: double), VALUE._col7 (type: double), VALUE._col8 (type: double), VALUE._col9 (type: double), VALUE._col10 (type: double), VALUE._col11 (type: double), VALUE._col12 (type: double), VALUE._col13 (type: double), VALUE._col12 (type: double)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
-          Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 303 Data size: 65146 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1327 Data size: 285309 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat