Posted to commits@hive.apache.org by ha...@apache.org on 2015/05/23 20:49:21 UTC

[48/48] hive git commit: HIVE-8769 : Physical optimizer : Incorrect CE results in a shuffle join instead of a Map join (PK/FK pattern not detected) (Pengcheng Xiong via Ashutosh Chauhan)

HIVE-8769 : Physical optimizer : Incorrect CE results in a shuffle join instead of a Map join (PK/FK pattern not detected) (Pengcheng Xiong via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>
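
Context note (an interpretation of the title, not part of the commit itself): the PK/FK pattern referred to above is the usual fact-to-dimension equi-join. A minimal HiveQL sketch of the kind of query whose cardinality estimate (CE) this change targets, using hypothetical table and column names:

  -- Hypothetical tables: dim.d_id acts as the primary key, fact.d_id as the foreign key.
  SET hive.auto.convert.join=true;          -- allow automatic map-join conversion
  SET hive.stats.fetch.column.stats=true;   -- let the planner use column statistics

  EXPLAIN
  SELECT f.measure, d.attr
  FROM fact f
  JOIN dim d ON f.d_id = d.d_id             -- PK/FK join pattern
  WHERE d.attr = 'x';

Roughly speaking, when the PK/FK relationship is recognized, the join's estimated row count is driven by the fact side scaled by the dimension-side filter selectivity rather than being overestimated, so the small dimension input can qualify for broadcast as a map join instead of falling back to a shuffle join.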


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d823fc80
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d823fc80
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d823fc80

Branch: refs/heads/master
Commit: d823fc80729cacd5cada73db8b3432cd55c78ed9
Parents: bbdba9f
Author: Pengcheng Xiong <px...@hortonworks.com>
Authored: Fri May 22 23:08:00 2015 -0700
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Sat May 23 11:45:40 2015 -0700

----------------------------------------------------------------------
 .../results/positive/external_table_ppd.q.out   |   8 +-
 .../results/positive/hbase_custom_key2.q.out    |  18 +-
 .../results/positive/hbase_custom_key3.q.out    |  24 +-
 .../results/positive/hbase_ppd_key_range.q.out  |  76 +-
 .../test/results/positive/hbase_pushdown.q.out  |  68 +-
 .../test/results/positive/hbase_queries.q.out   |  42 +-
 .../test/results/positive/hbase_timestamp.q.out |  32 +-
 .../test/results/positive/ppd_key_ranges.q.out  |  32 +-
 .../ql/optimizer/calcite/RelOptHiveTable.java   |   4 +-
 .../stats/annotation/StatsRulesProcFactory.java | 130 ++-
 .../hive/ql/plan/AbstractOperatorDesc.java      |   2 +-
 .../hadoop/hive/ql/plan/ColStatistics.java      |  33 +-
 .../apache/hadoop/hive/ql/plan/Statistics.java  |  15 +-
 .../apache/hadoop/hive/ql/stats/StatsUtils.java | 194 ++---
 .../clientpositive/annotate_stats_filter.q.out  |  24 +-
 .../clientpositive/annotate_stats_limit.q.out   |   2 +-
 .../clientpositive/annotate_stats_part.q.out    |  10 +-
 .../clientpositive/annotate_stats_select.q.out  |   6 +-
 .../clientpositive/annotate_stats_table.q.out   |   4 +-
 .../results/clientpositive/auto_join30.q.out    |  24 +
 .../results/clientpositive/auto_join31.q.out    |   4 +
 .../results/clientpositive/auto_join32.q.out    |  32 +-
 .../clientpositive/auto_join_stats.q.out        |  28 +-
 .../clientpositive/auto_join_stats2.q.out       |   8 +-
 .../auto_join_without_localtask.q.out           |  60 ++
 .../clientpositive/auto_sortmerge_join_1.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_12.q.out |   2 +-
 .../clientpositive/auto_sortmerge_join_14.q.out |   4 +
 .../clientpositive/auto_sortmerge_join_15.q.out |   4 +
 .../clientpositive/auto_sortmerge_join_2.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_3.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_4.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_5.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_6.q.out  |  66 ++
 .../clientpositive/auto_sortmerge_join_7.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_8.q.out  |   8 +
 .../clientpositive/auto_sortmerge_join_9.q.out  | 143 ++++
 .../clientpositive/binarysortable_1.q.out       | Bin 4302 -> 4329 bytes
 .../clientpositive/bucket_map_join_1.q.out      |  10 +-
 .../clientpositive/bucket_map_join_2.q.out      |  10 +-
 .../results/clientpositive/bucketmapjoin1.q.out |  28 +-
 .../bucketsortoptimize_insert_2.q.out           |  20 +
 .../bucketsortoptimize_insert_4.q.out           |  16 +
 .../bucketsortoptimize_insert_5.q.out           |  20 +
 .../bucketsortoptimize_insert_6.q.out           |  24 +
 .../clientpositive/column_access_stats.q.out    |  78 +-
 .../results/clientpositive/complex_alias.q.out  |  42 +-
 .../clientpositive/correlationoptimizer1.q.out  |  16 +-
 .../clientpositive/correlationoptimizer10.q.out |   6 +-
 .../clientpositive/correlationoptimizer11.q.out |   8 +-
 .../clientpositive/correlationoptimizer15.q.out |   6 +-
 .../clientpositive/correlationoptimizer2.q.out  |  10 +-
 .../clientpositive/correlationoptimizer3.q.out  |  12 +-
 .../clientpositive/correlationoptimizer4.q.out  |   6 +-
 .../clientpositive/correlationoptimizer5.q.out  |   6 +
 .../clientpositive/correlationoptimizer6.q.out  |  48 +-
 .../clientpositive/cross_product_check_2.q.out  |   4 +
 .../clientpositive/decimal_precision2.q.out     |  28 +-
 .../encryption_insert_partition_dynamic.q.out   |  10 +-
 .../encryption_insert_partition_static.q.out    |  10 +-
 .../clientpositive/explain_logical.q.out        |   6 +-
 .../clientpositive/explain_rearrange.q.out      |  32 +-
 .../results/clientpositive/filter_numeric.q.out |   6 +-
 .../test/results/clientpositive/fold_case.q.out |   2 +-
 .../test/results/clientpositive/fold_when.q.out |  24 +-
 .../results/clientpositive/groupby_cube1.q.out  | 108 +--
 .../clientpositive/groupby_grouping_sets2.q.out |  36 +-
 .../clientpositive/groupby_grouping_sets3.q.out |  32 +-
 .../clientpositive/groupby_grouping_sets4.q.out |  72 +-
 .../clientpositive/groupby_grouping_sets5.q.out |  44 +-
 .../clientpositive/groupby_grouping_sets6.q.out |  30 +-
 .../results/clientpositive/groupby_ppd.q.out    |  34 +-
 .../clientpositive/groupby_rollup1.q.out        |  94 +-
 .../results/clientpositive/groupby_sort_6.q.out |  46 +-
 .../test/results/clientpositive/having2.q.out   |  32 +-
 .../identity_project_remove_skip.q.out          |   6 +
 .../results/clientpositive/implicit_cast1.q.out |   8 +-
 .../clientpositive/index_auto_empty.q.out       |   8 +-
 .../results/clientpositive/index_serde.q.out    |   6 +-
 .../clientpositive/index_skewtable.q.out        |  12 +-
 .../clientpositive/infer_const_type.q.out       |  28 +-
 .../test/results/clientpositive/input21.q.out   |  10 +-
 .../test/results/clientpositive/input23.q.out   |   8 +-
 .../test/results/clientpositive/input24.q.out   |   4 +-
 .../test/results/clientpositive/input25.q.out   |  36 +-
 .../test/results/clientpositive/input26.q.out   |  28 +-
 ql/src/test/results/clientpositive/input9.q.out |   6 +-
 .../results/clientpositive/input_part4.q.out    |   6 +-
 .../test/results/clientpositive/insert1.q.out   |  46 +-
 ql/src/test/results/clientpositive/join19.q.out |  62 +-
 ql/src/test/results/clientpositive/join29.q.out |   6 +
 ql/src/test/results/clientpositive/join31.q.out |   6 +
 .../join_cond_pushdown_unqual1.q.out            |  48 +-
 .../join_cond_pushdown_unqual2.q.out            |  24 +-
 .../join_cond_pushdown_unqual3.q.out            |  48 +-
 .../join_cond_pushdown_unqual4.q.out            |  24 +-
 .../results/clientpositive/join_hive_626.q.out  |  28 +-
 .../results/clientpositive/join_reorder.q.out   |  92 +-
 .../results/clientpositive/join_reorder2.q.out  |  68 +-
 .../results/clientpositive/join_reorder3.q.out  |  68 +-
 .../results/clientpositive/join_reorder4.q.out  |  54 +-
 .../test/results/clientpositive/join_view.q.out |  18 +-
 .../test/results/clientpositive/keyword_1.q.out |   8 +-
 .../clientpositive/lateral_view_explode2.q.out  |   6 +-
 .../list_bucket_query_oneskew_2.q.out           |  20 +-
 .../clientpositive/mapjoin_subquery2.q.out      |  20 +-
 .../results/clientpositive/merge_join_1.q.out   |  32 +-
 .../results/clientpositive/mergejoins.q.out     |  40 +-
 .../clientpositive/mergejoins_mixed.q.out       | 332 ++++----
 .../results/clientpositive/metadataonly1.q.out  |  72 +-
 .../results/clientpositive/multiMapJoin1.q.out  |  76 ++
 .../results/clientpositive/multiMapJoin2.q.out  |  30 +-
 .../clientpositive/multigroupby_singlemr.q.out  | 136 +--
 .../results/clientpositive/nullformatCTAS.q.out |   6 +-
 .../results/clientpositive/nullgroup3.q.out     |  16 +-
 .../results/clientpositive/nullgroup5.q.out     |  14 +-
 .../clientpositive/optimize_nullscan.q.out      |  82 +-
 .../clientpositive/orc_predicate_pushdown.q.out |  36 +-
 .../clientpositive/partition_boolexpr.q.out     |   8 +-
 .../results/clientpositive/ppd_gby_join.q.out   |  28 +-
 .../test/results/clientpositive/ppd_join.q.out  |  22 +-
 .../test/results/clientpositive/ppd_join4.q.out |  24 +-
 .../test/results/clientpositive/ppd_join5.q.out |  22 +-
 .../clientpositive/ppd_outer_join2.q.out        |  16 +-
 .../clientpositive/ppd_outer_join3.q.out        |  16 +-
 .../clientpositive/ppd_outer_join4.q.out        |  32 +-
 .../clientpositive/ppd_outer_join5.q.out        |  72 +-
 .../clientpositive/ppd_repeated_alias.q.out     |  78 +-
 .../results/clientpositive/ppd_udf_col.q.out    |  12 +-
 .../results/clientpositive/ppd_union_view.q.out |  46 +-
 .../clientpositive/ql_rewrite_gbtoidx.q.out     | 258 +++---
 .../ql_rewrite_gbtoidx_cbo_1.q.out              | 260 +++---
 .../ql_rewrite_gbtoidx_cbo_2.q.out              |  16 +-
 .../query_result_fileformat.q.out               |  12 +-
 ql/src/test/results/clientpositive/quote1.q.out |   6 +-
 .../results/clientpositive/quotedid_basic.q.out |  70 +-
 .../results/clientpositive/quotedid_skew.q.out  |  40 +-
 .../clientpositive/reduce_deduplicate.q.out     |  16 +-
 .../runtime_skewjoin_mapjoin_spark.q.out        |  27 +-
 .../test/results/clientpositive/sample6.q.out   |  12 +-
 .../clientpositive/select_dummy_source.q.out    |  34 +-
 .../test/results/clientpositive/semijoin.q.out  |  10 +-
 .../test/results/clientpositive/skewjoin.q.out  |  81 +-
 .../clientpositive/skewjoin_mapjoin1.q.out      | 112 +--
 .../clientpositive/skewjoin_mapjoin10.q.out     |  26 +-
 .../clientpositive/skewjoin_mapjoin11.q.out     |  32 +-
 .../clientpositive/skewjoin_mapjoin2.q.out      |  72 +-
 .../clientpositive/skewjoin_mapjoin3.q.out      |  32 +-
 .../clientpositive/skewjoin_mapjoin4.q.out      |  40 +-
 .../clientpositive/skewjoin_mapjoin5.q.out      |  50 +-
 .../clientpositive/skewjoin_mapjoin6.q.out      |  48 +-
 .../clientpositive/skewjoin_mapjoin7.q.out      |  68 +-
 .../clientpositive/skewjoin_mapjoin8.q.out      |  20 +-
 .../clientpositive/skewjoin_mapjoin9.q.out      |  40 +-
 .../skewjoin_union_remove_1.q.out               | 144 ++--
 .../skewjoin_union_remove_2.q.out               |  48 +-
 .../results/clientpositive/skewjoinopt1.q.out   | 144 ++--
 .../results/clientpositive/skewjoinopt10.q.out  |  52 +-
 .../results/clientpositive/skewjoinopt11.q.out  |  84 +-
 .../results/clientpositive/skewjoinopt12.q.out  |  40 +-
 .../results/clientpositive/skewjoinopt13.q.out  |  28 +-
 .../results/clientpositive/skewjoinopt14.q.out  |  48 +-
 .../results/clientpositive/skewjoinopt15.q.out  |  34 +-
 .../results/clientpositive/skewjoinopt16.q.out  |  40 +-
 .../results/clientpositive/skewjoinopt17.q.out  |  80 +-
 .../results/clientpositive/skewjoinopt18.q.out  |   6 +-
 .../results/clientpositive/skewjoinopt19.q.out  |  40 +-
 .../results/clientpositive/skewjoinopt2.q.out   | 168 ++--
 .../results/clientpositive/skewjoinopt20.q.out  |  40 +-
 .../results/clientpositive/skewjoinopt3.q.out   |  80 +-
 .../results/clientpositive/skewjoinopt4.q.out   |  80 +-
 .../results/clientpositive/skewjoinopt5.q.out   |  40 +-
 .../results/clientpositive/skewjoinopt6.q.out   |  40 +-
 .../results/clientpositive/skewjoinopt7.q.out   |  52 +-
 .../results/clientpositive/skewjoinopt8.q.out   |  52 +-
 .../results/clientpositive/skewjoinopt9.q.out   |  54 +-
 .../results/clientpositive/smb_mapjoin9.q.out   |   8 +-
 .../clientpositive/spark/auto_join27.q.out      |   7 +
 .../clientpositive/spark/auto_join32.q.out      |  64 +-
 .../clientpositive/spark/auto_join_stats.q.out  |   4 +-
 .../clientpositive/spark/auto_join_stats2.q.out |   8 +-
 .../spark/auto_sortmerge_join_10.q.out          |  10 +
 .../spark/auto_sortmerge_join_12.q.out          |   2 +-
 .../spark/bucket_map_join_1.q.out               |  10 +-
 .../spark/bucket_map_join_2.q.out               |  10 +-
 .../clientpositive/spark/bucketmapjoin1.q.out   |  28 +-
 .../spark/column_access_stats.q.out             | 104 ++-
 .../clientpositive/spark/groupby_cube1.q.out    | 110 +--
 .../clientpositive/spark/groupby_rollup1.q.out  |  96 +--
 .../spark/groupby_sort_1_23.q.out               |  19 +
 .../spark/groupby_sort_skew_1_23.q.out          |  19 +
 .../results/clientpositive/spark/insert1.q.out  |  46 +-
 .../results/clientpositive/spark/join19.q.out   |  62 +-
 .../results/clientpositive/spark/join34.q.out   |   8 +
 .../results/clientpositive/spark/join35.q.out   |   4 +
 .../spark/join_cond_pushdown_unqual1.q.out      |  48 +-
 .../spark/join_cond_pushdown_unqual2.q.out      |  24 +-
 .../spark/join_cond_pushdown_unqual3.q.out      |  48 +-
 .../spark/join_cond_pushdown_unqual4.q.out      |  24 +-
 .../clientpositive/spark/join_hive_626.q.out    |  28 +-
 .../clientpositive/spark/join_reorder.q.out     |  92 +-
 .../clientpositive/spark/join_reorder2.q.out    |  68 +-
 .../clientpositive/spark/join_reorder3.q.out    |  68 +-
 .../clientpositive/spark/join_reorder4.q.out    |  54 +-
 .../clientpositive/spark/join_view.q.out        |  18 +-
 .../spark/lateral_view_explode2.q.out           |   6 +-
 .../clientpositive/spark/load_dyn_part13.q.out  |   8 +
 .../clientpositive/spark/load_dyn_part14.q.out  |   9 +
 .../spark/mapjoin_subquery2.q.out               |  30 +-
 .../clientpositive/spark/mergejoins.q.out       |  40 +-
 .../clientpositive/spark/mergejoins_mixed.q.out | 332 ++++----
 .../clientpositive/spark/multi_insert.q.out     |  48 ++
 ...i_insert_move_tasks_share_dependencies.q.out |  48 ++
 .../clientpositive/spark/multi_join_union.q.out |   8 +
 .../spark/multigroupby_singlemr.q.out           | 146 ++--
 .../spark/optimize_nullscan.q.out               |  76 +-
 .../clientpositive/spark/ppd_gby_join.q.out     |  12 +-
 .../results/clientpositive/spark/ppd_join.q.out |   6 +-
 .../clientpositive/spark/ppd_join4.q.out        |  24 +-
 .../clientpositive/spark/ppd_join5.q.out        |  54 +-
 .../clientpositive/spark/ppd_outer_join1.q.out  |   6 +-
 .../clientpositive/spark/ppd_outer_join2.q.out  |   6 +-
 .../clientpositive/spark/ppd_outer_join4.q.out  |  32 +-
 .../clientpositive/spark/ppd_outer_join5.q.out  |  72 +-
 .../spark/ql_rewrite_gbtoidx.q.out              | 263 +++---
 .../spark/ql_rewrite_gbtoidx_cbo_1.q.out        | 263 +++---
 .../spark/reduce_deduplicate.q.out              |  16 +-
 .../spark/runtime_skewjoin_mapjoin_spark.q.out  |   5 +-
 .../results/clientpositive/spark/sample6.q.out  |  12 +-
 .../results/clientpositive/spark/semijoin.q.out |  10 +-
 .../results/clientpositive/spark/skewjoin.q.out |  79 +-
 .../spark/skewjoin_union_remove_1.q.out         | 120 +--
 .../spark/skewjoin_union_remove_2.q.out         |  42 +-
 .../clientpositive/spark/skewjoinopt1.q.out     | 120 +--
 .../clientpositive/spark/skewjoinopt10.q.out    |  32 +-
 .../clientpositive/spark/skewjoinopt11.q.out    |  64 +-
 .../clientpositive/spark/skewjoinopt12.q.out    |  30 +-
 .../clientpositive/spark/skewjoinopt13.q.out    |  28 +-
 .../clientpositive/spark/skewjoinopt14.q.out    |  40 +-
 .../clientpositive/spark/skewjoinopt15.q.out    |  40 +-
 .../clientpositive/spark/skewjoinopt16.q.out    |  30 +-
 .../clientpositive/spark/skewjoinopt17.q.out    |  60 +-
 .../clientpositive/spark/skewjoinopt18.q.out    |   6 +-
 .../clientpositive/spark/skewjoinopt19.q.out    |  30 +-
 .../clientpositive/spark/skewjoinopt2.q.out     | 128 +--
 .../clientpositive/spark/skewjoinopt20.q.out    |  30 +-
 .../clientpositive/spark/skewjoinopt3.q.out     |  60 +-
 .../clientpositive/spark/skewjoinopt4.q.out     |  60 +-
 .../clientpositive/spark/skewjoinopt5.q.out     |  30 +-
 .../clientpositive/spark/skewjoinopt6.q.out     |  30 +-
 .../clientpositive/spark/skewjoinopt7.q.out     |  42 +-
 .../clientpositive/spark/skewjoinopt8.q.out     |  42 +-
 .../clientpositive/spark/skewjoinopt9.q.out     |  42 +-
 .../results/clientpositive/spark/stats1.q.out   |   6 +
 .../results/clientpositive/spark/stats12.q.out  |   2 +-
 .../results/clientpositive/spark/stats13.q.out  |   2 +-
 .../results/clientpositive/spark/stats2.q.out   |   2 +-
 .../results/clientpositive/spark/stats7.q.out   |   2 +-
 .../results/clientpositive/spark/stats8.q.out   |   2 +-
 .../spark/subquery_multiinsert.q.java1.7.out    |  14 +-
 .../clientpositive/spark/temp_table.q.out       |   6 +
 .../clientpositive/spark/transform1.q.out       |   8 +-
 .../clientpositive/spark/udf_in_file.q.out      |   6 +-
 .../results/clientpositive/spark/union.q.out    |   8 +
 .../results/clientpositive/spark/union10.q.out  |  12 +
 .../results/clientpositive/spark/union11.q.out  |  23 +-
 .../results/clientpositive/spark/union12.q.out  |  12 +
 .../results/clientpositive/spark/union13.q.out  |   6 +
 .../results/clientpositive/spark/union14.q.out  |  40 +-
 .../results/clientpositive/spark/union15.q.out  |  45 +-
 .../results/clientpositive/spark/union16.q.out  | 125 +++
 .../results/clientpositive/spark/union17.q.out  |  24 +-
 .../results/clientpositive/spark/union18.q.out  |  10 +
 .../results/clientpositive/spark/union19.q.out  |  16 +-
 .../results/clientpositive/spark/union2.q.out   |  10 +
 .../results/clientpositive/spark/union20.q.out  |  18 +-
 .../results/clientpositive/spark/union21.q.out  |  24 +-
 .../results/clientpositive/spark/union22.q.out  |   9 +
 .../results/clientpositive/spark/union23.q.out  |   7 +
 .../results/clientpositive/spark/union24.q.out  |  35 +
 .../results/clientpositive/spark/union25.q.out  |  18 +
 .../results/clientpositive/spark/union26.q.out  |  16 +
 .../results/clientpositive/spark/union27.q.out  |   8 +
 .../results/clientpositive/spark/union28.q.out  |  14 +
 .../results/clientpositive/spark/union29.q.out  |  12 +
 .../results/clientpositive/spark/union3.q.out   |  12 +
 .../results/clientpositive/spark/union30.q.out  |  18 +
 .../results/clientpositive/spark/union31.q.out  |  32 +
 .../results/clientpositive/spark/union32.q.out  |  30 +
 .../results/clientpositive/spark/union33.q.out  |  14 +
 .../results/clientpositive/spark/union34.q.out  |  63 +-
 .../results/clientpositive/spark/union4.q.out   |   8 +
 .../results/clientpositive/spark/union5.q.out   |  16 +-
 .../results/clientpositive/spark/union6.q.out   |   6 +
 .../results/clientpositive/spark/union7.q.out   |  40 +-
 .../results/clientpositive/spark/union8.q.out   |   9 +
 .../results/clientpositive/spark/union9.q.out   |  15 +
 .../spark/union_lateralview.q.out               |  24 +
 .../clientpositive/spark/union_ppr.q.out        |  10 +
 .../clientpositive/spark/union_remove_1.q.out   |  20 +-
 .../clientpositive/spark/union_remove_10.q.out  |  16 +-
 .../clientpositive/spark/union_remove_11.q.out  |  12 +
 .../clientpositive/spark/union_remove_12.q.out  |  14 +-
 .../clientpositive/spark/union_remove_13.q.out  |  22 +-
 .../clientpositive/spark/union_remove_14.q.out  |  14 +-
 .../clientpositive/spark/union_remove_15.q.out  |  22 +-
 .../clientpositive/spark/union_remove_16.q.out  |  22 +-
 .../clientpositive/spark/union_remove_17.q.out  |   8 +
 .../clientpositive/spark/union_remove_18.q.out  |  22 +-
 .../clientpositive/spark/union_remove_19.q.out  |  52 +-
 .../clientpositive/spark/union_remove_2.q.out   |  16 +-
 .../clientpositive/spark/union_remove_20.q.out  |  22 +-
 .../clientpositive/spark/union_remove_21.q.out  |  24 +-
 .../clientpositive/spark/union_remove_22.q.out  |  44 +-
 .../clientpositive/spark/union_remove_23.q.out  |  30 +-
 .../clientpositive/spark/union_remove_24.q.out  |  22 +-
 .../clientpositive/spark/union_remove_25.q.out  |  36 +-
 .../clientpositive/spark/union_remove_3.q.out   |  12 +
 .../clientpositive/spark/union_remove_4.q.out   |  20 +-
 .../clientpositive/spark/union_remove_5.q.out   |  16 +-
 .../clientpositive/spark/union_remove_6.q.out   |  22 +-
 .../spark/union_remove_6_subq.q.out             |  36 +-
 .../clientpositive/spark/union_remove_7.q.out   |  20 +-
 .../clientpositive/spark/union_remove_8.q.out   |  16 +-
 .../clientpositive/spark/union_remove_9.q.out   |  18 +-
 .../clientpositive/spark/union_top_level.q.out  |  42 +
 .../clientpositive/spark/union_view.q.out       | 143 ++++
 .../spark/vectorization_short_regress.q.out     |   8 +-
 .../test/results/clientpositive/stats12.q.out   |   2 +-
 .../test/results/clientpositive/stats13.q.out   |   2 +-
 ql/src/test/results/clientpositive/stats2.q.out |   2 +-
 ql/src/test/results/clientpositive/stats7.q.out |   2 +-
 ql/src/test/results/clientpositive/stats8.q.out |   2 +-
 .../subq_where_serialization.q.out              |   5 +
 .../clientpositive/subquery_in_having.q.out     |   4 +
 .../subquery_multiinsert.q.java1.7.out          |  18 +-
 .../results/clientpositive/subquery_notin.q.out |  34 +-
 .../subquery_notin_having.q.java1.7.out         |  18 +-
 .../subquery_unqual_corr_expr.q.out             |   8 +-
 .../subquery_unqualcolumnrefs.q.out             |  40 +-
 .../results/clientpositive/subquery_views.q.out |  12 +-
 .../symlink_text_input_format.q.out             |  24 +-
 .../tez/auto_sortmerge_join_10.q.out            |  14 +
 .../tez/auto_sortmerge_join_12.q.out            |   5 +-
 .../tez/dynamic_partition_pruning.q.out         | 390 +++++----
 .../tez/dynamic_partition_pruning_2.q.out       |  10 +
 .../clientpositive/tez/explainuser_1.q.out      | 853 ++++++++++---------
 .../clientpositive/tez/explainuser_2.q.out      | 555 +++++++++++-
 .../tez/hybridgrace_hashjoin_2.q.out            |  20 +-
 .../results/clientpositive/tez/mergejoin.q.out  |  12 +
 .../clientpositive/tez/metadataonly1.q.out      |  72 +-
 .../clientpositive/tez/optimize_nullscan.q.out  |  72 +-
 .../clientpositive/tez/selectDistinctStar.q.out |  16 +
 .../results/clientpositive/tez/skewjoin.q.out   |  76 +-
 .../results/clientpositive/tez/temp_table.q.out |   6 +
 .../results/clientpositive/tez/tez_join.q.out   |  28 +-
 .../clientpositive/tez/tez_join_hash.q.out      |  11 +
 .../clientpositive/tez/tez_smb_main.q.out       |  20 +
 .../results/clientpositive/tez/tez_union.q.out  |  81 ++
 .../results/clientpositive/tez/tez_union2.q.out |  18 +
 .../clientpositive/tez/tez_union_group_by.q.out |  51 +-
 .../tez/tez_union_multiinsert.q.out             | 126 ++-
 .../results/clientpositive/tez/transform1.q.out |   8 +-
 .../results/clientpositive/tez/union2.q.out     |  10 +
 .../results/clientpositive/tez/union3.q.out     |  12 +
 .../results/clientpositive/tez/union4.q.out     |   8 +
 .../results/clientpositive/tez/union5.q.out     |  12 +-
 .../results/clientpositive/tez/union6.q.out     |   6 +
 .../results/clientpositive/tez/union7.q.out     |  12 +-
 .../results/clientpositive/tez/union8.q.out     |   9 +
 .../results/clientpositive/tez/union9.q.out     |  15 +
 .../clientpositive/tez/unionDistinct_1.q.out    | 817 ++++++++++++++++--
 .../clientpositive/tez/vector_bucket.q.out      |  10 +-
 .../clientpositive/tez/vector_date_1.q.out      |  20 +-
 .../clientpositive/tez/vector_interval_2.q.out  |  60 +-
 .../tez/vector_leftsemi_mapjoin.q.out           |  60 +-
 .../tez/vector_mr_diff_schema_alias.q.out       |  42 +-
 .../tez/vectorization_short_regress.q.out       |   8 +-
 .../vectorized_dynamic_partition_pruning.q.out  | 390 +++++----
 .../clientpositive/timestamp_literal.q.out      |  12 +-
 .../results/clientpositive/transform1.q.out     |   8 +-
 .../results/clientpositive/type_widening.q.out  |   4 +-
 .../results/clientpositive/udf_add_months.q.out |   4 +-
 .../clientpositive/udf_bitwise_shiftleft.q.out  |   4 +-
 .../clientpositive/udf_bitwise_shiftright.q.out |   4 +-
 .../udf_bitwise_shiftrightunsigned.q.out        |   4 +-
 .../test/results/clientpositive/udf_cbrt.q.out  |   4 +-
 .../clientpositive/udf_current_database.q.out   |  16 +-
 .../clientpositive/udf_date_format.q.out        |   4 +-
 .../results/clientpositive/udf_decode.q.out     |   4 +-
 .../results/clientpositive/udf_factorial.q.out  |   4 +-
 .../clientpositive/udf_from_utc_timestamp.q.out |   4 +-
 .../results/clientpositive/udf_in_file.q.out    |   8 +-
 .../results/clientpositive/udf_last_day.q.out   |   4 +-
 .../results/clientpositive/udf_length.q.out     |   4 +-
 .../clientpositive/udf_levenshtein.q.out        |   4 +-
 .../test/results/clientpositive/udf_md5.q.out   |   4 +-
 .../clientpositive/udf_months_between.q.out     |   4 +-
 .../results/clientpositive/udf_quarter.q.out    |   4 +-
 .../test/results/clientpositive/udf_sha2.q.out  |   4 +-
 .../results/clientpositive/udf_soundex.q.out    |   4 +-
 .../clientpositive/udf_to_utc_timestamp.q.out   |   4 +-
 .../test/results/clientpositive/udf_trunc.q.out |  32 +-
 .../results/clientpositive/udtf_stack.q.out     |   8 +-
 .../test/results/clientpositive/union10.q.out   |  12 +-
 .../test/results/clientpositive/union11.q.out   |  16 +-
 .../test/results/clientpositive/union12.q.out   |  12 +-
 .../test/results/clientpositive/union14.q.out   |  12 +-
 .../test/results/clientpositive/union15.q.out   |  16 +-
 .../test/results/clientpositive/union17.q.out   |  22 +-
 .../test/results/clientpositive/union18.q.out   |   8 +-
 .../test/results/clientpositive/union19.q.out   |  20 +-
 .../test/results/clientpositive/union20.q.out   |   4 +-
 .../test/results/clientpositive/union21.q.out   |  24 +-
 .../test/results/clientpositive/union22.q.out   |   7 +
 ql/src/test/results/clientpositive/union4.q.out |   8 +-
 ql/src/test/results/clientpositive/union5.q.out |  12 +-
 ql/src/test/results/clientpositive/union7.q.out |  12 +-
 .../clientpositive/unionDistinct_1.q.out        | 351 ++++----
 .../results/clientpositive/union_remove_1.q.out |  24 +-
 .../clientpositive/union_remove_10.q.out        |  24 +-
 .../clientpositive/union_remove_11.q.out        |  30 +-
 .../clientpositive/union_remove_12.q.out        |  20 +-
 .../clientpositive/union_remove_13.q.out        |  28 +-
 .../clientpositive/union_remove_14.q.out        |  20 +-
 .../clientpositive/union_remove_15.q.out        |  28 +-
 .../clientpositive/union_remove_16.q.out        |  28 +-
 .../clientpositive/union_remove_17.q.out        |  20 +-
 .../clientpositive/union_remove_18.q.out        |  28 +-
 .../clientpositive/union_remove_19.q.out        |  56 +-
 .../results/clientpositive/union_remove_2.q.out |  24 +-
 .../clientpositive/union_remove_20.q.out        |  28 +-
 .../clientpositive/union_remove_21.q.out        |  24 +-
 .../clientpositive/union_remove_22.q.out        |  56 +-
 .../clientpositive/union_remove_23.q.out        |  34 +-
 .../clientpositive/union_remove_24.q.out        |  28 +-
 .../clientpositive/union_remove_25.q.out        |  24 +-
 .../results/clientpositive/union_remove_3.q.out |  30 +-
 .../results/clientpositive/union_remove_4.q.out |  24 +-
 .../results/clientpositive/union_remove_5.q.out |  24 +-
 .../results/clientpositive/union_remove_6.q.out |  32 +-
 .../clientpositive/union_remove_6_subq.q.out    |  32 +-
 .../results/clientpositive/union_remove_7.q.out |  24 +-
 .../results/clientpositive/union_remove_8.q.out |  24 +-
 .../results/clientpositive/union_remove_9.q.out |  32 +-
 .../results/clientpositive/union_view.q.out     | 216 ++---
 .../results/clientpositive/vector_bucket.q.out  |  10 +-
 .../results/clientpositive/vector_date_1.q.out  |  20 +-
 .../clientpositive/vector_interval_2.q.out      |  60 +-
 .../vector_leftsemi_mapjoin.q.out               |  48 +-
 .../clientpositive/vector_mapjoin_reduce.q.out  |  12 +
 .../vector_mr_diff_schema_alias.q.out           |  42 +-
 .../clientpositive/vector_multi_insert.q.out    |   6 +-
 .../vectorization_short_regress.q.out           |   8 +-
 454 files changed, 11101 insertions(+), 7296 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/external_table_ppd.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/external_table_ppd.q.out b/hbase-handler/src/test/results/positive/external_table_ppd.q.out
index 6d48edb..57424ce 100644
--- a/hbase-handler/src/test/results/positive/external_table_ppd.q.out
+++ b/hbase-handler/src/test/results/positive/external_table_ppd.q.out
@@ -130,17 +130,17 @@ STAGE PLANS:
           TableScan
             alias: t_hbase
             filterExpr: (int_col > 0) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (int_col > 0) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), tinyint_col (type: tinyint), smallint_col (type: smallint), int_col (type: int), bigint_col (type: bigint), float_col (type: float), double_col (type: double), boolean_col (type: boolean)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out b/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out
index c9b5a84..a0f5183 100644
--- a/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_custom_key2.q.out
@@ -70,14 +70,14 @@ STAGE PLANS:
         TableScan
           alias: hbase_ck_4
           filterExpr: ((key.col1 = '238') and (key.col2 = '1238')) (type: boolean)
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 = '238') and (key.col2 = '1238')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_4 where key.col1 = '238' AND key.col2 = '1238'
@@ -108,14 +108,14 @@ STAGE PLANS:
         TableScan
           alias: hbase_ck_4
           filterExpr: ((key.col1 >= '165') and (key.col1 < '27')) (type: boolean)
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 >= '165') and (key.col1 < '27')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_4 where key.col1 >= '165' AND key.col1 < '27'
@@ -147,14 +147,14 @@ STAGE PLANS:
         TableScan
           alias: hbase_ck_4
           filterExpr: ((key.col1 > '100') and (key.col2 >= '1238')) (type: boolean)
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 > '100') and (key.col2 >= '1238')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_4 where key.col1 > '100' AND key.col2 >= '1238'

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out b/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
index 76848e0..25032cc 100644
--- a/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_custom_key3.q.out
@@ -70,14 +70,14 @@ STAGE PLANS:
         TableScan
           alias: hbase_ck_5
           filterExpr: ((key.col1 = '238') and (key.col2 = '1238')) (type: boolean)
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 = '238') and (key.col2 = '1238')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_5 where key.col1 = '238' AND key.col2 = '1238'
@@ -107,14 +107,14 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: hbase_ck_5
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 >= '165') and (key.col1 < '27')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_5 where key.col1 >= '165' AND key.col1 < '27'
@@ -145,14 +145,14 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: hbase_ck_5
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 > '100') and (key.col2 >= '1238')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_5 where key.col1 > '100' AND key.col2 >= '1238'
@@ -181,14 +181,14 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: hbase_ck_5
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
             predicate: ((key.col1 < '50') and (key.col2 >= '3238')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: key (type: struct<col1:string,col2:string,col3:string>), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from hbase_ck_5 where key.col1 < '50' AND key.col2 >= '3238'

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out b/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
index 6174bfb..97e9aa2 100644
--- a/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
@@ -39,17 +39,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key > '90') (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key > '90') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -195,17 +195,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key >= '90') (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key >= '90') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -250,17 +250,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key > '90') (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (value like '%9%') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -306,17 +306,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key >= '90') (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((value like '%9%') and (UDFToDouble(key) = UDFToDouble(UDFToInteger(value)))) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -359,17 +359,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: ((key < '80') and (key > '90')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (value like '%90%') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -409,11 +409,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: hbase_pushdown
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Select Operator
             expressions: key (type: string), value (type: string)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             ListSink
 
 PREHOOK: query: -- with a predicate which is not actually part of the filter, so
@@ -438,17 +438,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (CASE WHEN ((key < '90')) THEN (2) ELSE (4) END > 3) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -482,17 +482,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((key <= '80') or (value like '%90%')) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -523,17 +523,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: ((key > '281') and (key < '287')) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((key > '281') and (key < '287')) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -578,17 +578,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key <= '90') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_pushdown.q.out b/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
index 8a979bf..be96eec 100644
--- a/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
@@ -39,17 +39,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key = 90) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key = 90) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -89,17 +89,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key = 90) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (value like '%90%') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -137,17 +137,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key = 90) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (value like '%90%') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -189,17 +189,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_pushdown
             filterExpr: (key = 90) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((value like '%90%') and (key = UDFToInteger(value))) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -231,17 +231,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (((key = 80) and (key = 90)) and (value like '%90%')) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -281,11 +281,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: hbase_pushdown
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Select Operator
             expressions: key (type: int), value (type: string)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             ListSink
 
 PREHOOK: query: -- with a predicate which is not actually part of the filter, so
@@ -310,17 +310,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -354,17 +354,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((key = 80) or (value like '%90%')) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -394,17 +394,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_pushdown
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key = 90) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/hbase_queries.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_queries.q.out b/hbase-handler/src/test/results/positive/hbase_queries.q.out
index 7863f69..e76169a 100644
--- a/hbase-handler/src/test/results/positive/hbase_queries.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_queries.q.out
@@ -119,19 +119,19 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table_1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: UDFToDouble(key) is not null (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Reduce Output Operator
                   key expressions: UDFToDouble(_col0) (type: double)
                   sort order: +
                   Map-reduce partition columns: UDFToDouble(_col0) (type: double)
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
@@ -269,34 +269,34 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table_1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (100 < key) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: int)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           TableScan
             alias: hbase_table_2
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (key < 120) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: int)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   value expressions: _col1 (type: string)
       Reduce Operator Tree:
         Join Operator
@@ -306,11 +306,11 @@ STAGE PLANS:
             0 _col0 (type: int)
             1 _col0 (type: int)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Select Operator
             expressions: _col2 (type: int), _col3 (type: string)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             File Output Operator
               compressed: false
               table:
@@ -325,15 +325,15 @@ STAGE PLANS:
             Reduce Output Operator
               key expressions: _col0 (type: int), _col1 (type: string)
               sort order: ++
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -526,19 +526,19 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table_1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: UDFToDouble(key) is not null (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Reduce Output Operator
                   key expressions: UDFToDouble(_col0) (type: double)
                   sort order: +
                   Map-reduce partition columns: UDFToDouble(_col0) (type: double)
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   value expressions: _col0 (type: int), _col1 (type: string)
           TableScan
             Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index 3aae7d0..7aef504 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -172,17 +172,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_table
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time < 200000000000)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time < 200000000000))) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -221,17 +221,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_table
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time > 100000000000)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time > 100000000000))) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -272,17 +272,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_table
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time <= 100000000000)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time <= 100000000000))) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -321,17 +321,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_table
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time >= 200000000000)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time >= 200000000000))) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out b/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
index 5936735..7f2c75b 100644
--- a/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
+++ b/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
@@ -37,17 +37,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_ppd_keyrange
             filterExpr: ((key > 8) and (key < 21)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((key > 8) and (key < 21)) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -91,17 +91,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_ppd_keyrange
             filterExpr: ((key > 8) and (key <= 17)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: ((key > 8) and (key <= 17)) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -142,17 +142,17 @@ STAGE PLANS:
           TableScan
             alias: hbase_ppd_keyrange
             filterExpr: ((key > 8) and (key <= 17)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (value like '%11%') (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -187,17 +187,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_ppd_keyrange
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (((key >= 9) and (key < 17)) and (key = 11)) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 11 (type: int), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/d823fc80/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
index 0de7488..43882e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
@@ -322,7 +322,7 @@ public class RelOptHiveTable extends RelOptAbstractTable {
             hiveColStats = new ArrayList<ColStatistics>();
             for (String c : nonPartColNamesThatRqrStats) {
               // add empty stats object for each column
-              hiveColStats.add(new ColStatistics(hiveTblMetadata.getTableName(), c, null));
+              hiveColStats.add(new ColStatistics(c, null));
             }
             colNamesFailedStats.clear();
           } else {
@@ -358,7 +358,7 @@ public class RelOptHiveTable extends RelOptAbstractTable {
     if (colNamesFailedStats.isEmpty() && !partColNamesThatRqrStats.isEmpty()) {
       ColStatistics cStats = null;
       for (int i = 0; i < partColNamesThatRqrStats.size(); i++) {
-        cStats = new ColStatistics(hiveTblMetadata.getTableName(), partColNamesThatRqrStats.get(i),
+        cStats = new ColStatistics(partColNamesThatRqrStats.get(i),
             hivePartitionColsMap.get(partColIndxsThatRqrStats.get(i)).getTypeName());
         cStats.setCountDistint(getDistinctCount(partitionList.getPartitions(),
             partColNamesThatRqrStats.get(i)));