Posted to commits@hive.apache.org by pr...@apache.org on 2015/09/15 22:42:30 UTC

[24/51] [partial] hive git commit: HIVE-11776: LLAP: Generate golden files for all MiniLlapCluster tests (Prasanth Jayachandran)

http://git-wip-us.apache.org/repos/asf/hive/blob/ace87818/ql/src/test/results/clientpositive/llap/explainuser_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_3.q.out b/ql/src/test/results/clientpositive/llap/explainuser_3.q.out
new file mode 100644
index 0000000..79c7116
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/explainuser_3.q.out
@@ -0,0 +1,522 @@
+PREHOOK: query: explain select key, value
+FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select key, value
+FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
+POSTHOOK: type: QUERY
+Plan not optimized by CBO.
+
+Stage-0
+   Fetch Operator
+      limit:-1
+      Select Operator [SEL_6]
+         outputColumnNames:["_col0","_col1"]
+         Lateral View Join Operator [LVJ_5]
+            outputColumnNames:["_col0","_col1","_col7"]
+            Select Operator [SEL_2]
+               outputColumnNames:["key","value"]
+               Lateral View Forward [LVF_1]
+                  TableScan [TS_0]
+                     alias:srcpart
+      Select Operator [SEL_6]
+         outputColumnNames:["_col0","_col1"]
+         Lateral View Join Operator [LVJ_5]
+            outputColumnNames:["_col0","_col1","_col7"]
+            UDTF Operator [UDTF_4]
+               function name:explode
+               Select Operator [SEL_3]
+                  outputColumnNames:["_col0"]
+                   Please refer to the previous Lateral View Forward [LVF_1]
+
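Note: the plan above exercises LATERAL VIEW. The Lateral View Forward operator feeds both a plain Select branch and a UDTF (explode) branch, which the Lateral View Join stitches back together, so every srcpart row is emitted once per array element. A minimal sketch of the query shape, here also projecting the generated column (myCol is the alias from the statement above):

    SELECT key, value, myCol
    FROM srcpart
    LATERAL VIEW explode(array(1,2,3)) myTable AS myCol;
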
+PREHOOK: query: explain show tables
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: explain show tables
+POSTHOOK: type: SHOWTABLES
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+         Show Table Operator:
+            database name:default
+
+#### A masked pattern was here ####
+PREHOOK: type: CREATEDATABASE
+#### A masked pattern was here ####
+POSTHOOK: type: CREATEDATABASE
+Stage-0
+
+#### A masked pattern was here ####
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:newDB
+#### A masked pattern was here ####
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:newDB
+#### A masked pattern was here ####
+PREHOOK: query: explain describe database extended newDB
+PREHOOK: type: DESCDATABASE
+POSTHOOK: query: explain describe database extended newDB
+POSTHOOK: type: DESCDATABASE
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: describe database extended newDB
+PREHOOK: type: DESCDATABASE
+PREHOOK: Input: database:newdb
+POSTHOOK: query: describe database extended newDB
+POSTHOOK: type: DESCDATABASE
+POSTHOOK: Input: database:newdb
+newdb		location/in/test	hive_test_user	USER	
+PREHOOK: query: explain use newDB
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain use newDB
+POSTHOOK: type: SWITCHDATABASE
+Stage-0
+
+PREHOOK: query: use newDB
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:newdb
+POSTHOOK: query: use newDB
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:newdb
+PREHOOK: query: create table tab (name string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:newdb
+PREHOOK: Output: newDB@tab
+POSTHOOK: query: create table tab (name string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:newdb
+POSTHOOK: Output: newDB@tab
+PREHOOK: query: explain alter table tab rename to newName
+PREHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: query: explain alter table tab rename to newName
+POSTHOOK: type: ALTERTABLE_RENAME
+Stage-0
+   Alter Table Operator:
+      new name:newDB.newName
+      old name:newDB.tab
+      type:rename
+
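Note that the rename is resolved against the current database (newDB was selected just above), which is why the operator shows fully qualified old and new names. Sketched:

    USE newDB;
    ALTER TABLE tab RENAME TO newName;
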
+PREHOOK: query: explain drop table tab
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: explain drop table tab
+POSTHOOK: type: DROPTABLE
+Stage-0
+   Drop Table Operator:
+      table:tab
+
+PREHOOK: query: drop table tab
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: newdb@tab
+PREHOOK: Output: newdb@tab
+POSTHOOK: query: drop table tab
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: newdb@tab
+POSTHOOK: Output: newdb@tab
+PREHOOK: query: explain use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain use default
+POSTHOOK: type: SWITCHDATABASE
+Stage-0
+
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: drop database newDB
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:newdb
+PREHOOK: Output: database:newdb
+POSTHOOK: query: drop database newDB
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:newdb
+POSTHOOK: Output: database:newdb
+PREHOOK: query: explain analyze table src compute statistics
+PREHOOK: type: QUERY
+POSTHOOK: query: explain analyze table src compute statistics
+POSTHOOK: type: QUERY
+Stage-2
+   Stats-Aggr Operator
+      Stage-0
+         Map 1
+         TableScan [TS_0]
+            alias:src
+            Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+
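Basic stats collection scans the table and then runs the Stats-Aggr stage shown above to publish row count and data size to the metastore. The statement family, sketched (the partitioned form is hypothetical here, reusing a table seen earlier in this file):

    ANALYZE TABLE src COMPUTE STATISTICS;
    ANALYZE TABLE srcpart PARTITION (ds='2008-04-08') COMPUTE STATISTICS;
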
+PREHOOK: query: explain analyze table src compute statistics for columns
+PREHOOK: type: QUERY
+POSTHOOK: query: explain analyze table src compute statistics for columns
+POSTHOOK: type: QUERY
+Vertex dependency in root stage
+Reducer 2 <- Map 1 (SIMPLE_EDGE)
+
+Stage-2
+   Column Stats Work{}
+      Stage-0
+         Reducer 2
+         File Output Operator [FS_6]
+            compressed:false
+            Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}
+            Group By Operator [GBY_4]
+            |  aggregations:["compute_stats(VALUE._col0)","compute_stats(VALUE._col1)"]
+            |  outputColumnNames:["_col0","_col1"]
+            |  Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            |<-Map 1 [SIMPLE_EDGE]
+               Reduce Output Operator [RS_3]
+                  sort order:
+                  Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  value expressions:_col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>)
+                  Group By Operator [GBY_2]
+                     aggregations:["compute_stats(key, 16)","compute_stats(value, 16)"]
+                     outputColumnNames:["_col0","_col1"]
+                     Statistics:Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                     Select Operator [SEL_1]
+                        outputColumnNames:["key","value"]
+                        Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                        TableScan [TS_0]
+                           alias:src
+                           Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+
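Column statistics are computed as compute_stats() aggregates, partially on the mappers (GBY_2) and merged on a single reducer (GBY_4); the second argument in compute_stats(key, 16) is, on this reading, the number of bit vectors used by the distinct-value estimator. The user-facing form:

    ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS;
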
+PREHOOK: query: explain
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: explain
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+POSTHOOK: type: CREATEMACRO
+Stage-0
+
+PREHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+PREHOOK: type: CREATEMACRO
+PREHOOK: Output: database:default
+POSTHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+POSTHOOK: type: CREATEMACRO
+POSTHOOK: Output: database:default
+PREHOOK: query: EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+Plan optimized by CBO.
+
+Stage-0
+   Fetch Operator
+      limit:1
+      Limit [LIM_2]
+         Number of rows:1
+         Select Operator [SEL_1]
+            outputColumnNames:["_col0"]
+            TableScan [TS_0]
+               alias:src
+
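Because macros are expanded at parse time, SIGMOID leaves no trace in the plan: it is an ordinary scan-select-limit. Numerically, SIGMOID(2) = 1.0 / (1.0 + EXP(-2)) ≈ 0.8808. The full lifecycle, sketched:

    CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x));
    SELECT SIGMOID(2) FROM src LIMIT 1;
    DROP TEMPORARY MACRO SIGMOID;
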
+PREHOOK: query: explain DROP TEMPORARY MACRO SIGMOID
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: explain DROP TEMPORARY MACRO SIGMOID
+POSTHOOK: type: DROPMACRO
+Stage-0
+
+PREHOOK: query: DROP TEMPORARY MACRO SIGMOID
+PREHOOK: type: DROPMACRO
+PREHOOK: Output: database:default
+POSTHOOK: query: DROP TEMPORARY MACRO SIGMOID
+POSTHOOK: type: DROPMACRO
+POSTHOOK: Output: database:default
+PREHOOK: query: explain create table src_autho_test as select * from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain create table src_autho_test as select * from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+Plan optimized by CBO.
+
+Stage-3
+   Stats-Aggr Operator
+      Stage-4
+         Create Table Operator:
+            columns:["key string","value string"]
+            input format:org.apache.hadoop.mapred.TextInputFormat
+            name:default.src_autho_test
+            output format:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+            Stage-2
+               Dependency Collection{}
+                  Stage-1
+                     Map 1
+                     File Output Operator [FS_2]
+                        compressed:false
+                        Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                        table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","name:":"default.src_autho_test"}
+                        Select Operator [SEL_1]
+                           outputColumnNames:["_col0","_col1"]
+                           Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                           TableScan [TS_0]
+                              alias:src
+                              Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Stage-0
+               Move Operator
+                   Please refer to the previous Stage-1
+
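The CTAS plan splits into five stages: Stage-1 materializes the SELECT into a staging directory, Stage-2 collects dependencies, Stage-0 moves the files into the new table's location, Stage-4 creates the table metadata, and Stage-3 aggregates stats. For reference, the statement being explained:

    CREATE TABLE src_autho_test AS SELECT * FROM src;
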
+PREHOOK: query: create table src_autho_test as select * from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: create table src_autho_test as select * from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: explain grant select on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: explain grant select on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+Stage-0
+
+PREHOOK: query: grant select on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: grant select on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: explain show grant user hive_test_user on table src_autho_test
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: explain show grant user hive_test_user on table src_autho_test
+POSTHOOK: type: SHOW_GRANT
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: explain show grant user hive_test_user on table src_autho_test(key)
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: explain show grant user hive_test_user on table src_autho_test(key)
+POSTHOOK: type: SHOW_GRANT
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: select key from src_autho_test order by key limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_autho_test
+#### A masked pattern was here ####
+POSTHOOK: query: select key from src_autho_test order by key limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_autho_test
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+100
+103
+103
+104
+104
+105
+11
+111
+113
+113
+114
+116
+118
+118
+119
+PREHOOK: query: explain revoke select on table src_autho_test from user hive_test_user
+PREHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: query: explain revoke select on table src_autho_test from user hive_test_user
+POSTHOOK: type: REVOKE_PRIVILEGE
+Stage-0
+
+PREHOOK: query: explain grant select(key) on table src_autho_test to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: explain grant select(key) on table src_autho_test to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+Stage-0
+
+PREHOOK: query: explain revoke select(key) on table src_autho_test from user hive_test_user
+PREHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: query: explain revoke select(key) on table src_autho_test from user hive_test_user
+POSTHOOK: type: REVOKE_PRIVILEGE
+Stage-0
+
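Column-level privileges take a parenthesized column list after the table name. The grant/inspect/revoke cycle exercised above, sketched:

    GRANT SELECT(key) ON TABLE src_autho_test TO USER hive_test_user;
    SHOW GRANT USER hive_test_user ON TABLE src_autho_test(key);
    REVOKE SELECT(key) ON TABLE src_autho_test FROM USER hive_test_user;
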
+PREHOOK: query: explain 
+create role sRc_roLE
+PREHOOK: type: CREATEROLE
+POSTHOOK: query: explain 
+create role sRc_roLE
+POSTHOOK: type: CREATEROLE
+Stage-0
+
+PREHOOK: query: create role sRc_roLE
+PREHOOK: type: CREATEROLE
+POSTHOOK: query: create role sRc_roLE
+POSTHOOK: type: CREATEROLE
+PREHOOK: query: explain
+grant role sRc_roLE to user hive_test_user
+PREHOOK: type: GRANT_ROLE
+POSTHOOK: query: explain
+grant role sRc_roLE to user hive_test_user
+POSTHOOK: type: GRANT_ROLE
+Stage-0
+
+PREHOOK: query: grant role sRc_roLE to user hive_test_user
+PREHOOK: type: GRANT_ROLE
+POSTHOOK: query: grant role sRc_roLE to user hive_test_user
+POSTHOOK: type: GRANT_ROLE
+PREHOOK: query: explain show role grant user hive_test_user
+PREHOOK: type: SHOW_ROLE_GRANT
+POSTHOOK: query: explain show role grant user hive_test_user
+POSTHOOK: type: SHOW_ROLE_GRANT
+Stage-1
+   Fetch Operator
+      limit:-1
+      Stage-0
+
+PREHOOK: query: explain drop role sRc_roLE
+PREHOOK: type: DROPROLE
+POSTHOOK: query: explain drop role sRc_roLE
+POSTHOOK: type: DROPROLE
+Stage-0
+
+PREHOOK: query: drop role sRc_roLE
+PREHOOK: type: DROPROLE
+POSTHOOK: query: drop role sRc_roLE
+POSTHOOK: type: DROPROLE
+PREHOOK: query: drop table src_autho_test
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@src_autho_test
+PREHOOK: Output: default@src_autho_test
+POSTHOOK: query: drop table src_autho_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@src_autho_test
+POSTHOOK: Output: default@src_autho_test
+PREHOOK: query: explain drop view v
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: explain drop view v
+POSTHOOK: type: DROPVIEW
+Stage-0
+   Drop Table Operator:
+      table:v
+
+PREHOOK: query: explain create view v as with cte as (select * from src  order by key limit 5)
+select * from cte
+PREHOOK: type: CREATEVIEW
+POSTHOOK: query: explain create view v as with cte as (select * from src  order by key limit 5)
+select * from cte
+POSTHOOK: type: CREATEVIEW
+Plan not optimized by CBO.
+
+Stage-0
+   Create View Operator:
+      name:default.v
+      original text:with cte as (select * from src  order by key limit 5)
+select * from cte
+
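As the "original text:" field shows, the view stores its defining statement verbatim, CTE included, so the CTE is re-expanded whenever the view is referenced:

    CREATE VIEW v AS
    WITH cte AS (SELECT * FROM src ORDER BY key LIMIT 5)
    SELECT * FROM cte;
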
+PREHOOK: query: explain with cte as (select * from src  order by key limit 5)
+select * from cte
+PREHOOK: type: QUERY
+POSTHOOK: query: explain with cte as (select * from src  order by key limit 5)
+select * from cte
+POSTHOOK: type: QUERY
+Plan optimized by CBO.
+
+Vertex dependency in root stage
+Reducer 2 <- Map 1 (SIMPLE_EDGE)
+
+Stage-0
+   Fetch Operator
+      limit:5
+      Stage-1
+         Reducer 2
+         File Output Operator [FS_5]
+            compressed:false
+            Statistics:Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+            table:{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}
+            Limit [LIM_4]
+               Number of rows:5
+               Statistics:Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE
+               Select Operator [SEL_3]
+               |  outputColumnNames:["_col0","_col1"]
+               |  Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+               |<-Map 1 [SIMPLE_EDGE]
+                  Reduce Output Operator [RS_2]
+                     key expressions:_col0 (type: string)
+                     sort order:+
+                     Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                     value expressions:_col1 (type: string)
+                     Select Operator [SEL_1]
+                        outputColumnNames:["_col0","_col1"]
+                        Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                        TableScan [TS_0]
+                           alias:src
+                           Statistics:Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+
+PREHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_merge5
+POSTHOOK: query: create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_merge5
+PREHOOK: query: load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@orc_merge5
+POSTHOOK: query: load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@orc_merge5
+PREHOOK: query: explain insert overwrite table orc_merge5 select userid,string1,subtype,decimal1,ts from orc_merge5 where userid<=13
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert overwrite table orc_merge5 select userid,string1,subtype,decimal1,ts from orc_merge5 where userid<=13
+POSTHOOK: type: QUERY
+Plan optimized by CBO.
+
+Stage-3
+   Stats-Aggr Operator
+      Stage-0
+         Move Operator
+            table:{"input format:":"org.apache.hadoop.hive.ql.io.orc.OrcInputFormat","output format:":"org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat","serde:":"org.apache.hadoop.hive.ql.io.orc.OrcSerde","name:":"default.orc_merge5"}
+            Stage-2
+               Dependency Collection{}
+                  Stage-5(CONDITIONAL)
+                     Move Operator
+                        Stage-8(CONDITIONAL CHILD TASKS: Stage-5, Stage-4, Stage-6)
+                           Conditional Operator
+                              Stage-1
+                                 Map 1
+                                 File Output Operator [FS_3]
+                                    compressed:false
+                                    Statistics:Num rows: 306 Data size: 82044 Basic stats: COMPLETE Column stats: NONE
+                                    table:{"input format:":"org.apache.hadoop.hive.ql.io.orc.OrcInputFormat","output format:":"org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat","serde:":"org.apache.hadoop.hive.ql.io.orc.OrcSerde","name:":"default.orc_merge5"}
+                                    Select Operator [SEL_2]
+                                       outputColumnNames:["_col0","_col1","_col2","_col3","_col4"]
+                                       Statistics:Num rows: 306 Data size: 82044 Basic stats: COMPLETE Column stats: NONE
+                                       Filter Operator [FIL_4]
+                                          predicate:(userid <= 13) (type: boolean)
+                                          Statistics:Num rows: 306 Data size: 82044 Basic stats: COMPLETE Column stats: NONE
+                                          TableScan [TS_0]
+                                             alias:orc_merge5
+                                             Statistics:Num rows: 919 Data size: 246402 Basic stats: COMPLETE Column stats: NONE
+                  Stage-4(CONDITIONAL)
+                     File Merge
+                     ORC File Merge Operator [OFM_7]
+                         Please refer to the previous Stage-8(CONDITIONAL CHILD TASKS: Stage-5, Stage-4, Stage-6)
+                  Stage-7
+                     Move Operator
+                        Stage-6(CONDITIONAL)
+                           File Merge
+                           ORC File Merge Operator [OFM_7]
+                               Please refer to the previous Stage-8(CONDITIONAL CHILD TASKS: Stage-5, Stage-4, Stage-6)
+
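Stages 4, 5 and 6 are the small-file-merge alternatives picked at runtime by the Conditional Operator in Stage-8; the ORC File Merge Operator indicates stripe-level concatenation rather than a re-encode. As an assumption (these settings are not shown in this file, but they are the standard knobs that drive the conditional merge):

    SET hive.merge.tezfiles=true;                -- merge small output files after Tez jobs
    SET hive.merge.smallfiles.avgsize=16000000;  -- average-size threshold that triggers a merge
    SET hive.merge.orcfile.stripe.level=true;    -- fast stripe-level ORC merge (the OFM operator above)
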
+PREHOOK: query: drop table orc_merge5
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@orc_merge5
+PREHOOK: Output: default@orc_merge5
+POSTHOOK: query: drop table orc_merge5
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@orc_merge5
+POSTHOOK: Output: default@orc_merge5

http://git-wip-us.apache.org/repos/asf/hive/blob/ace87818/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out b/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out
new file mode 100644
index 0000000..b8e738c
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/filter_join_breaktask.q.out
@@ -0,0 +1,445 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE filter_join_breaktask(key int, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@filter_join_breaktask
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE filter_join_breaktask(key int, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@filter_join_breaktask
+PREHOOK: query: INSERT OVERWRITE TABLE filter_join_breaktask PARTITION(ds='2008-04-08')
+SELECT key, value from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@filter_join_breaktask@ds=2008-04-08
+POSTHOOK: query: INSERT OVERWRITE TABLE filter_join_breaktask PARTITION(ds='2008-04-08')
+SELECT key, value from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@filter_join_breaktask@ds=2008-04-08
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).key EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: filter_join_breaktask PARTITION(ds=2008-04-08).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: EXPLAIN EXTENDED  
+SELECT f.key, g.value 
+FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null) 
+JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED  
+SELECT f.key, g.value 
+FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null) 
+JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_JOIN
+         TOK_JOIN
+            TOK_TABREF
+               TOK_TABNAME
+                  filter_join_breaktask
+               f
+            TOK_TABREF
+               TOK_TABNAME
+                  filter_join_breaktask
+               m
+            AND
+               AND
+                  AND
+                     =
+                        .
+                           TOK_TABLE_OR_COL
+                              f
+                           key
+                        .
+                           TOK_TABLE_OR_COL
+                              m
+                           key
+                     =
+                        .
+                           TOK_TABLE_OR_COL
+                              f
+                           ds
+                        '2008-04-08'
+                  =
+                     .
+                        TOK_TABLE_OR_COL
+                           m
+                        ds
+                     '2008-04-08'
+               TOK_FUNCTION
+                  TOK_ISNOTNULL
+                  .
+                     TOK_TABLE_OR_COL
+                        f
+                     key
+         TOK_TABREF
+            TOK_TABNAME
+               filter_join_breaktask
+            g
+         AND
+            AND
+               AND
+                  AND
+                     =
+                        .
+                           TOK_TABLE_OR_COL
+                              g
+                           value
+                        .
+                           TOK_TABLE_OR_COL
+                              m
+                           value
+                     =
+                        .
+                           TOK_TABLE_OR_COL
+                              g
+                           ds
+                        '2008-04-08'
+                  =
+                     .
+                        TOK_TABLE_OR_COL
+                           m
+                        ds
+                     '2008-04-08'
+               TOK_FUNCTION
+                  TOK_ISNOTNULL
+                  .
+                     TOK_TABLE_OR_COL
+                        m
+                     value
+            !=
+               .
+                  TOK_TABLE_OR_COL
+                     m
+                  value
+               ''
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            .
+               TOK_TABLE_OR_COL
+                  f
+               key
+         TOK_SELEXPR
+            .
+               TOK_TABLE_OR_COL
+                  g
+               value
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Map 5 (SIMPLE_EDGE), Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: f
+                  Statistics: Num rows: 25 Data size: 211 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 109 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: key (type: int)
+                      sort order: +
+                      Map-reduce partition columns: key (type: int)
+                      Statistics: Num rows: 13 Data size: 109 Basic stats: COMPLETE Column stats: NONE
+                      tag: 0
+                      auto parallelism: true
+            Execution mode: llap
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    COLUMN_STATS_ACCURATE true
+                    bucket_count -1
+                    columns key,value
+                    columns.comments 
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.filter_join_breaktask
+                    numFiles 1
+                    numRows 25
+                    partition_columns ds
+                    partition_columns.types string
+                    rawDataSize 211
+                    serialization.ddl struct filter_join_breaktask { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 236
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types int:string
+#### A masked pattern was here ####
+                      name default.filter_join_breaktask
+                      partition_columns ds
+                      partition_columns.types string
+                      serialization.ddl struct filter_join_breaktask { i32 key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.filter_join_breaktask
+                  name: default.filter_join_breaktask
+            Truncated Path -> Alias:
+              /filter_join_breaktask/ds=2008-04-08 [f]
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: m
+                  Statistics: Num rows: 25 Data size: 211 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: ((key is not null and value is not null) and (value <> '')) (type: boolean)
+                    Statistics: Num rows: 7 Data size: 59 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: key (type: int)
+                      sort order: +
+                      Map-reduce partition columns: key (type: int)
+                      Statistics: Num rows: 7 Data size: 59 Basic stats: COMPLETE Column stats: NONE
+                      tag: 1
+                      value expressions: value (type: string)
+                      auto parallelism: true
+            Execution mode: llap
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    COLUMN_STATS_ACCURATE true
+                    bucket_count -1
+                    columns key,value
+                    columns.comments 
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.filter_join_breaktask
+                    numFiles 1
+                    numRows 25
+                    partition_columns ds
+                    partition_columns.types string
+                    rawDataSize 211
+                    serialization.ddl struct filter_join_breaktask { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 236
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types int:string
+#### A masked pattern was here ####
+                      name default.filter_join_breaktask
+                      partition_columns ds
+                      partition_columns.types string
+                      serialization.ddl struct filter_join_breaktask { i32 key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.filter_join_breaktask
+                  name: default.filter_join_breaktask
+            Truncated Path -> Alias:
+              /filter_join_breaktask/ds=2008-04-08 [m]
+        Map 5 
+            Map Operator Tree:
+                TableScan
+                  alias: g
+                  Statistics: Num rows: 25 Data size: 211 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: ((value <> '') and value is not null) (type: boolean)
+                    Statistics: Num rows: 13 Data size: 109 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: value (type: string)
+                      sort order: +
+                      Map-reduce partition columns: value (type: string)
+                      Statistics: Num rows: 13 Data size: 109 Basic stats: COMPLETE Column stats: NONE
+                      tag: 1
+                      auto parallelism: true
+            Execution mode: llap
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    COLUMN_STATS_ACCURATE true
+                    bucket_count -1
+                    columns key,value
+                    columns.comments 
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.filter_join_breaktask
+                    numFiles 1
+                    numRows 25
+                    partition_columns ds
+                    partition_columns.types string
+                    rawDataSize 211
+                    serialization.ddl struct filter_join_breaktask { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 236
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types int:string
+#### A masked pattern was here ####
+                      name default.filter_join_breaktask
+                      partition_columns ds
+                      partition_columns.types string
+                      serialization.ddl struct filter_join_breaktask { i32 key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.filter_join_breaktask
+                  name: default.filter_join_breaktask
+            Truncated Path -> Alias:
+              /filter_join_breaktask/ds=2008-04-08 [g]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 key (type: int)
+                  1 key (type: int)
+                outputColumnNames: _col0, _col7
+                Position of Big Table: 0
+                Statistics: Num rows: 14 Data size: 119 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col7 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col7 (type: string)
+                  Statistics: Num rows: 14 Data size: 119 Basic stats: COMPLETE Column stats: NONE
+                  tag: 0
+                  value expressions: _col0 (type: int)
+                  auto parallelism: true
+        Reducer 3 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 _col7 (type: string)
+                  1 value (type: string)
+                outputColumnNames: _col0, _col13
+                Position of Big Table: 0
+                Statistics: Num rows: 15 Data size: 130 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: int), _col13 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 15 Data size: 130 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 15 Data size: 130 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          columns _col0,_col1
+                          columns.types int:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
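EXPLAIN EXTENDED adds the ABSTRACT SYNTAX TREE and the full partition/serde properties to the plan. The two Merge Join Operators map onto the two shuffles: Reducer 2 joins f with m on key, and Reducer 3 joins that result with g on value. A condensed form of the query under test (the ds and null-check predicates are abbreviated away):

    SELECT f.key, g.value
    FROM filter_join_breaktask f
    JOIN filter_join_breaktask m ON f.key = m.key
    JOIN filter_join_breaktask g ON g.value = m.value;
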
+PREHOOK: query: SELECT f.key, g.value 
+FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null) 
+JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@filter_join_breaktask
+PREHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT f.key, g.value 
+FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null) 
+JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@filter_join_breaktask
+POSTHOOK: Input: default@filter_join_breaktask@ds=2008-04-08
+#### A masked pattern was here ####
+146	val_146
+150	val_150
+213	val_213
+238	val_238
+255	val_255
+273	val_273
+278	val_278
+311	val_311
+401	val_401
+406	val_406
+66	val_66
+98	val_98

http://git-wip-us.apache.org/repos/asf/hive/blob/ace87818/ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out b/ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out
new file mode 100644
index 0000000..af85af9
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/filter_join_breaktask2.q.out
@@ -0,0 +1,272 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+create table T1(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string) 
+partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+create table T1(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string) 
+partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@T1
+PREHOOK: query: create table T2(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string,  c25 string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T2
+POSTHOOK: query: create table T2(c1 string, c2 string, c3 string, c0 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string,  c25 string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@T2
+PREHOOK: query: create table T3 (c0 bigint,  c1 bigint, c2 int) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T3
+POSTHOOK: query: create table T3 (c0 bigint,  c1 bigint, c2 int) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@T3
+PREHOOK: query: create table T4 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T4
+POSTHOOK: query: create table T4 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@T4
+PREHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1',  0, 0,4 from src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t1@ds=2010-04-17
+POSTHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1',  0, 0,4 from src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t1@ds=2010-04-17
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c1 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c2 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c3 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c4 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c5 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c6 SIMPLE []
+POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c7 SIMPLE []
+PREHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t2@ds=2010-04-17
+POSTHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t2@ds=2010-04-17
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c0 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c1 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c10 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c11 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c12 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c13 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c14 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c15 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c16 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c17 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c18 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c19 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c2 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c20 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c21 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c22 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c23 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c24 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c25 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c3 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c4 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c5 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c6 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c7 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c8 SIMPLE []
+POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c9 SIMPLE []
+PREHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t3@ds=2010-04-17
+POSTHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t3@ds=2010-04-17
+POSTHOOK: Lineage: t3 PARTITION(ds=2010-04-17).c0 EXPRESSION []
+POSTHOOK: Lineage: t3 PARTITION(ds=2010-04-17).c1 EXPRESSION []
+POSTHOOK: Lineage: t3 PARTITION(ds=2010-04-17).c2 SIMPLE []
+PREHOOK: query: insert overwrite table T4 partition(ds='2010-04-17') 
+select 4,'1','1','8','4','5','1','0','9','U','2','2', '0','2','1','1','J','C','A','U', '2','s', '2',NULL, NULL, NULL,NULL, NULL, NULL,'1','j', 'S', '6',NULL,'1', '2', 'J', 'g', '1', 'e', '2', '1', '2', 'U', 'P', 'p', '3', '0', '0', '0', '1', '1', '1', '0', '0', '0', '6', '2', 'j',NULL, NULL, NULL,NULL,NULL, NULL, '5',NULL, 'j', 'j', 2, 2, 1, '2', '2', '1', '1', '1', '1', '1', '1', 1, 1, 32,NULL from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t4@ds=2010-04-17
+POSTHOOK: query: insert overwrite table T4 partition(ds='2010-04-17') 
+select 4,'1','1','8','4','5','1','0','9','U','2','2', '0','2','1','1','J','C','A','U', '2','s', '2',NULL, NULL, NULL,NULL, NULL, NULL,'1','j', 'S', '6',NULL,'1', '2', 'J', 'g', '1', 'e', '2', '1', '2', 'U', 'P', 'p', '3', '0', '0', '0', '1', '1', '1', '0', '0', '0', '6', '2', 'j',NULL, NULL, NULL,NULL,NULL, NULL, '5',NULL, 'j', 'j', 2, 2, 1, '2', '2', '1', '1', '1', '1', '1', '1', 1, 1, 32,NULL from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t4@ds=2010-04-17
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c0 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c1 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c10 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c11 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c12 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c13 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c14 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c15 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c16 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c17 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c18 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c19 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c2 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c20 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c21 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c22 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c23 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c24 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c25 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c26 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c27 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c28 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c29 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c3 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c30 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c31 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c32 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c33 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c34 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c35 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c36 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c37 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c38 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c39 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c4 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c40 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c41 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c42 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c43 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c44 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c45 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c46 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c47 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c48 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c49 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c5 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c50 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c51 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c52 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c53 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c54 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c55 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c56 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c57 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c58 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c59 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c6 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c60 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c61 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c62 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c63 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c64 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c65 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c66 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c67 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c68 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c69 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c7 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c70 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c71 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c72 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c73 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c74 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c75 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c76 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c77 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c78 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c79 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c8 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c80 SIMPLE []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c81 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c82 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c83 EXPRESSION []
+POSTHOOK: Lineage: t4 PARTITION(ds=2010-04-17).c9 SIMPLE []
+PREHOOK: query: select * from T2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t2
+PREHOOK: Input: default@t2@ds=2010-04-17
+#### A masked pattern was here ####
+POSTHOOK: query: select * from T2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t2
+POSTHOOK: Input: default@t2@ds=2010-04-17
+#### A masked pattern was here ####
+5	name	NULL	2	kavin	NULL	9	c	8	0	0	7	1	2	0	3	2	NULL	1	NULL	3	2	0	0	5	10	2010-04-17
+PREHOOK: query: select * from T1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+PREHOOK: Input: default@t1@ds=2010-04-17
+#### A masked pattern was here ####
+POSTHOOK: query: select * from T1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+POSTHOOK: Input: default@t1@ds=2010-04-17
+#### A masked pattern was here ####
+5	1	1	1	0	0	4	2010-04-17
+PREHOOK: query: select * from T3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t3
+PREHOOK: Input: default@t3@ds=2010-04-17
+#### A masked pattern was here ####
+POSTHOOK: query: select * from T3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t3
+POSTHOOK: Input: default@t3@ds=2010-04-17
+#### A masked pattern was here ####
+4	5	0	2010-04-17
+PREHOOK: query: select * from T4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t4
+PREHOOK: Input: default@t4@ds=2010-04-17
+#### A masked pattern was here ####
+POSTHOOK: query: select * from T4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t4
+POSTHOOK: Input: default@t4@ds=2010-04-17
+#### A masked pattern was here ####
+4	1	1	8	4	5	1	0	9	U	2	2	0	2	1	1	J	C	A	U	2	s	2	NULL	NULL	NULL	NULL	NULL	NULL	1	j	S	6	NULL	1	2	J	g	1	e	2	1	2	U	P	p	3	0	0	0	1	1	1	0	0	0	6	2	j	NULL	NULL	NULL	NULL	NULL	NULL	5	NULL	NULL	j	2	2	1	2	2	1	1	1	1	1	1	1	1	32	NULL	2010-04-17
+WARNING: Comparing a bigint and a string may result in a loss of precision.
+PREHOOK: query: SELECT a.c1 as a_c1, b.c1 b_c1, d.c0 as d_c0
+FROM T1 a JOIN T2 b 
+       ON (a.c1 = b.c1 AND a.ds='2010-04-17' AND b.ds='2010-04-17')
+     JOIN T3 c 
+       ON (a.c1 = c.c1 AND a.ds='2010-04-17' AND c.ds='2010-04-17')
+     JOIN T4 d 
+       ON (c.c0 = d.c0 AND c.ds='2010-04-17' AND d.ds='2010-04-17')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+PREHOOK: Input: default@t1@ds=2010-04-17
+PREHOOK: Input: default@t2
+PREHOOK: Input: default@t2@ds=2010-04-17
+PREHOOK: Input: default@t3
+PREHOOK: Input: default@t3@ds=2010-04-17
+PREHOOK: Input: default@t4
+PREHOOK: Input: default@t4@ds=2010-04-17
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT a.c1 as a_c1, b.c1 b_c1, d.c0 as d_c0
+FROM T1 a JOIN T2 b 
+       ON (a.c1 = b.c1 AND a.ds='2010-04-17' AND b.ds='2010-04-17')
+     JOIN T3 c 
+       ON (a.c1 = c.c1 AND a.ds='2010-04-17' AND c.ds='2010-04-17')
+     JOIN T4 d 
+       ON (c.c0 = d.c0 AND c.ds='2010-04-17' AND d.ds='2010-04-17')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+POSTHOOK: Input: default@t1@ds=2010-04-17
+POSTHOOK: Input: default@t2
+POSTHOOK: Input: default@t2@ds=2010-04-17
+POSTHOOK: Input: default@t3
+POSTHOOK: Input: default@t3@ds=2010-04-17
+POSTHOOK: Input: default@t4
+POSTHOOK: Input: default@t4@ds=2010-04-17
+#### A masked pattern was here ####
+5	5	4
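
A note on the precision warning above: Hive emits it when an equality predicate compares a bigint column with a string column, since both sides get coerced to a common type before comparison. The CREATE TABLE statements are not part of this hunk, so the exact column types are an assumption, but the warning could presumably be silenced with an explicit cast on the string-typed side. A hedged sketch, assuming d.c0 is the string side of the c.c0 = d.c0 predicate:

    -- hypothetical rewrite; column types assumed, not shown in this diff
    SELECT a.c1 AS a_c1, b.c1 AS b_c1, d.c0 AS d_c0
    FROM T1 a JOIN T2 b
           ON (a.c1 = b.c1 AND a.ds='2010-04-17' AND b.ds='2010-04-17')
         JOIN T3 c
           ON (a.c1 = c.c1 AND a.ds='2010-04-17' AND c.ds='2010-04-17')
         JOIN T4 d
           ON (c.c0 = CAST(d.c0 AS BIGINT) AND c.ds='2010-04-17' AND d.ds='2010-04-17')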

http://git-wip-us.apache.org/repos/asf/hive/blob/ace87818/ql/src/test/results/clientpositive/llap/groupby1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/groupby1.q.out b/ql/src/test/results/clientpositive/llap/groupby1.q.out
new file mode 100644
index 0000000..1323a73
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/groupby1.q.out
@@ -0,0 +1,428 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest_g1
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE dest_g1(key INT, value DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest_g1
+PREHOOK: query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), substr(value, 5) (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: rand() (type: double)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: string)
+            Execution mode: llap
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: partial1
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+        Reducer 3 
+            Execution mode: uber
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: string)
+                mode: final
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: double)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest_g1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest_g1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest_g1
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest_g1 SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest_g1
+POSTHOOK: Lineage: dest_g1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g1.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest_g1.* FROM dest_g1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_g1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest_g1.* FROM dest_g1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_g1
+#### A masked pattern was here ####
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
+105	105.0
+11	11.0
+111	111.0
+113	226.0
+114	114.0
+116	116.0
+118	236.0
+119	357.0
+12	24.0
+120	240.0
+125	250.0
+126	126.0
+128	384.0
+129	258.0
+131	131.0
+133	133.0
+134	268.0
+136	136.0
+137	274.0
+138	552.0
+143	143.0
+145	145.0
+146	292.0
+149	298.0
+15	30.0
+150	150.0
+152	304.0
+153	153.0
+155	155.0
+156	156.0
+157	157.0
+158	158.0
+160	160.0
+162	162.0
+163	163.0
+164	328.0
+165	330.0
+166	166.0
+167	501.0
+168	168.0
+169	676.0
+17	17.0
+170	170.0
+172	344.0
+174	348.0
+175	350.0
+176	352.0
+177	177.0
+178	178.0
+179	358.0
+18	36.0
+180	180.0
+181	181.0
+183	183.0
+186	186.0
+187	561.0
+189	189.0
+19	19.0
+190	190.0
+191	382.0
+192	192.0
+193	579.0
+194	194.0
+195	390.0
+196	196.0
+197	394.0
+199	597.0
+2	2.0
+20	20.0
+200	400.0
+201	201.0
+202	202.0
+203	406.0
+205	410.0
+207	414.0
+208	624.0
+209	418.0
+213	426.0
+214	214.0
+216	432.0
+217	434.0
+218	218.0
+219	438.0
+221	442.0
+222	222.0
+223	446.0
+224	448.0
+226	226.0
+228	228.0
+229	458.0
+230	1150.0
+233	466.0
+235	235.0
+237	474.0
+238	476.0
+239	478.0
+24	48.0
+241	241.0
+242	484.0
+244	244.0
+247	247.0
+248	248.0
+249	249.0
+252	252.0
+255	510.0
+256	512.0
+257	257.0
+258	258.0
+26	52.0
+260	260.0
+262	262.0
+263	263.0
+265	530.0
+266	266.0
+27	27.0
+272	544.0
+273	819.0
+274	274.0
+275	275.0
+277	1108.0
+278	556.0
+28	28.0
+280	560.0
+281	562.0
+282	564.0
+283	283.0
+284	284.0
+285	285.0
+286	286.0
+287	287.0
+288	576.0
+289	289.0
+291	291.0
+292	292.0
+296	296.0
+298	894.0
+30	30.0
+302	302.0
+305	305.0
+306	306.0
+307	614.0
+308	308.0
+309	618.0
+310	310.0
+311	933.0
+315	315.0
+316	948.0
+317	634.0
+318	954.0
+321	642.0
+322	644.0
+323	323.0
+325	650.0
+327	981.0
+33	33.0
+331	662.0
+332	332.0
+333	666.0
+335	335.0
+336	336.0
+338	338.0
+339	339.0
+34	34.0
+341	341.0
+342	684.0
+344	688.0
+345	345.0
+348	1740.0
+35	105.0
+351	351.0
+353	706.0
+356	356.0
+360	360.0
+362	362.0
+364	364.0
+365	365.0
+366	366.0
+367	734.0
+368	368.0
+369	1107.0
+37	74.0
+373	373.0
+374	374.0
+375	375.0
+377	377.0
+378	378.0
+379	379.0
+382	764.0
+384	1152.0
+386	386.0
+389	389.0
+392	392.0
+393	393.0
+394	394.0
+395	790.0
+396	1188.0
+397	794.0
+399	798.0
+4	4.0
+400	400.0
+401	2005.0
+402	402.0
+403	1209.0
+404	808.0
+406	1624.0
+407	407.0
+409	1227.0
+41	41.0
+411	411.0
+413	826.0
+414	828.0
+417	1251.0
+418	418.0
+419	419.0
+42	84.0
+421	421.0
+424	848.0
+427	427.0
+429	858.0
+43	43.0
+430	1290.0
+431	1293.0
+432	432.0
+435	435.0
+436	436.0
+437	437.0
+438	1314.0
+439	878.0
+44	44.0
+443	443.0
+444	444.0
+446	446.0
+448	448.0
+449	449.0
+452	452.0
+453	453.0
+454	1362.0
+455	455.0
+457	457.0
+458	916.0
+459	918.0
+460	460.0
+462	924.0
+463	926.0
+466	1398.0
+467	467.0
+468	1872.0
+469	2345.0
+47	47.0
+470	470.0
+472	472.0
+475	475.0
+477	477.0
+478	956.0
+479	479.0
+480	1440.0
+481	481.0
+482	482.0
+483	483.0
+484	484.0
+485	485.0
+487	487.0
+489	1956.0
+490	490.0
+491	491.0
+492	984.0
+493	493.0
+494	494.0
+495	495.0
+496	496.0
+497	497.0
+498	1494.0
+5	15.0
+51	102.0
+53	53.0
+54	54.0
+57	57.0
+58	116.0
+64	64.0
+65	65.0
+66	66.0
+67	134.0
+69	69.0
+70	210.0
+72	144.0
+74	74.0
+76	152.0
+77	77.0
+78	78.0
+8	8.0
+80	80.0
+82	82.0
+83	166.0
+84	168.0
+85	85.0
+86	86.0
+87	87.0
+9	9.0
+90	270.0
+92	92.0
+95	190.0
+96	96.0
+97	194.0
+98	196.0
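
The plan in groupby1.q.out above has a distinctive shape: the Map 1 reduce sink partitions on rand(), and the aggregation runs in two reduce stages (mode partial1 in Reducer 2, mode final in Reducer 3). That is the skew-resistant group-by plan: distributing by rand() spreads hot keys across reducers for the partial sums, and the second stage re-partitions on _col0 to merge the partials. The .q file's settings are not visible in this diff, but the plan is consistent with something like:

    -- assumed settings; the actual groupby1.q directives are not part of this hunk
    set hive.map.aggr=false;
    set hive.groupby.skewindata=true;

    EXPLAIN
    FROM src INSERT OVERWRITE TABLE dest_g1
    SELECT src.key, sum(substr(src.value,5)) GROUP BY src.key;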

http://git-wip-us.apache.org/repos/asf/hive/blob/ace87818/ql/src/test/results/clientpositive/llap/groupby2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/groupby2.q.out b/ql/src/test/results/clientpositive/llap/groupby2.q.out
new file mode 100644
index 0000000..94d8c81
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/groupby2.q.out
@@ -0,0 +1,133 @@
+PREHOOK: query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest_g2
+POSTHOOK: query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest_g2
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: substr(key, 1, 1) (type: string), substr(value, 5) (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string), _col1 (type: string)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(DISTINCT KEY._col1:0._col0), sum(KEY._col1:0._col0)
+                keys: KEY._col0 (type: string)
+                mode: partial1
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint), _col2 (type: double)
+        Reducer 3 
+            Execution mode: uber
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), sum(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), concat(_col0, _col2) (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest_g2
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest_g2
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest_g2
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) GROUP BY substr(src.key,1,1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest_g2
+POSTHOOK: Lineage: dest_g2.c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.c2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_g2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+SELECT dest_g2.* FROM dest_g2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_g2
+#### A masked pattern was here ####
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+SELECT dest_g2.* FROM dest_g2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_g2
+#### A masked pattern was here ####
+0	1	00.0
+1	71	116414.0
+2	69	225571.0
+3	62	332004.0
+4	74	452763.0
+5	6	5397.0
+6	5	6398.0
+7	6	7735.0
+8	8	8762.0
+9	7	91047.0
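
groupby2 adds a DISTINCT aggregate, which changes the shuffle relative to groupby1: Map 1 sorts on both the group key and the distinct expression (sort order ++) but partitions on the group key alone, so each reducer sees every value for a given key in sorted order and can count distinct values in one pass. For cross-checking the c1 column, count(DISTINCT x) can be rewritten as a dedup-then-count; an illustrative sketch:

    -- equivalent rewrite for verification only
    SELECT t.k, count(*) AS c1
    FROM (SELECT DISTINCT substr(key,1,1) AS k, substr(value,5) AS v FROM src) t
    GROUP BY t.k;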

http://git-wip-us.apache.org/repos/asf/hive/blob/ace87818/ql/src/test/results/clientpositive/llap/groupby3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/groupby3.q.out b/ql/src/test/results/clientpositive/llap/groupby3.q.out
new file mode 100644
index 0000000..75f0c36
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/groupby3.q.out
@@ -0,0 +1,158 @@
+PREHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1
+POSTHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)),
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)),
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: substr(value, 5) (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(KEY._col0:0._col0), avg(KEY._col0:0._col0), avg(DISTINCT KEY._col0:0._col0), max(KEY._col0:0._col0), min(KEY._col0:0._col0), std(KEY._col0:0._col0), stddev_samp(KEY._col0:0._col0), variance(KEY._col0:0._col0), var_samp(KEY._col0:0._col0)
+                mode: partial1
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: double), _col1 (type: struct<count:bigint,sum:double,input:string>), _col2 (type: struct<count:bigint,sum:double,input:string>), _col3 (type: string), _col4 (type: string), _col5 (type: struct<count:bigint,sum:double,variance:double>), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: struct<count:bigint,sum:double,variance:double>)
+        Reducer 3 
+            Execution mode: uber
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2), max(VALUE._col3), min(VALUE._col4), std(VALUE._col5), stddev_samp(VALUE._col6), variance(VALUE._col7), var_samp(VALUE._col8)
+                mode: final
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), UDFToDouble(_col3) (type: double), UDFToDouble(_col4) (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: double)
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                  Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)), 
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT 
+  sum(substr(src.value,5)), 
+  avg(substr(src.value,5)), 
+  avg(DISTINCT substr(src.value,5)), 
+  max(substr(src.value,5)), 
+  min(substr(src.value,5)), 
+  std(substr(src.value,5)),
+  stddev_samp(substr(src.value,5)),
+  variance(substr(src.value,5)),
+  var_samp(substr(src.value,5))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c3 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c5 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c6 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c7 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c8 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c9 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+130091.0	260.182	256.10355987055016	98.0	0.0	142.92680950752379	143.06995106518903	20428.07287599999	20469.010897795582
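
The single result row makes the population/sample statistics easy to spot-check: src has 500 rows, and var_samp = variance * n/(n-1), so 20428.072876 * 500/499 ≈ 20469.010898, matching columns c8 and c9; likewise c6 and c7 are the square roots of c8 and c9 (std vs stddev_samp).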