Posted to commits@hive.apache.org by gu...@apache.org on 2017/02/03 21:50:28 UTC

[15/51] [partial] hive git commit: HIVE-15790: Remove unused beeline golden files (Gunther Hagleitner, reviewed by Sergey Shelukhin)

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_bigdata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_bigdata.q.out b/ql/src/test/results/beelinepositive/groupby_bigdata.q.out
deleted file mode 100644
index 7ea72e6..0000000
--- a/ql/src/test/results/beelinepositive/groupby_bigdata.q.out
+++ /dev/null
@@ -1,16 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_bigdata.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_bigdata.q
->>>  set hive.map.aggr.hash.percentmemory = 0.3;
-No rows affected 
->>>  set hive.mapred.local.mem = 384;
-No rows affected 
->>>  
->>>  add file ../data/scripts/dumpdata_script.py;
-No rows affected 
->>>  
->>>  select count(distinct subq.key) from 
-(FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq;
-'_c0'
-'1000022'
-1 row selected 
->>>  !record

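For context, the groupby_bigdata.q.out file removed above exercised Hive's streaming transform interface: `MAP src.key USING 'python dumpdata_script.py' AS key` pipes each input row to an external script as a tab-separated, newline-terminated line on stdin, and every line the script writes to stdout comes back as an output row. The real dumpdata_script.py is not part of this diff; the sketch below only illustrates the stdin/stdout protocol, and the fan-out factor is an assumption (the test's result of 1000022 distinct keys implies the actual script generated rows rather than echoing them).

#!/usr/bin/env python
# Minimal sketch of a Hive MAP/TRANSFORM streaming script, in the style of
# the dumpdata_script.py referenced above (the real script is not in this
# diff). Hive sends each row as tab-separated columns ending in '\n';
# every line written to stdout becomes one output row.
import sys

for line in sys.stdin:
    key = line.strip().split('\t')[0]  # first (and here only) column: src.key
    # A "bigdata" generator like the one this test used would fan each key
    # out into many synthetic rows; the factor of 10 here is an assumption.
    for i in range(10):
        sys.stdout.write('%s_%d\n' % (key, i))
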
http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_map_ppr.q.out b/ql/src/test/results/beelinepositive/groupby_map_ppr.q.out
deleted file mode 100644
index 69523b1..0000000
--- a/ql/src/test/results/beelinepositive/groupby_map_ppr.q.out
+++ /dev/null
@@ -1,286 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_map_ppr.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_map_ppr.q
->>>  set hive.map.aggr=true;
-No rows affected 
->>>  set hive.groupby.skewindata=false;
-No rows affected 
->>>  set mapred.reduce.tasks=31;
-No rows affected 
->>>  
->>>  CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN EXTENDED 
-FROM srcpart src 
-INSERT OVERWRITE TABLE dest1 
-SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) 
-WHERE src.ds = '2008-04-08' 
-GROUP BY substr(src.key,1,1);
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart) src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: count(DISTINCT substr(value, 5))'
-'                      expr: sum(substr(value, 5))'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: substr(key, 1, 1)'
-'                      type: string'
-'                      expr: substr(value, 5)'
-'                      type: string'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1, _col2, _col3'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'                  sort order: ++'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col2'
-'                        type: bigint'
-'                        expr: _col3'
-'                        type: double'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart/ds=2008-04-08/hr=11 [src]'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart/ds=2008-04-08/hr=12 [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart/ds=2008-04-08/hr=11 '
-'          Partition'
-'            base file name: hr=11'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            partition values:'
-'              ds 2008-04-08'
-'              hr 11'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart/ds=2008-04-08/hr=11'
-'              name groupby_map_ppr.srcpart'
-'              numFiles 1'
-'              numPartitions 4'
-'              numRows 0'
-'              partition_columns ds/hr'
-'              rawDataSize 0'
-'              serialization.ddl struct srcpart { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart'
-'                name groupby_map_ppr.srcpart'
-'                numFiles 4'
-'                numPartitions 4'
-'                numRows 0'
-'                partition_columns ds/hr'
-'                rawDataSize 0'
-'                serialization.ddl struct srcpart { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 23248'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_map_ppr.srcpart'
-'            name: groupby_map_ppr.srcpart'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart/ds=2008-04-08/hr=12 '
-'          Partition'
-'            base file name: hr=12'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            partition values:'
-'              ds 2008-04-08'
-'              hr 12'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart/ds=2008-04-08/hr=12'
-'              name groupby_map_ppr.srcpart'
-'              numFiles 1'
-'              numPartitions 4'
-'              numRows 0'
-'              partition_columns ds/hr'
-'              rawDataSize 0'
-'              serialization.ddl struct srcpart { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/srcpart'
-'                name groupby_map_ppr.srcpart'
-'                numFiles 4'
-'                numPartitions 4'
-'                numRows 0'
-'                partition_columns ds/hr'
-'                rawDataSize 0'
-'                serialization.ddl struct srcpart { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 23248'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_map_ppr.srcpart'
-'            name: groupby_map_ppr.srcpart'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(DISTINCT KEY._col1:0._col0)'
-'                expr: sum(VALUE._col1)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1, _col2'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: bigint'
-'                  expr: concat(_col0, _col2)'
-'                  type: string'
-'            outputColumnNames: _col0, _col1, _col2'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: UDFToInteger(_col1)'
-'                    type: int'
-'                    expr: _col2'
-'                    type: string'
-'              outputColumnNames: _col0, _col1, _col2'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 1'
-'                directory: pfile:!!{hive.exec.scratchdir}!!'
-'                NumFilesPerFileSink: 1'
-'                Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    properties:'
-'                      bucket_count -1'
-'                      columns key,c1,c2'
-'                      columns.types string:int:string'
-'                      file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                      file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                      location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/dest1'
-'                      name groupby_map_ppr.dest1'
-'                      serialization.ddl struct dest1 { string key, i32 c1, string c2}'
-'                      serialization.format 1'
-'                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      transient_lastDdlTime !!UNIXTIME!!'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: groupby_map_ppr.dest1'
-'                TotalFiles: 1'
-'                GatherStats: true'
-'                MultiFileSpray: false'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,c1,c2'
-'                columns.types string:int:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr.db/dest1'
-'                name groupby_map_ppr.dest1'
-'                serialization.ddl struct dest1 { string key, i32 c1, string c2}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_map_ppr.dest1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-''
-243 rows selected 
->>>  
->>>  FROM srcpart src 
-INSERT OVERWRITE TABLE dest1 
-SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))) 
-WHERE src.ds = '2008-04-08' 
-GROUP BY substr(src.key,1,1);
-'_col0','_col1','_col2'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','c1','c2'
-'0','1','00.0'
-'1','71','132828.0'
-'2','69','251142.0'
-'3','62','364008.0'
-'4','74','4105526.0'
-'5','6','5794.0'
-'6','5','6796.0'
-'7','6','71470.0'
-'8','8','81524.0'
-'9','7','92094.0'
-10 rows selected 
->>>  !record

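The plan in the groupby_map_ppr.q.out file removed above shows the two-phase GROUP BY that `set hive.map.aggr=true` enables: a map-side Group By Operator in `mode: hash` keeps partial aggregates in an in-memory hash keyed by substr(key,1,1), and the reduce-side operator in `mode: mergepartial` combines those partials. The sketch below is illustrative only, not Hive source; the entry-count threshold stands in for hive.map.aggr.hash.percentmemory, and all names and the sample rows are assumptions.

# Two-phase GROUP BY sketch: map-side hash aggregation with thresholded
# flushing, then a reduce-side merge of the partial results.
from collections import defaultdict

MAX_ENTRIES = 4  # stand-in for the hash.percentmemory memory threshold

def map_side(rows):
    """Yield (group_key, partial_sum) pairs, flushing the hash when full."""
    partials = defaultdict(float)
    for key, value in rows:
        # substr(key,1,1) is the group key; substr(value,5) is summed.
        partials[key[:1]] += float(value[4:])
        if len(partials) > MAX_ENTRIES:  # hash too big: emit and clear;
            yield from partials.items()  # duplicates merge on the reduce side
            partials.clear()
    yield from partials.items()

def reduce_side(pairs):
    """'mergepartial': combine partial sums that share a group key."""
    merged = defaultdict(float)
    for key, partial in pairs:
        merged[key] += partial
    return dict(merged)

rows = [('86', 'val_86'), ('27', 'val_27'), ('98', 'val_98')]
print(reduce_side(map_side(rows)))  # {'8': 86.0, '2': 27.0, '9': 98.0}
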
http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_map_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/beelinepositive/groupby_map_ppr_multi_distinct.q.out
deleted file mode 100644
index 444188b..0000000
--- a/ql/src/test/results/beelinepositive/groupby_map_ppr_multi_distinct.q.out
+++ /dev/null
@@ -1,306 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_map_ppr_multi_distinct.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_map_ppr_multi_distinct.q
->>>  set hive.map.aggr=true;
-No rows affected 
->>>  set hive.groupby.skewindata=false;
-No rows affected 
->>>  set mapred.reduce.tasks=31;
-No rows affected 
->>>  
->>>  CREATE TABLE dest1(key STRING, c1 INT, c2 STRING, C3 INT, c4 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN EXTENDED 
-FROM srcpart src 
-INSERT OVERWRITE TABLE dest1 
-SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(DISTINCT src.value) 
-WHERE src.ds = '2008-04-08' 
-GROUP BY substr(src.key,1,1);
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart) src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: count(DISTINCT substr(value, 5))'
-'                      expr: sum(substr(value, 5))'
-'                      expr: sum(DISTINCT substr(value, 5))'
-'                      expr: count(DISTINCT value)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: substr(key, 1, 1)'
-'                      type: string'
-'                      expr: substr(value, 5)'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'                        expr: _col2'
-'                        type: string'
-'                  sort order: +++'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col3'
-'                        type: bigint'
-'                        expr: _col4'
-'                        type: double'
-'                        expr: _col5'
-'                        type: double'
-'                        expr: _col6'
-'                        type: bigint'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart/ds=2008-04-08/hr=11 [src]'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart/ds=2008-04-08/hr=12 [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart/ds=2008-04-08/hr=11 '
-'          Partition'
-'            base file name: hr=11'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            partition values:'
-'              ds 2008-04-08'
-'              hr 11'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart/ds=2008-04-08/hr=11'
-'              name groupby_map_ppr_multi_distinct.srcpart'
-'              numFiles 1'
-'              numPartitions 4'
-'              numRows 0'
-'              partition_columns ds/hr'
-'              rawDataSize 0'
-'              serialization.ddl struct srcpart { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart'
-'                name groupby_map_ppr_multi_distinct.srcpart'
-'                numFiles 4'
-'                numPartitions 4'
-'                numRows 0'
-'                partition_columns ds/hr'
-'                rawDataSize 0'
-'                serialization.ddl struct srcpart { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 23248'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_map_ppr_multi_distinct.srcpart'
-'            name: groupby_map_ppr_multi_distinct.srcpart'
-'        !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart/ds=2008-04-08/hr=12 '
-'          Partition'
-'            base file name: hr=12'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            partition values:'
-'              ds 2008-04-08'
-'              hr 12'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart/ds=2008-04-08/hr=12'
-'              name groupby_map_ppr_multi_distinct.srcpart'
-'              numFiles 1'
-'              numPartitions 4'
-'              numRows 0'
-'              partition_columns ds/hr'
-'              rawDataSize 0'
-'              serialization.ddl struct srcpart { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/srcpart'
-'                name groupby_map_ppr_multi_distinct.srcpart'
-'                numFiles 4'
-'                numPartitions 4'
-'                numRows 0'
-'                partition_columns ds/hr'
-'                rawDataSize 0'
-'                serialization.ddl struct srcpart { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 23248'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_map_ppr_multi_distinct.srcpart'
-'            name: groupby_map_ppr_multi_distinct.srcpart'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(DISTINCT KEY._col1:0._col0)'
-'                expr: sum(VALUE._col1)'
-'                expr: sum(DISTINCT KEY._col1:1._col0)'
-'                expr: count(DISTINCT KEY._col1:2._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: bigint'
-'                  expr: concat(_col0, _col2)'
-'                  type: string'
-'                  expr: _col3'
-'                  type: double'
-'                  expr: _col4'
-'                  type: bigint'
-'            outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: UDFToInteger(_col1)'
-'                    type: int'
-'                    expr: _col2'
-'                    type: string'
-'                    expr: UDFToInteger(_col3)'
-'                    type: int'
-'                    expr: UDFToInteger(_col4)'
-'                    type: int'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 1'
-'                directory: pfile:!!{hive.exec.scratchdir}!!'
-'                NumFilesPerFileSink: 1'
-'                Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    properties:'
-'                      bucket_count -1'
-'                      columns key,c1,c2,c3,c4'
-'                      columns.types string:int:string:int:int'
-'                      file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                      file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                      location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/dest1'
-'                      name groupby_map_ppr_multi_distinct.dest1'
-'                      serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}'
-'                      serialization.format 1'
-'                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      transient_lastDdlTime !!UNIXTIME!!'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: groupby_map_ppr_multi_distinct.dest1'
-'                TotalFiles: 1'
-'                GatherStats: true'
-'                MultiFileSpray: false'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,c1,c2,c3,c4'
-'                columns.types string:int:string:int:int'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/groupby_map_ppr_multi_distinct.db/dest1'
-'                name groupby_map_ppr_multi_distinct.dest1'
-'                serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_map_ppr_multi_distinct.dest1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-''
-263 rows selected 
->>>  
->>>  FROM srcpart src 
-INSERT OVERWRITE TABLE dest1 
-SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(DISTINCT src.value) 
-WHERE src.ds = '2008-04-08' 
-GROUP BY substr(src.key,1,1);
-'_col0','_col1','_col2','_col3','_col4'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','c1','c2','c3','c4'
-'0','1','00.0','0','1'
-'1','71','132828.0','10044','71'
-'2','69','251142.0','15780','69'
-'3','62','364008.0','20119','62'
-'4','74','4105526.0','30965','74'
-'5','6','5794.0','278','6'
-'6','5','6796.0','331','5'
-'7','6','71470.0','447','6'
-'8','8','81524.0','595','8'
-'9','7','92094.0','577','7'
-10 rows selected 
->>>  !record

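The multi-distinct plan removed above shows how Hive evaluates several DISTINCT aggregates in a single MapReduce job: each distinct expression joins the reduce key (hence `sort order: +++` and keys _col0 through _col2), and the reducer addresses them as KEY._col1:0, :1, :2. The sketch below only mirrors what one group's reducer computes for the five dest1 columns; plain Python sets stand in for Hive's composite-key machinery, and the sample rows are assumptions.

# Per-group sketch of count(DISTINCT substr(value,5)), a plain sum, a
# DISTINCT sum, and count(DISTINCT value) computed in one pass.
def aggregate_group(rows):
    """rows: (key, value) pairs for one group; returns the five dest1 columns."""
    distinct_sub = set()   # feeds c1 and c3
    distinct_val = set()   # feeds c4
    plain_sum = 0.0        # sum(substr(value,5)), no DISTINCT
    group_key = None
    for key, value in rows:
        group_key = key[:1]   # substr(key,1,1)
        sub = value[4:]       # substr(value,5)
        distinct_sub.add(sub)
        distinct_val.add(value)
        plain_sum += float(sub)
    return (group_key,
            len(distinct_sub),                         # c1: count(DISTINCT substr(value,5))
            '%s%s' % (group_key, plain_sum),           # c2: concat(prefix, sum(...))
            int(sum(float(s) for s in distinct_sub)),  # c3: sum(DISTINCT substr(value,5))
            len(distinct_val))                         # c4: count(DISTINCT value)

print(aggregate_group([('86', 'val_86'), ('86', 'val_86'), ('82', 'val_82')]))
# ('8', 2, '8254.0', 168, 2)
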
http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_multi_single_reducer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_multi_single_reducer.q.out b/ql/src/test/results/beelinepositive/groupby_multi_single_reducer.q.out
deleted file mode 100644
index 507185b..0000000
--- a/ql/src/test/results/beelinepositive/groupby_multi_single_reducer.q.out
+++ /dev/null
@@ -1,824 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_multi_single_reducer.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_multi_single_reducer.q
->>>  set hive.multigroupby.singlereducer=true;
-No rows affected 
->>>  
->>>  CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest_g3(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest_g4(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest_h2(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest_h3(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM src 
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g4 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g2))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (>= (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g3))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (< (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g4))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-3 is a root stage'
-'  Stage-0 depends on stages: Stage-3'
-'  Stage-4 depends on stages: Stage-0'
-'  Stage-1 depends on stages: Stage-3'
-'  Stage-5 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-3'
-'  Stage-6 depends on stages: Stage-2'
-''
-'STAGE PLANS:'
-'  Stage: Stage-3'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: substr(key, 1, 1)'
-'                      type: string'
-'                      expr: substr(value, 5)'
-'                      type: string'
-'                sort order: ++'
-'                Map-reduce partition columns:'
-'                      expr: substr(key, 1, 1)'
-'                      type: string'
-'                tag: -1'
-'                value expressions:'
-'                      expr: value'
-'                      type: string'
-'      Reduce Operator Tree:'
-'        Forward'
-'          Filter Operator'
-'            predicate:'
-'                expr: (KEY._col0 >= 5.0)'
-'                type: boolean'
-'            Group By Operator'
-'              aggregations:'
-'                    expr: count(DISTINCT KEY._col1:1._col0)'
-'                    expr: sum(KEY._col1:1._col0)'
-'                    expr: sum(DISTINCT KEY._col1:1._col0)'
-'                    expr: count(VALUE._col0)'
-'              bucketGroup: false'
-'              keys:'
-'                    expr: KEY._col0'
-'                    type: string'
-'              mode: complete'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: bigint'
-'                      expr: concat(_col0, _col2)'
-'                      type: string'
-'                      expr: _col3'
-'                      type: double'
-'                      expr: _col4'
-'                      type: bigint'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: UDFToInteger(_col1)'
-'                        type: int'
-'                        expr: _col2'
-'                        type: string'
-'                        expr: UDFToInteger(_col3)'
-'                        type: int'
-'                        expr: UDFToInteger(_col4)'
-'                        type: int'
-'                  outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 1'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: groupby_multi_single_reducer.dest_g2'
-'          Filter Operator'
-'            predicate:'
-'                expr: (KEY._col0 < 5.0)'
-'                type: boolean'
-'            Group By Operator'
-'              aggregations:'
-'                    expr: count(DISTINCT KEY._col1:1._col0)'
-'                    expr: sum(KEY._col1:1._col0)'
-'                    expr: sum(DISTINCT KEY._col1:1._col0)'
-'                    expr: count(VALUE._col0)'
-'              bucketGroup: false'
-'              keys:'
-'                    expr: KEY._col0'
-'                    type: string'
-'              mode: complete'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: bigint'
-'                      expr: concat(_col0, _col2)'
-'                      type: string'
-'                      expr: _col3'
-'                      type: double'
-'                      expr: _col4'
-'                      type: bigint'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: UDFToInteger(_col1)'
-'                        type: int'
-'                        expr: _col2'
-'                        type: string'
-'                        expr: UDFToInteger(_col3)'
-'                        type: int'
-'                        expr: UDFToInteger(_col4)'
-'                        type: int'
-'                  outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 2'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: groupby_multi_single_reducer.dest_g3'
-'          Group By Operator'
-'            aggregations:'
-'                  expr: count(DISTINCT KEY._col1:1._col0)'
-'                  expr: sum(KEY._col1:1._col0)'
-'                  expr: sum(DISTINCT KEY._col1:1._col0)'
-'                  expr: count(VALUE._col0)'
-'            bucketGroup: false'
-'            keys:'
-'                  expr: KEY._col0'
-'                  type: string'
-'            mode: complete'
-'            outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: bigint'
-'                    expr: concat(_col0, _col2)'
-'                    type: string'
-'                    expr: _col3'
-'                    type: double'
-'                    expr: _col4'
-'                    type: bigint'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: UDFToInteger(_col1)'
-'                      type: int'
-'                      expr: _col2'
-'                      type: string'
-'                      expr: UDFToInteger(_col3)'
-'                      type: int'
-'                      expr: UDFToInteger(_col4)'
-'                      type: int'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 3'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      name: groupby_multi_single_reducer.dest_g4'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_g2'
-''
-'  Stage: Stage-4'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-1'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_g3'
-''
-'  Stage: Stage-5'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_g4'
-''
-'  Stage: Stage-6'
-'    Stats-Aggr Operator'
-''
-''
-229 rows selected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g4 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
-'_col0','_col1','_col2','_col3','_col4'
-No rows selected 
->>>  
->>>  SELECT * FROM dest_g2 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'5','6','5397.0','278','10'
-'6','5','6398.0','331','6'
-'7','6','7735.0','447','10'
-'8','8','8762.0','595','10'
-'9','7','91047.0','577','12'
-5 rows selected 
->>>  SELECT * FROM dest_g3 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'0','1','00.0','0','3'
-'1','71','116414.0','10044','115'
-'2','69','225571.0','15780','111'
-'3','62','332004.0','20119','99'
-'4','74','452763.0','30965','124'
-5 rows selected 
->>>  SELECT * FROM dest_g4 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'0','1','00.0','0','3'
-'1','71','116414.0','10044','115'
-'2','69','225571.0','15780','111'
-'3','62','332004.0','20119','99'
-'4','74','452763.0','30965','124'
-'5','6','5397.0','278','10'
-'6','5','6398.0','331','6'
-'7','6','7735.0','447','10'
-'8','8','8762.0','595','10'
-'9','7','91047.0','577','12'
-10 rows selected 
->>>  
->>>  EXPLAIN 
-FROM src 
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g4 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_h2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1), substr(src.key,2,1) LIMIT 10 
-INSERT OVERWRITE TABLE dest_h3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1), substr(src.key,2,1);
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g2))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (>= (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g3))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (< (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g4))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTIONDI sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_h2))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 2 1)) (TOK_LIMIT 10)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_h3))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5)))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (>= (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 2 1))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-5 is a root stage'
-'  Stage-0 depends on stages: Stage-5'
-'  Stage-6 depends on stages: Stage-0'
-'  Stage-1 depends on stages: Stage-5'
-'  Stage-7 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-5'
-'  Stage-8 depends on stages: Stage-2'
-'  Stage-9 depends on stages: Stage-5'
-'  Stage-10 depends on stages: Stage-9'
-'  Stage-3 depends on stages: Stage-10'
-'  Stage-11 depends on stages: Stage-3'
-'  Stage-4 depends on stages: Stage-10'
-'  Stage-12 depends on stages: Stage-4'
-''
-'STAGE PLANS:'
-'  Stage: Stage-5'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: substr(key, 1, 1)'
-'                      type: string'
-'                      expr: substr(value, 5)'
-'                      type: string'
-'                sort order: ++'
-'                Map-reduce partition columns:'
-'                      expr: substr(key, 1, 1)'
-'                      type: string'
-'                tag: -1'
-'                value expressions:'
-'                      expr: value'
-'                      type: string'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 0'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-'      Reduce Operator Tree:'
-'        Forward'
-'          Filter Operator'
-'            predicate:'
-'                expr: (KEY._col0 >= 5.0)'
-'                type: boolean'
-'            Group By Operator'
-'              aggregations:'
-'                    expr: count(DISTINCT KEY._col1:1._col0)'
-'                    expr: sum(KEY._col1:1._col0)'
-'                    expr: sum(DISTINCT KEY._col1:1._col0)'
-'                    expr: count(VALUE._col0)'
-'              bucketGroup: false'
-'              keys:'
-'                    expr: KEY._col0'
-'                    type: string'
-'              mode: complete'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: bigint'
-'                      expr: concat(_col0, _col2)'
-'                      type: string'
-'                      expr: _col3'
-'                      type: double'
-'                      expr: _col4'
-'                      type: bigint'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: UDFToInteger(_col1)'
-'                        type: int'
-'                        expr: _col2'
-'                        type: string'
-'                        expr: UDFToInteger(_col3)'
-'                        type: int'
-'                        expr: UDFToInteger(_col4)'
-'                        type: int'
-'                  outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 1'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: groupby_multi_single_reducer.dest_g2'
-'          Filter Operator'
-'            predicate:'
-'                expr: (KEY._col0 < 5.0)'
-'                type: boolean'
-'            Group By Operator'
-'              aggregations:'
-'                    expr: count(DISTINCT KEY._col1:1._col0)'
-'                    expr: sum(KEY._col1:1._col0)'
-'                    expr: sum(DISTINCT KEY._col1:1._col0)'
-'                    expr: count(VALUE._col0)'
-'              bucketGroup: false'
-'              keys:'
-'                    expr: KEY._col0'
-'                    type: string'
-'              mode: complete'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: bigint'
-'                      expr: concat(_col0, _col2)'
-'                      type: string'
-'                      expr: _col3'
-'                      type: double'
-'                      expr: _col4'
-'                      type: bigint'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: UDFToInteger(_col1)'
-'                        type: int'
-'                        expr: _col2'
-'                        type: string'
-'                        expr: UDFToInteger(_col3)'
-'                        type: int'
-'                        expr: UDFToInteger(_col4)'
-'                        type: int'
-'                  outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 2'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: groupby_multi_single_reducer.dest_g3'
-'          Group By Operator'
-'            aggregations:'
-'                  expr: count(DISTINCT KEY._col1:1._col0)'
-'                  expr: sum(KEY._col1:1._col0)'
-'                  expr: sum(DISTINCT KEY._col1:1._col0)'
-'                  expr: count(VALUE._col0)'
-'            bucketGroup: false'
-'            keys:'
-'                  expr: KEY._col0'
-'                  type: string'
-'            mode: complete'
-'            outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: bigint'
-'                    expr: concat(_col0, _col2)'
-'                    type: string'
-'                    expr: _col3'
-'                    type: double'
-'                    expr: _col4'
-'                    type: bigint'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: UDFToInteger(_col1)'
-'                      type: int'
-'                      expr: _col2'
-'                      type: string'
-'                      expr: UDFToInteger(_col3)'
-'                      type: int'
-'                      expr: UDFToInteger(_col4)'
-'                      type: int'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 3'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      name: groupby_multi_single_reducer.dest_g4'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_g2'
-''
-'  Stage: Stage-6'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-1'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_g3'
-''
-'  Stage: Stage-7'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_g4'
-''
-'  Stage: Stage-8'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-9'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              key expressions:'
-'                    expr: substr(key, 1, 1)'
-'                    type: string'
-'                    expr: substr(key, 2, 1)'
-'                    type: string'
-'                    expr: substr(value, 5)'
-'                    type: string'
-'              sort order: +++'
-'              Map-reduce partition columns:'
-'                    expr: substr(key, 1, 1)'
-'                    type: string'
-'                    expr: substr(key, 2, 1)'
-'                    type: string'
-'              tag: -1'
-'              value expressions:'
-'                    expr: value'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Forward'
-'          Group By Operator'
-'            aggregations:'
-'                  expr: count(DISTINCT KEY._col2:0._col0)'
-'                  expr: sum(KEY._col2:0._col0)'
-'                  expr: count(VALUE._col0)'
-'            bucketGroup: false'
-'            keys:'
-'                  expr: KEY._col0'
-'                  type: string'
-'                  expr: KEY._col1'
-'                  type: string'
-'            mode: complete'
-'            outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col2'
-'                    type: bigint'
-'                    expr: concat(_col0, _col3)'
-'                    type: string'
-'                    expr: _col3'
-'                    type: double'
-'                    expr: _col4'
-'                    type: bigint'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Limit'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 0'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-'          Filter Operator'
-'            predicate:'
-'                expr: (KEY._col0 >= 5.0)'
-'                type: boolean'
-'            Group By Operator'
-'              aggregations:'
-'                    expr: count(DISTINCT KEY._col2:0._col0)'
-'                    expr: sum(KEY._col2:0._col0)'
-'                    expr: count(VALUE._col0)'
-'              bucketGroup: false'
-'              keys:'
-'                    expr: KEY._col0'
-'                    type: string'
-'                    expr: KEY._col1'
-'                    type: string'
-'              mode: complete'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col2'
-'                      type: bigint'
-'                      expr: concat(_col0, _col3)'
-'                      type: string'
-'                      expr: _col3'
-'                      type: double'
-'                      expr: _col4'
-'                      type: bigint'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: UDFToInteger(_col1)'
-'                        type: int'
-'                        expr: _col2'
-'                        type: string'
-'                        expr: UDFToInteger(_col3)'
-'                        type: int'
-'                        expr: UDFToInteger(_col4)'
-'                        type: int'
-'                  outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 5'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: groupby_multi_single_reducer.dest_h3'
-''
-'  Stage: Stage-10'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              sort order: '
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: bigint'
-'                    expr: _col2'
-'                    type: string'
-'                    expr: _col3'
-'                    type: double'
-'                    expr: _col4'
-'                    type: bigint'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Limit'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: UDFToInteger(_col1)'
-'                    type: int'
-'                    expr: _col2'
-'                    type: string'
-'                    expr: UDFToInteger(_col3)'
-'                    type: int'
-'                    expr: UDFToInteger(_col4)'
-'                    type: int'
-'              outputColumnNames: _col0, _col1, _col2, _col3, _col4'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 4'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: groupby_multi_single_reducer.dest_h2'
-''
-'  Stage: Stage-3'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_h2'
-''
-'  Stage: Stage-11'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-4'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer.dest_h3'
-''
-'  Stage: Stage-12'
-'    Stats-Aggr Operator'
-''
-''
-426 rows selected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g4 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_h2 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1), substr(src.key,2,1) LIMIT 10 
-INSERT OVERWRITE TABLE dest_h3 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(substr(src.value, 5)), count(src.value) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1), substr(src.key,2,1);
-'_col0','_col1','_col2','_col3','_col4'
-No rows selected 
->>>  
->>>  SELECT * FROM dest_g2 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'5','6','5397.0','278','10'
-'6','5','6398.0','331','6'
-'7','6','7735.0','447','10'
-'8','8','8762.0','595','10'
-'9','7','91047.0','577','12'
-5 rows selected 
->>>  SELECT * FROM dest_g3 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'0','1','00.0','0','3'
-'1','71','116414.0','10044','115'
-'2','69','225571.0','15780','111'
-'3','62','332004.0','20119','99'
-'4','74','452763.0','30965','124'
-5 rows selected 
->>>  SELECT * FROM dest_g4 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'0','1','00.0','0','3'
-'1','71','116414.0','10044','115'
-'2','69','225571.0','15780','111'
-'3','62','332004.0','20119','99'
-'4','74','452763.0','30965','124'
-'5','6','5397.0','278','10'
-'6','5','6398.0','331','6'
-'7','6','7735.0','447','10'
-'8','8','8762.0','595','10'
-'9','7','91047.0','577','12'
-10 rows selected 
->>>  SELECT * FROM dest_h2 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'0','1','00.0','0','3'
-'1','4','1878.0','878','6'
-'1','5','1729.0','729','8'
-'1','6','11282.0','1282','12'
-'1','6','11494.0','1494','11'
-'1','7','11171.0','1171','11'
-'1','7','11516.0','1516','10'
-'1','8','11263.0','1263','10'
-'1','9','12294.0','2294','14'
-'1','9','12654.0','2654','16'
-10 rows selected 
->>>  SELECT * FROM dest_h3 ORDER BY key ASC, c1 ASC, c2 ASC, c3 ASC, c4 ASC;
-'key','c1','c2','c3','c4'
-'5','1','5102.0','102','2'
-'5','1','5116.0','116','2'
-'5','1','515.0','15','3'
-'5','1','553.0','53','1'
-'5','1','554.0','54','1'
-'5','1','557.0','57','1'
-'6','1','6134.0','134','2'
-'6','1','664.0','64','1'
-'6','1','665.0','65','1'
-'6','1','666.0','66','1'
-'6','1','669.0','69','1'
-'7','1','7144.0','144','2'
-'7','1','7152.0','152','2'
-'7','1','7210.0','210','3'
-'7','1','774.0','74','1'
-'7','1','777.0','77','1'
-'7','1','778.0','78','1'
-'8','1','8166.0','166','2'
-'8','1','8168.0','168','2'
-'8','1','88.0','8','1'
-'8','1','880.0','80','1'
-'8','1','882.0','82','1'
-'8','1','885.0','85','1'
-'8','1','886.0','86','1'
-'8','1','887.0','87','1'
-'9','1','9190.0','190','2'
-'9','1','9194.0','194','2'
-'9','1','9196.0','196','2'
-'9','1','9270.0','270','3'
-'9','1','99.0','9','1'
-'9','1','992.0','92','1'
-'9','1','996.0','96','1'
-32 rows selected 
->>>  
->>>  DROP TABLE dest_g2;
-No rows affected 
->>>  DROP TABLE dest_g3;
-No rows affected 
->>>  DROP TABLE dest_g4;
-No rows affected 
->>>  DROP TABLE dest_h2;
-No rows affected 
->>>  DROP TABLE dest_h3;
-No rows affected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_multi_single_reducer2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_multi_single_reducer2.q.out b/ql/src/test/results/beelinepositive/groupby_multi_single_reducer2.q.out
deleted file mode 100644
index 399875e..0000000
--- a/ql/src/test/results/beelinepositive/groupby_multi_single_reducer2.q.out
+++ /dev/null
@@ -1,194 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_multi_single_reducer2.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_multi_single_reducer2.q
->>>  set hive.multigroupby.singlereducer=true;
-No rows affected 
->>>  
->>>  CREATE TABLE dest_g2(key STRING, c1 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest_g3(key STRING, c1 INT, c2 INT) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM src 
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT src.key) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT src.key), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1);
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g2))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL src) key)))) (TOK_WHERE (>= (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_g3))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (. (TOK_TABLE_OR_COL src) key))) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value)))) (TOK_WHERE (< (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) 5)) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-2 is a root stage'
-'  Stage-0 depends on stages: Stage-2'
-'  Stage-3 depends on stages: Stage-0'
-'  Stage-1 depends on stages: Stage-2'
-'  Stage-4 depends on stages: Stage-1'
-''
-'STAGE PLANS:'
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((substr(key, 1, 1) >= 5) or (substr(key, 1, 1) < 5))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: key, value'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: substr(key, 1, 1)'
-'                        type: string'
-'                        expr: key'
-'                        type: string'
-'                  sort order: ++'
-'                  Map-reduce partition columns:'
-'                        expr: substr(key, 1, 1)'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: value'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Forward'
-'          Group By Operator'
-'            aggregations:'
-'                  expr: count(DISTINCT KEY._col1:0._col0)'
-'            bucketGroup: false'
-'            keys:'
-'                  expr: KEY._col0'
-'                  type: string'
-'            mode: complete'
-'            outputColumnNames: _col0, _col1'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: bigint'
-'              outputColumnNames: _col0, _col1'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: UDFToInteger(_col1)'
-'                      type: int'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      name: groupby_multi_single_reducer2.dest_g2'
-'          Group By Operator'
-'            aggregations:'
-'                  expr: count(DISTINCT KEY._col1:0._col0)'
-'                  expr: count(VALUE._col0)'
-'            bucketGroup: false'
-'            keys:'
-'                  expr: KEY._col0'
-'                  type: string'
-'            mode: complete'
-'            outputColumnNames: _col0, _col1, _col2'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: bigint'
-'                    expr: _col2'
-'                    type: bigint'
-'              outputColumnNames: _col0, _col1, _col2'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: UDFToInteger(_col1)'
-'                      type: int'
-'                      expr: UDFToInteger(_col2)'
-'                      type: int'
-'                outputColumnNames: _col0, _col1, _col2'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 2'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      name: groupby_multi_single_reducer2.dest_g3'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer2.dest_g2'
-''
-'  Stage: Stage-3'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-1'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: groupby_multi_single_reducer2.dest_g3'
-''
-'  Stage: Stage-4'
-'    Stats-Aggr Operator'
-''
-''
-139 rows selected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest_g2 SELECT substr(src.key,1,1), count(DISTINCT src.key) WHERE substr(src.key,1,1) >= 5 GROUP BY substr(src.key,1,1) 
-INSERT OVERWRITE TABLE dest_g3 SELECT substr(src.key,1,1), count(DISTINCT src.key), count(src.value) WHERE substr(src.key,1,1) < 5 GROUP BY substr(src.key,1,1);
-'_col0','_col1','_col2'
-No rows selected 
->>>  
->>>  SELECT * FROM dest_g2;
-'key','c1'
-'0','1'
-'1','71'
-'2','69'
-'3','62'
-'4','74'
-'5','6'
-'6','5'
-'7','6'
-'8','8'
-'9','7'
-10 rows selected 
->>>  SELECT * FROM dest_g3;
-'key','c1','c2'
-'0','1','3'
-'1','71','115'
-'2','69','111'
-'3','62','99'
-'4','74','124'
-'5','6','10'
-'6','5','6'
-'7','6','10'
-'8','8','10'
-'9','7','12'
-10 rows selected 
->>>  
->>>  DROP TABLE dest_g2;
-No rows affected 
->>>  DROP TABLE dest_g3;
-No rows affected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_neg_float.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_neg_float.q.out b/ql/src/test/results/beelinepositive/groupby_neg_float.q.out
deleted file mode 100644
index a996f4f..0000000
--- a/ql/src/test/results/beelinepositive/groupby_neg_float.q.out
+++ /dev/null
@@ -1,19 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_neg_float.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_neg_float.q
->>>  FROM src 
-SELECT cast('-30.33' as DOUBLE) 
-GROUP BY cast('-30.33' as DOUBLE) 
-LIMIT 1;
-'_c0'
-'-30.33'
-1 row selected 
->>>  
->>>  
->>>  FROM src 
-SELECT '-30.33' 
-GROUP BY '-30.33' 
-LIMIT 1;
-'_c0'
-'-30.33'
-1 row selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/groupby_ppd.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/groupby_ppd.q.out b/ql/src/test/results/beelinepositive/groupby_ppd.q.out
deleted file mode 100644
index 8ccd1fc..0000000
--- a/ql/src/test/results/beelinepositive/groupby_ppd.q.out
+++ /dev/null
@@ -1,153 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/groupby_ppd.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/groupby_ppd.q
->>>  -- see HIVE-2382
->>>  create table invites (id int, foo int, bar int);
-No rows affected 
->>>  explain select * from (select foo, bar from (select bar, foo from invites c union all select bar, foo from invites d) b) a group by bar, foo having bar=1;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME invites) c)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL bar)) (TOK_SELEXPR (TOK_TABLE_OR_COL foo))))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME invites) d)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL bar)) (TOK_SELEXPR (TOK_TABLE_OR_COL foo)))))) b)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL foo)) (TOK_SELEXPR (TOK_TABLE_OR_COL bar))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_GROUPBY (TOK_TABLE_OR_COL bar) (TOK_TABLE_OR_COL foo)) (TOK_HAVING (= (TOK_TABLE_OR_COL bar) 1))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        a-subquery1:b-subquery1:c '
-'          TableScan'
-'            alias: c'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (bar = 1)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: bar'
-'                      type: int'
-'                      expr: foo'
-'                      type: int'
-'                outputColumnNames: _col0, _col1'
-'                Union'
-'                  Select Operator'
-'                    expressions:'
-'                          expr: _col1'
-'                          type: int'
-'                          expr: _col0'
-'                          type: int'
-'                    outputColumnNames: _col0, _col1'
-'                    Select Operator'
-'                      expressions:'
-'                            expr: _col0'
-'                            type: int'
-'                            expr: _col1'
-'                            type: int'
-'                      outputColumnNames: _col0, _col1'
-'                      Group By Operator'
-'                        bucketGroup: false'
-'                        keys:'
-'                              expr: _col1'
-'                              type: int'
-'                              expr: _col0'
-'                              type: int'
-'                        mode: hash'
-'                        outputColumnNames: _col0, _col1'
-'                        Reduce Output Operator'
-'                          key expressions:'
-'                                expr: _col0'
-'                                type: int'
-'                                expr: _col1'
-'                                type: int'
-'                          sort order: ++'
-'                          Map-reduce partition columns:'
-'                                expr: _col0'
-'                                type: int'
-'                                expr: _col1'
-'                                type: int'
-'                          tag: -1'
-'        a-subquery2:b-subquery2:d '
-'          TableScan'
-'            alias: d'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (bar = 1)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: bar'
-'                      type: int'
-'                      expr: foo'
-'                      type: int'
-'                outputColumnNames: _col0, _col1'
-'                Union'
-'                  Select Operator'
-'                    expressions:'
-'                          expr: _col1'
-'                          type: int'
-'                          expr: _col0'
-'                          type: int'
-'                    outputColumnNames: _col0, _col1'
-'                    Select Operator'
-'                      expressions:'
-'                            expr: _col0'
-'                            type: int'
-'                            expr: _col1'
-'                            type: int'
-'                      outputColumnNames: _col0, _col1'
-'                      Group By Operator'
-'                        bucketGroup: false'
-'                        keys:'
-'                              expr: _col1'
-'                              type: int'
-'                              expr: _col0'
-'                              type: int'
-'                        mode: hash'
-'                        outputColumnNames: _col0, _col1'
-'                        Reduce Output Operator'
-'                          key expressions:'
-'                                expr: _col0'
-'                                type: int'
-'                                expr: _col1'
-'                                type: int'
-'                          sort order: ++'
-'                          Map-reduce partition columns:'
-'                                expr: _col0'
-'                                type: int'
-'                                expr: _col1'
-'                                type: int'
-'                          tag: -1'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: int'
-'                expr: KEY._col1'
-'                type: int'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: int'
-'                  expr: _col1'
-'                  type: int'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-142 rows selected 
->>>  drop table invites;
-No rows affected 
->>>  !record